dynflow 1.6.2 → 1.6.5

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: f0e5c6a8141eaea583bebabd135cdbfcff6c2516410db7930595e3868383f331
- data.tar.gz: bc72a36dd5284f5cea95651de2620f0d41db5f315195a4ed573155c94b510bb9
+ metadata.gz: 6d8777e2b8ec72f0cb4976e3469a0504129588c37b78de71c95bd0aeb378d698
+ data.tar.gz: 457be311485f9d2990feb5a1f8011506c27f0714eb1b77e5150c53b43d187b02
  SHA512:
- metadata.gz: c9b0efe531cf9d3c45432bfc94542f47ff6a72fe20bd96a4541242f58a13abc77603a62fcb5294e481feb9a28c49c6eebd2a9747092da6fe7a5e67ad1dc8e9d3
- data.tar.gz: d9c66e79fad07d6e6bfb9b8e265af0062adf8ba2b0d8d126e46080ca890200db07346f7c4702518a9c4d4ace4ae94d668ce20761166c9e81a7ef5531cdc85240
+ metadata.gz: 4e2f820dbd02c340ac001d70424026ab71ef16e49b5fb4ea5a946428b7b8bfb3532ac659549c726ffd263ef3a38f679e09e4be6b1f24b9275670552fcb9439fd
+ data.tar.gz: 07e329f9a1a40638fed22525e001a87bdbb00cc9009984116b04bc2cece00e7171fbb868ffb3f49c38907a6683879c0fa726ce99c9c02506d13becf5ce17d111
.github/workflows/release.yml ADDED
@@ -0,0 +1,48 @@
+ # workflow name
+ name: Generate release-artifacts
+
+ # on events
+ on:
+   push:
+     tags:
+       - '*'
+
+ # workflow tasks
+ jobs:
+   generate:
+     name: Generate build artifacts
+     runs-on: ubuntu-latest
+     steps:
+       - uses: olegtarasov/get-tag@v2.1
+         id: tagName
+         with:
+           tagRegex: "v(.*)" # Optional. Returns specified group text as tag name. Full tag string is returned if regex is not defined.
+           tagRegexGroup: 1 # Optional. Default is 1.
+       - name: Checkout the repository
+         uses: actions/checkout@v2
+       - name: Generate build files
+         run: |
+           mkdir -p dist
+           cd extras/expand
+           go build -o ../../dist/dynflow-expand-${VERSION}-x86_64
+         env:
+           VERSION: '${{ steps.tagName.outputs.tag }}'
+       - name: Generate distribution tarball
+         run: |
+           cd extras/expand
+           go mod vendor
+           tar --create \
+             --gzip \
+             --file ../../dist/dynflow-expand-${VERSION}.tar.gz \
+             --transform s/^\./dynflow-expand-${VERSION}/ \
+             .
+         env:
+           VERSION: '${{ steps.tagName.outputs.tag }}'
+       - name: Upload binaries to release
+         uses: svenstaro/upload-release-action@v2
+         with:
+           repo_token: ${{ secrets.GITHUB_TOKEN }}
+           file: dist/*
+           tag: ${{ github.ref }}
+           overwrite: true
+           file_glob: true
data/Gemfile CHANGED
@@ -35,7 +35,7 @@ end
 
  group :rails do
    gem 'daemons'
-   gem 'rails', '>= 4.2.9'
+   gem 'rails', '>= 4.2.9', '< 7'
    gem 'logging'
  end
 
extras/expand/Dockerfile ADDED
@@ -0,0 +1,9 @@
+ FROM alpine:3.15 as builder
+ RUN apk add -U go
+ ADD ./ work/
+ RUN cd /work && \
+     go build
+
+ FROM scratch
+ COPY --from=builder /work/expand /expand
+ CMD ["/expand"]
extras/expand/README.md ADDED
@@ -0,0 +1,25 @@
+ # expand
+
+ For a long time, Dynflow's database schema remained stable. To optimize Dynflow
+ a bit, we started changing it. One of the changes reworked how we encode
+ flows, resulting in flows taking roughly 10x less space.
+
+ The other change is not merged yet, but has a potentially bigger impact. We
+ store certain columns as JSON objects. The upcoming change uses msgpack instead
+ of JSON, resulting in faster encoding and decoding times and a smaller storage
+ footprint when encoded. The drawback is that it is a binary format, so if someone
+ dumps the tables from the DB as CSV, they won't be human-readable.
+
+ This tool processes CSV DB dumps and decodes the msgpack fields back to JSON.
+
+ ## Usage
+
+ ```shell
+ # cat dynflow_execution_plans.csv
+ 2065cc55-6b03-44b7-947a-e999dcb9057f,,stopped,error,,2021-04-16 09:50:33.826,0,0,,Dynflow::ExecutionPlan,1,\x91a143,\x91a153,\x9283a474696d65ce60795de9a46e616d65a564656c6179a8776f726c645f6964d92435626536643435662d363732342d343666652d393035662d34363565316466346561306183a474696d65ce60795de9a46e616d65a774696d656f7574a8776f726c645f6964d92435626536643435662d363732342d343666652d393035662d343635653164663465613061,\x9101
+ 6667374a-beab-4b0b-80c8-3d0392cdde40,,scheduled,pending,,,0,,,Dynflow::ExecutionPlan,1,\x91a143,\x91a153,\x9183a474696d65ce60795de9a46e616d65a564656c6179a8776f726c645f6964d92435626536643435662d363732342d343666652d393035662d343635653164663465613061,\x9101
+
+ # expand < dynflow_execution_plans.csv
+ 2065cc55-6b03-44b7-947a-e999dcb9057f,,stopped,error,,2021-04-16 09:50:33.826,0,0,,Dynflow::ExecutionPlan,1,"[""C""]","[""S""]","[{""name"":""delay"",""time"":1618566633,""world_id"":""5be6d45f-6724-46fe-905f-465e1df4ea0a""},{""name"":""timeout"",""time"":1618566633,""world_id"":""5be6d45f-6724-46fe-905f-465e1df4ea0a""}]",[1]
+ 6667374a-beab-4b0b-80c8-3d0392cdde40,,scheduled,pending,,,0,,,Dynflow::ExecutionPlan,1,"[""C""]","[""S""]","[{""name"":""delay"",""time"":1618566633,""world_id"":""5be6d45f-6724-46fe-905f-465e1df4ea0a""}]",[1]
+ ```
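The hex blobs in the README's sample dump are how PostgreSQL's CSV output renders `bytea` columns: a `\x` prefix followed by hex digits, here wrapping msgpack. A minimal Ruby sketch of the decoding the tool performs, using the `msgpack` gem (the field value is the `run_flow` column from the dump above):

```ruby
require 'json'
require 'msgpack'

field = '\x91a143'                              # run_flow column from the CSV dump
bytes = [field.delete_prefix('\x')].pack('H*')  # strip the bytea prefix, hex -> binary
puts JSON.dump(MessagePack.unpack(bytes))       # => ["C"], a compact Concurrence flow
```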
extras/expand/go.mod ADDED
@@ -0,0 +1,5 @@
+ module github.com/dynflow/dynflow/expand
+
+ go 1.15
+
+ require github.com/vmihailenco/msgpack/v5 v5.3.5
extras/expand/go.sum ADDED
@@ -0,0 +1,11 @@
+ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+ github.com/vmihailenco/msgpack v3.3.3+incompatible h1:wapg9xDUZDzGCNFlwc5SqI1rvcciqcxEHac4CYj89xI=
+ github.com/vmihailenco/msgpack/v5 v5.3.5 h1:5gO0H1iULLWGhs2H5tbAHIZTV8/cYafcFOr9znI5mJU=
+ github.com/vmihailenco/msgpack/v5 v5.3.5/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc=
+ github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
+ github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
+ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
extras/expand/main.go CHANGED
@@ -4,47 +4,17 @@ import (
  "encoding/csv"
  "encoding/hex"
  "encoding/json"
- "github.com/vmihailenco/msgpack"
+ "github.com/vmihailenco/msgpack/v5"
  "io"
  "os"
  )
 
- // dynflow_steps
- // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
- // execution_plan_uuid,id,action_id,data,state,started_at,ended_at,real_time,execution_time,progress_done,progress_weight,class,error,action_class,children,queue
- //
- // encoded columns are:
- // 3 - data
- // 12 - error
- // 14 - children
-
- // dynflow_actions
- // 0 1 2 3 4 5 6 7 8 9 10
- // execution_plan_uuid,id,data,caller_execution_plan_id,caller_action_id,class,input,output,plan_step_id,run_step_id,finalize_step_id
-
- // encoded columns are:
- // 2 - data
- // 6 - input
- // 7 - output
-
- // dynflow_execution_plans
- // Without msgpack
- // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
- // uuid,data,state,result,started_at,ended_at,real_time,execution_time,label,class,run_flow,finalize_flow,execution_history,root_plan_step_id,step_ids
-
- // With msgpack
- // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
- // uuid,data,state,result,started_at,ended_at,real_time,execution_time,label,class,root_plan_step_id,run_flow,finalize_flow,execution_history,step_ids
- //
- // 1 - data
- // 11 - run_flow
- // 12 - finalize_flow
- // 13 - execution_history
- // 14 - step_ids
-
  func main() {
      reader := csv.NewReader(os.Stdin)
+     defer os.Stdin.Close()
+
      writer := csv.NewWriter(os.Stdout)
+     defer os.Stdout.Close()
      defer writer.Flush()
 
      for {
@@ -58,31 +28,12 @@ func main() {
  }
 
  func processRow(record []string) []string {
-     // Execution plan exports have 15 fields, other exports have different counts
-     if len(record) == 15 {
-         record = expandExecutionPlan(record)
-     }
-
      for i, r := range record {
-         record[i] = reencodeField(r)
-     }
-
-     return record
- }
-
- func expandExecutionPlan(record []string) []string {
-     var flow_columns [2]int
-
-     // The step_ids field should be a safe indicator
-     if isHexEncoded(record[14]) {
-         flow_columns = [...]int{11, 12}
-     } else {
-         flow_columns = [...]int{10, 11}
+         if isHexEncoded(r) {
+             record[i] = reencodeField(r)
+         }
      }
 
-     for _, i := range flow_columns {
-         record[i] = expandFlow(record[i])
-     }
      return record
  }
 
@@ -91,38 +42,18 @@ func isHexEncoded(field string) bool {
  }
 
  func reencodeField(field string) string {
-     decoded, err := decode(field)
+     decoded_bytes, err := hex.DecodeString(field[2:])
      if err != nil {
          return field
      }
 
-     return encode(decoded)
- }
-
- func decode(field string) (interface{}, error) {
      var intermediate interface{}
-     bytes := []byte(field)
-
-     if isHexEncoded(field) {
-         decoded_bytes, err := hex.DecodeString(field[2:])
-         if err != nil {
-             return "", err
-         }
-
-         err = msgpack.Unmarshal(decoded_bytes, &intermediate)
-         if err != nil {
-             return "", err
-         }
-
-         return intermediate, nil
-     }
-
-     err := json.Unmarshal(bytes, &intermediate)
+     err = msgpack.Unmarshal(decoded_bytes, &intermediate)
      if err != nil {
-         return "", err
+         return field
      }
 
-     return intermediate, nil
+     return encode(intermediate)
  }
 
  func encode(data interface{}) string {
@@ -133,48 +64,3 @@ func encode(data interface{}) string {
 
      return string(result)
  }
-
- func expandFlow(field string) string {
-     intermediate, err := decode(field)
-     if err != nil {
-         return field
-     }
-
-     var result map[string]interface{}
-     switch intermediate.(type) {
-     // old style hash
-     case map[string]interface{}:
-         result = intermediate.(map[string]interface{})
-     // newer compact S-expression like representation
-     case []interface{}, float64:
-         result = expandCompactFlow(intermediate)
-     }
-
-     return encode(result)
- }
-
- func expandCompactFlow(flow interface{}) map[string]interface{} {
-     result := make(map[string]interface{})
-     switch flow.(type) {
-     case []interface{}:
-         switch flow.([]interface{})[0] {
-         case "S":
-             result["class"] = "Dynflow::Flows::Sequence"
-         case "C":
-             result["class"] = "Dynflow::Flows::Concurrence"
-         default:
-             panic("Unknown flow type")
-         }
-         var subflows []interface{}
-         for subflow := range flow.([]interface{})[1:] {
-             subflows = append(subflows, expandCompactFlow(subflow))
-         }
-         result["flows"] = subflows
-     case float64, int:
-         result["class"] = "Dynflow::Flows::Atom"
-         result["step_id"] = flow
-     default:
-         panic("Unknown flow type")
-     }
-     return result
- }
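The removed expandFlow/expandCompactFlow pair rebuilt the old hash representation of flows from the compact one ("S"- or "C"-headed arrays for sequences and concurrences, bare numbers for atoms carrying a step id); after this change the tool simply re-encodes every hex-encoded field it finds. For reference, a hypothetical Ruby rendering of that expansion (note the removed Go version ranged over slice indices rather than elements, so it recursed on the wrong values):

```ruby
# Sketch of the compact-flow expansion the removed Go code performed.
def expand_compact_flow(flow)
  case flow
  when Array
    kind, *subflows = flow
    klass = { 'S' => 'Dynflow::Flows::Sequence',
              'C' => 'Dynflow::Flows::Concurrence' }.fetch(kind) { raise 'Unknown flow type' }
    { 'class' => klass, 'flows' => subflows.map { |sub| expand_compact_flow(sub) } }
  when Numeric # decoded msgpack/JSON yields plain numbers for atoms
    { 'class' => 'Dynflow::Flows::Atom', 'step_id' => flow }
  else
    raise 'Unknown flow type'
  end
end

expand_compact_flow(['S', 1, ['C', 2, 3]])
# => {"class"=>"Dynflow::Flows::Sequence", "flows"=>[
#      {"class"=>"Dynflow::Flows::Atom", "step_id"=>1},
#      {"class"=>"Dynflow::Flows::Concurrence", "flows"=>[...]}]}
```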
@@ -69,7 +69,7 @@ module Dynflow
          action.send(hook, execution_plan)
        rescue => e
          execution_plan.logger.error "Failed to run hook '#{hook}' for action '#{action.class}'"
-         execution_plan.logger.debug e
+         execution_plan.logger.error e
        end
      end
    end
lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb CHANGED
@@ -1,65 +1,9 @@
  # frozen_string_literal: true
 
- require 'multi_json'
- require 'msgpack'
-
- def table_pkeys(table)
-   case table
-   when :dynflow_execution_plans
-     [:uuid]
-   when :dynflow_actions, :dynflow_steps
-     [:execution_plan_uuid, :id]
-   when :dynflow_coordinator_records
-     [:id, :class]
-   when :dynflow_delayed_plans
-     [:execution_plan_uuid]
-   when :dynflow_envelopes
-     [:id]
-   when :dynflow_output_chunks
-     [:chunk]
-   else
-     raise "Unknown table '#{table}'"
-   end
- end
-
- def conditions_for_row(table, row)
-   row.slice(*table_pkeys(table))
- end
-
- def migrate_table(table, from_names, to_names, new_type)
-   alter_table(table) do
-     to_names.each do |new|
-       add_column new, new_type
-     end
-   end
-
-   relevant_columns = table_pkeys(table) | from_names
-
-   from(table).select(*relevant_columns).each do |row|
-     update = from_names.zip(to_names).reduce({}) do |acc, (from, to)|
-       row[from].nil? ? acc : acc.merge(to => yield(row[from]))
-     end
-     next if update.empty?
-     from(table).where(conditions_for_row(table, row)).update(update)
-   end
-
-   from_names.zip(to_names).each do |old, new|
-     alter_table(table) do
-       drop_column old
-     end
-
-     if database_type == :mysql
-       type = new_type == File ? 'blob' : 'mediumtext'
-       run "ALTER TABLE #{table} CHANGE COLUMN `#{new}` `#{old}` #{type};"
-     else
-       rename_column table, new, old
-     end
-   end
- end
+ require_relative 'msgpack_migration_helper'
 
  Sequel.migration do
-
-   TABLES = {
+   helper = MsgpackMigrationHelper.new({
      :dynflow_actions => [:data, :input, :output],
      :dynflow_coordinator_records => [:data],
      :dynflow_delayed_plans => [:serialized_args, :data],
@@ -67,24 +11,13 @@ Sequel.migration do
      :dynflow_execution_plans => [:run_flow, :finalize_flow, :execution_history, :step_ids],
      :dynflow_steps => [:error, :children],
      :dynflow_output_chunks => [:chunk]
-   }
+   })
 
    up do
-     TABLES.each do |table, columns|
-       new_columns = columns.map { |c| "#{c}_blob" }
-
-       migrate_table table, columns, new_columns, File do |data|
-         ::Sequel.blob(MessagePack.pack(MultiJson.load(data)))
-       end
-     end
+     helper.up(self)
    end
 
    down do
-     TABLES.each do |table, columns|
-       new_columns = columns.map { |c| c + '_text' }
-       migrate_table table, columns, new_columns, String do |data|
-         MultiJson.dump(MessagePack.unpack(data))
-       end
-     end
+     helper.down(self)
    end
  end
lib/dynflow/persistence_adapters/sequel_migrations/024_store_execution_plan_data_as_msgpack.rb ADDED
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ require_relative 'msgpack_migration_helper'
+
+ Sequel.migration do
+   helper = MsgpackMigrationHelper.new({
+     :dynflow_execution_plans => [:data],
+     :dynflow_steps => [:data]
+   })
+
+   up do
+     helper.up(self)
+   end
+
+   down do
+     helper.down(self)
+   end
+ end
lib/dynflow/persistence_adapters/sequel_migrations/msgpack_migration_helper.rb ADDED
@@ -0,0 +1,85 @@
+ # frozen_string_literal: true
+
+ require 'json'
+ require 'msgpack'
+
+ class MsgpackMigrationHelper
+   def initialize(tables)
+     @tables = tables
+   end
+
+   def up(migration)
+     @tables.each do |table, columns|
+       new_columns = columns.map { |c| "#{c}_blob" }
+
+       migrate_table migration, table, columns, new_columns, File do |data|
+         ::Sequel.blob(MessagePack.pack(JSON.parse(data)))
+       end
+     end
+   end
+
+   def down(migration)
+     @tables.each do |table, columns|
+       new_columns = columns.map { |c| c + '_text' }
+       migrate_table migration, table, columns, new_columns, String do |data|
+         JSON.dump(MessagePack.unpack(data))
+       end
+     end
+   end
+
+   private
+
+   def migrate_table(migration, table, from_names, to_names, new_type)
+     migration.alter_table(table) do
+       to_names.each do |new|
+         add_column new, new_type
+       end
+     end
+
+     relevant_columns = table_pkeys(table) | from_names
+
+     migration.from(table).select(*relevant_columns).each do |row|
+       update = from_names.zip(to_names).reduce({}) do |acc, (from, to)|
+         row[from].nil? ? acc : acc.merge(to => yield(row[from]))
+       end
+       next if update.empty?
+       migration.from(table).where(conditions_for_row(table, row)).update(update)
+     end
+
+     from_names.zip(to_names).each do |old, new|
+       migration.alter_table(table) do
+         drop_column old
+       end
+
+       if migration.database_type == :mysql
+         type = new_type == File ? 'blob' : 'mediumtext'
+         migration.run "ALTER TABLE #{table} CHANGE COLUMN `#{new}` `#{old}` #{type};"
+       else
+         migration.rename_column table, new, old
+       end
+     end
+   end
+
+   def conditions_for_row(table, row)
+     row.slice(*table_pkeys(table))
+   end
+
+   def table_pkeys(table)
+     case table
+     when :dynflow_execution_plans
+       [:uuid]
+     when :dynflow_actions, :dynflow_steps
+       [:execution_plan_uuid, :id]
+     when :dynflow_coordinator_records
+       [:id, :class]
+     when :dynflow_delayed_plans
+       [:execution_plan_uuid]
+     when :dynflow_envelopes
+       [:id]
+     when :dynflow_output_chunks
+       [:id]
+     else
+       raise "Unknown table '#{table}'"
+     end
+   end
+ end
@@ -127,7 +127,7 @@ module Dynflow
    db_pool_size = calculate_db_pool_size(world)
    ::ActiveRecord::Base.connection_pool.disconnect!
 
-   config = ::ActiveRecord::Base.configurations[::Rails.env]
+   config = ::ActiveRecord::Base.configurations.configs_for(env_name: ::Rails.env)[0].config.dup
    config['pool'] = db_pool_size if config['pool'].to_i < db_pool_size
    ::ActiveRecord::Base.establish_connection(config)
  end
@@ -158,7 +158,7 @@ module Dynflow
  protected
 
  def default_sequel_adapter_options(world)
-   db_config = ::ActiveRecord::Base.configurations[::Rails.env].dup
+   db_config = ::ActiveRecord::Base.configurations.configs_for(env_name: ::Rails.env)[0].config.dup
    db_config['adapter'] = db_config['adapter'].gsub(/_?makara_?/, '')
    db_config['adapter'] = 'postgres' if db_config['adapter'] == 'postgresql'
    db_config['max_connections'] = calculate_db_pool_size(world) if increase_db_pool_size?
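Both hunks replace Hash-style access to ActiveRecord::Base.configurations, which the Rails 6.x series deprecates and later versions drop, with the configs_for API (available since Rails 6.0); together with the new `< 7` bound in data/Gemfile above, this keeps the code on a supported lookup path. A rough sketch of the difference, assuming a single-database Rails 6.x app:

```ruby
# Old lookup, deprecated in the Rails 6.x series:
config = ActiveRecord::Base.configurations[Rails.env]

# New lookup used above: configs_for returns one entry per configured
# database, so [0] picks the primary; .config yields a plain Hash
# (later Rails prefers .configuration_hash with symbol keys).
config = ActiveRecord::Base.configurations
                           .configs_for(env_name: Rails.env)[0]
                           .config.dup
```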
lib/dynflow/version.rb CHANGED
@@ -1,4 +1,4 @@
  # frozen_string_literal: true
  module Dynflow
-   VERSION = '1.6.2'
+   VERSION = '1.6.5'
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: dynflow
  version: !ruby/object:Gem::Version
-   version: 1.6.2
+   version: 1.6.5
  platform: ruby
  authors:
  - Ivan Necas
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-12-10 00:00:00.000000000 Z
+ date: 2022-02-28 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: multi_json
@@ -263,6 +263,7 @@ extensions: []
  extra_rdoc_files: []
  files:
  - ".github/install_dependencies.sh"
+ - ".github/workflows/release.yml"
  - ".github/workflows/ruby.yml"
  - ".gitignore"
  - ".rubocop.yml"
@@ -419,6 +420,10 @@ files:
  - examples/sub_plan_concurrency_control.rb
  - examples/sub_plans.rb
  - examples/termination.rb
+ - extras/expand/Dockerfile
+ - extras/expand/README.md
+ - extras/expand/go.mod
+ - extras/expand/go.sum
  - extras/expand/main.go
  - extras/statsd_mapping.conf
  - lib/dynflow.rb
@@ -542,6 +547,8 @@ files:
  - lib/dynflow/persistence_adapters/sequel_migrations/021_create_output_chunks.rb
  - lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb
  - lib/dynflow/persistence_adapters/sequel_migrations/023_sqlite_workarounds.rb
+ - lib/dynflow/persistence_adapters/sequel_migrations/024_store_execution_plan_data_as_msgpack.rb
+ - lib/dynflow/persistence_adapters/sequel_migrations/msgpack_migration_helper.rb
  - lib/dynflow/rails.rb
  - lib/dynflow/rails/configuration.rb
  - lib/dynflow/rails/daemon.rb