dynflow 1.6.1 → 1.6.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: f660ffddfd3e3c7ea4b8414d2e6f27f48d897d34280ff3bec3c8ab6e360b0bfe
-   data.tar.gz: 46cd643c84f09640a07f00eafc3f4d3feda65746d3c353338d8141277fd77e03
+   metadata.gz: '068120950f509e0e118319244c51c2cc1c593b95c808e550636515dace4a984b'
+   data.tar.gz: 0a7b42cc5c57ba401c983582cebd1b428c6a9876d6187f3e511bcf5b82f33d4e
  SHA512:
-   metadata.gz: 99e59816fbf809403a87bbb1f96ff39958d3d059572de7a66f6205df7c48867baa50d77636d287aebb8c69d73db2cef686860520c6e7db27dbfa8217f5570715
-   data.tar.gz: 315b37f302e1c29eae52d00acabd82b605d88b7982ba3e01c920dd18619967115875ec5a1facffc6299280f01aaa61db8162ce3e02bbbd0ba06a95add1b88d64
+   metadata.gz: fa1c11576f57765c44d2efb27d80237e07e07733da768e79d2622cce9e5cdea295ae18894601905a431632c3a261668bb65a851f340baec681bbabec3e2d7dc1
+   data.tar.gz: 51d939e678a1e90880767b772d10f25fb69870da708180d9c07a42e46840428a929620206a5a05f1014c17bc7e5d7f71d8eff92e0c99654e1ec31a9ab73d2889
data/.github/workflows/release.yml ADDED
@@ -0,0 +1,48 @@
+ # workflow name
+ name: Generate release-artifacts
+
+ # on events
+ on:
+   push:
+     tags:
+       - '*'
+
+ # workflow tasks
+ jobs:
+   generate:
+     name: Generate build artifacts
+     runs-on: ubuntu-latest
+     steps:
+       - uses: olegtarasov/get-tag@v2.1
+         id: tagName
+         with:
+           tagRegex: "v(.*)" # Optional. Returns specified group text as tag name. Full tag string is returned if regex is not defined.
+           tagRegexGroup: 1 # Optional. Default is 1.
+       - name: Checkout the repository
+         uses: actions/checkout@v2
+       - name: Generate build files
+         run: |
+           mkdir -p dist
+           cd extras/expand
+           go build -o ../../dist/dynflow-expand-${VERSION}-x86_64
+         env:
+           VERSION: '${{ steps.tagName.outputs.tag }}'
+       - name: Generate distribution tarball
+         run: |
+           cd extras/expand
+           go mod vendor
+           tar --create \
+             --gzip \
+             --file ../../dist/dynflow-expand-${VERSION}.tar.gz \
+             --transform s/^\./dynflow-expand-${VERSION}/ \
+             .
+         env:
+           VERSION: '${{ steps.tagName.outputs.tag }}'
+       - name: Upload binaries to release
+         uses: svenstaro/upload-release-action@v2
+         with:
+           repo_token: ${{ secrets.GITHUB_TOKEN }}
+           file: dist/*
+           tag: ${{ github.ref }}
+           overwrite: true
+           file_glob: true
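The `tagRegex: "v(.*)"` setting above strips the leading `v` from the pushed tag, so pushing `v1.6.4` exposes `1.6.4` as `steps.tagName.outputs.tag`, which then names the build artifacts. A one-line Ruby sketch of the same capture-group extraction (illustrative only, not part of the gem):

```ruby
# Capture group 1 of "v(.*)", as configured via tagRegex/tagRegexGroup:
'v1.6.4'[/v(.*)/, 1] # => "1.6.4"
```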
data/Gemfile CHANGED
@@ -35,7 +35,7 @@ end
 
  group :rails do
    gem 'daemons'
-   gem 'rails', '>= 4.2.9'
+   gem 'rails', '>= 4.2.9', '< 7'
    gem 'logging'
  end
 
data/dynflow.gemspec CHANGED
@@ -20,7 +20,7 @@ Gem::Specification.new do |s|
    s.required_ruby_version = '>= 2.3.0'
 
    s.add_dependency "multi_json"
-   s.add_dependency "msgpack", '~> 1.3.3'
+   s.add_dependency "msgpack", '~> 1.3', '>= 1.3.3'
    s.add_dependency "apipie-params"
    s.add_dependency "algebrick", '~> 0.7.0'
    s.add_dependency "concurrent-ruby", '~> 1.1.3'
data/extras/expand/Dockerfile ADDED
@@ -0,0 +1,9 @@
+ FROM alpine:3.15 as builder
+ RUN apk add -U go
+ ADD ./ work/
+ RUN cd /work && \
+     go build
+
+ FROM scratch
+ COPY --from=builder /work/expand /expand
+ CMD ["/expand"]
data/extras/expand/README.md ADDED
@@ -0,0 +1,25 @@
+ # expand
+
+ For a long time, Dynflow's database schema remained stable. To optimize Dynflow
+ a bit, we started changing it. One of the changes was changing how we encode
+ flows, resulting in flows taking roughly 10x less space.
+
+ The other change is not merged yet, but has a potentially bigger impact. We
+ store certain columns as JSON objects. The upcoming change uses msgpack instead
+ of JSON, resulting in faster encoding and decoding times and a smaller storage
+ footprint when encoded. The drawback is that msgpack is a binary format, so if
+ someone dumps the tables from the DB as CSV, they won't be human readable.
+
+ This tool processes CSV DB dumps and decodes msgpack to JSON.
+
+ ## Usage
+
+ ```shell
+ # cat dynflow_execution_plans.csv
+ 2065cc55-6b03-44b7-947a-e999dcb9057f,,stopped,error,,2021-04-16 09:50:33.826,0,0,,Dynflow::ExecutionPlan,1,\x91a143,\x91a153,\x9283a474696d65ce60795de9a46e616d65a564656c6179a8776f726c645f6964d92435626536643435662d363732342d343666652d393035662d34363565316466346561306183a474696d65ce60795de9a46e616d65a774696d656f7574a8776f726c645f6964d92435626536643435662d363732342d343666652d393035662d343635653164663465613061,\x9101
+ 6667374a-beab-4b0b-80c8-3d0392cdde40,,scheduled,pending,,,0,,,Dynflow::ExecutionPlan,1,\x91a143,\x91a153,\x9183a474696d65ce60795de9a46e616d65a564656c6179a8776f726c645f6964d92435626536643435662d363732342d343666652d393035662d343635653164663465613061,\x9101
+
+ # expand < dynflow_execution_plans.csv
+ 2065cc55-6b03-44b7-947a-e999dcb9057f,,stopped,error,,2021-04-16 09:50:33.826,0,0,,Dynflow::ExecutionPlan,1,"[""C""]","[""S""]","[{""name"":""delay"",""time"":1618566633,""world_id"":""5be6d45f-6724-46fe-905f-465e1df4ea0a""},{""name"":""timeout"",""time"":1618566633,""world_id"":""5be6d45f-6724-46fe-905f-465e1df4ea0a""}]",[1]
+ 6667374a-beab-4b0b-80c8-3d0392cdde40,,scheduled,pending,,,0,,,Dynflow::ExecutionPlan,1,"[""C""]","[""S""]","[{""name"":""delay"",""time"":1618566633,""world_id"":""5be6d45f-6724-46fe-905f-465e1df4ea0a""}]",[1]
+ ```
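To illustrate what `expand` does to each field: the `\x...` values in a PostgreSQL CSV dump are hex-encoded msgpack blobs, and the tool hex-decodes each one, unpacks the msgpack, and re-emits it as JSON. A minimal Ruby equivalent for a single field (a sketch mirroring the Go tool's logic, not shipped with the gem):

```ruby
require 'json'
require 'msgpack'

field = '\x91a143'              # the run_flow column from the dump above
bytes = [field[2..]].pack('H*') # drop the \x prefix, hex-decode the rest
JSON.dump(MessagePack.unpack(bytes)) # => "[\"C\"]"
```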
data/extras/expand/go.mod ADDED
@@ -0,0 +1,5 @@
+ module github.com/dynflow/dynflow/expand
+
+ go 1.15
+
+ require github.com/vmihailenco/msgpack/v5 v5.3.5
data/extras/expand/go.sum ADDED
@@ -0,0 +1,11 @@
+ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+ github.com/vmihailenco/msgpack v3.3.3+incompatible h1:wapg9xDUZDzGCNFlwc5SqI1rvcciqcxEHac4CYj89xI=
+ github.com/vmihailenco/msgpack/v5 v5.3.5 h1:5gO0H1iULLWGhs2H5tbAHIZTV8/cYafcFOr9znI5mJU=
+ github.com/vmihailenco/msgpack/v5 v5.3.5/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc=
+ github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
+ github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
+ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
data/extras/expand/main.go CHANGED
@@ -4,47 +4,17 @@ import (
    "encoding/csv"
    "encoding/hex"
    "encoding/json"
-   "github.com/vmihailenco/msgpack"
+   "github.com/vmihailenco/msgpack/v5"
    "io"
    "os"
  )
 
- // dynflow_steps
- // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
- // execution_plan_uuid,id,action_id,data,state,started_at,ended_at,real_time,execution_time,progress_done,progress_weight,class,error,action_class,children,queue
- //
- // encoded columns are:
- // 3 - data
- // 12 - error
- // 14 - children
-
- // dynflow_actions
- // 0 1 2 3 4 5 6 7 8 9 10
- // execution_plan_uuid,id,data,caller_execution_plan_id,caller_action_id,class,input,output,plan_step_id,run_step_id,finalize_step_id
- //
- // encoded columns are:
- // 2 - data
- // 6 - input
- // 7 - output
-
- // dynflow_execution_plans
- // Without msgpack
- // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
- // uuid,data,state,result,started_at,ended_at,real_time,execution_time,label,class,run_flow,finalize_flow,execution_history,root_plan_step_id,step_ids
-
- // With msgpack
- // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
- // uuid,data,state,result,started_at,ended_at,real_time,execution_time,label,class,root_plan_step_id,run_flow,finalize_flow,execution_history,step_ids
- //
- // 1 - data
- // 11 - run_flow
- // 12 - finalize_flow
- // 13 - execution_history
- // 14 - step_ids
-
  func main() {
    reader := csv.NewReader(os.Stdin)
+   defer os.Stdin.Close()
+
    writer := csv.NewWriter(os.Stdout)
+   defer os.Stdout.Close()
    defer writer.Flush()
 
    for {
@@ -58,31 +28,12 @@ func main() {
  }
 
  func processRow(record []string) []string {
-   // Execution plan exports have 15 fields, other exports have different counts
-   if len(record) == 15 {
-     record = expandExecutionPlan(record)
-   }
-
    for i, r := range record {
-     record[i] = reencodeField(r)
-   }
-
-   return record
- }
-
- func expandExecutionPlan(record []string) []string {
-   var flow_columns [2]int
-
-   // The step_ids field should be a safe indicator
-   if isHexEncoded(record[14]) {
-     flow_columns = [...]int{11, 12}
-   } else {
-     flow_columns = [...]int{10, 11}
+     if isHexEncoded(r) {
+       record[i] = reencodeField(r)
+     }
    }
 
-   for _, i := range flow_columns {
-     record[i] = expandFlow(record[i])
-   }
    return record
  }
 
@@ -91,38 +42,18 @@ func isHexEncoded(field string) bool {
  }
 
  func reencodeField(field string) string {
-   decoded, err := decode(field)
+   decoded_bytes, err := hex.DecodeString(field[2:])
    if err != nil {
      return field
    }
 
-   return encode(decoded)
- }
-
- func decode(field string) (interface{}, error) {
    var intermediate interface{}
-   bytes := []byte(field)
-
-   if isHexEncoded(field) {
-     decoded_bytes, err := hex.DecodeString(field[2:])
-     if err != nil {
-       return "", err
-     }
-
-     err = msgpack.Unmarshal(decoded_bytes, &intermediate)
-     if err != nil {
-       return "", err
-     }
-
-     return intermediate, nil
-   }
-
-   err := json.Unmarshal(bytes, &intermediate)
+   err = msgpack.Unmarshal(decoded_bytes, &intermediate)
    if err != nil {
-     return "", err
+     return field
    }
 
-   return intermediate, nil
+   return encode(intermediate)
  }
 
  func encode(data interface{}) string {
@@ -133,48 +64,3 @@ func encode(data interface{}) string {
 
    return string(result)
  }
-
- func expandFlow(field string) string {
-   intermediate, err := decode(field)
-   if err != nil {
-     return field
-   }
-
-   var result map[string]interface{}
-   switch intermediate.(type) {
-   // old style hash
-   case map[string]interface{}:
-     result = intermediate.(map[string]interface{})
-   // newer compact S-expression like representation
-   case []interface{}, float64:
-     result = expandCompactFlow(intermediate)
-   }
-
-   return encode(result)
- }
-
- func expandCompactFlow(flow interface{}) map[string]interface{} {
-   result := make(map[string]interface{})
-   switch flow.(type) {
-   case []interface{}:
-     switch flow.([]interface{})[0] {
-     case "S":
-       result["class"] = "Dynflow::Flows::Sequence"
-     case "C":
-       result["class"] = "Dynflow::Flows::Concurrence"
-     default:
-       panic("Unknown flow type")
-     }
-     var subflows []interface{}
-     for subflow := range flow.([]interface{})[1:] {
-       subflows = append(subflows, expandCompactFlow(subflow))
-     }
-     result["flows"] = subflows
-   case float64, int:
-     result["class"] = "Dynflow::Flows::Atom"
-     result["step_id"] = flow
-   default:
-     panic("Unknown flow type")
-   }
-   return result
- }
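The removed `expandFlow`/`expandCompactFlow` helpers rebuilt the old hash representation of a flow from the compact encoding, where `["S", ...]` marks a sequence, `["C", ...]` a concurrence, and a bare integer an atomic step; the simplified tool now leaves flows in their compact JSON form. A Ruby sketch of that old expansion (illustrative only, with the subflow iteration written over values rather than indices):

```ruby
# Expand the compact flow encoding ("S"/"C" tagged arrays, integer leaves)
# back into the verbose hash form used by older Dynflow versions.
def expand_compact_flow(flow)
  case flow
  when Integer
    { 'class' => 'Dynflow::Flows::Atom', 'step_id' => flow }
  when Array
    classes = { 'S' => 'Dynflow::Flows::Sequence', 'C' => 'Dynflow::Flows::Concurrence' }
    { 'class' => classes.fetch(flow.first),
      'flows' => flow.drop(1).map { |sub| expand_compact_flow(sub) } }
  else
    raise "Unknown flow type #{flow.inspect}"
  end
end

expand_compact_flow(['S', 1, ['C', 2, 3]])
# => {"class"=>"Dynflow::Flows::Sequence",
#     "flows"=>[{"class"=>"Dynflow::Flows::Atom", "step_id"=>1},
#               {"class"=>"Dynflow::Flows::Concurrence",
#                "flows"=>[{"class"=>"Dynflow::Flows::Atom", "step_id"=>2},
#                          {"class"=>"Dynflow::Flows::Atom", "step_id"=>3}]}]}
```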
@@ -69,7 +69,7 @@ module Dynflow
        action.send(hook, execution_plan)
      rescue => e
        execution_plan.logger.error "Failed to run hook '#{hook}' for action '#{action.class}'"
-       execution_plan.logger.debug e
+       execution_plan.logger.error e
      end
    end
  end
data/lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb CHANGED
@@ -1,65 +1,9 @@
  # frozen_string_literal: true
 
- require 'multi_json'
- require 'msgpack'
-
- def table_pkeys(table)
-   case table
-   when :dynflow_execution_plans
-     [:uuid]
-   when :dynflow_actions, :dynflow_steps
-     [:execution_plan_uuid, :id]
-   when :dynflow_coordinator_records
-     [:id, :class]
-   when :dynflow_delayed_plans
-     [:execution_plan_uuid]
-   when :dynflow_envelopes
-     [:id]
-   when :dynflow_output_chunks
-     [:chunk]
-   else
-     raise "Unknown table '#{table}'"
-   end
- end
-
- def conditions_for_row(table, row)
-   row.slice(*table_pkeys(table))
- end
-
- def migrate_table(table, from_names, to_names, new_type)
-   alter_table(table) do
-     to_names.each do |new|
-       add_column new, new_type
-     end
-   end
-
-   relevant_columns = table_pkeys(table) | from_names
-
-   from(table).select(*relevant_columns).each do |row|
-     update = from_names.zip(to_names).reduce({}) do |acc, (from, to)|
-       row[from].nil? ? acc : acc.merge(to => yield(row[from]))
-     end
-     next if update.empty?
-     from(table).where(conditions_for_row(table, row)).update(update)
-   end
-
-   from_names.zip(to_names).each do |old, new|
-     alter_table(table) do
-       drop_column old
-     end
-
-     if database_type == :mysql
-       type = new_type == File ? 'blob' : 'mediumtext'
-       run "ALTER TABLE #{table} CHANGE COLUMN `#{new}` `#{old}` #{type};"
-     else
-       rename_column table, new, old
-     end
-   end
- end
+ require_relative 'msgpack_migration_helper'
 
  Sequel.migration do
-
-   TABLES = {
+   helper = MsgpackMigrationHelper.new({
      :dynflow_actions => [:data, :input, :output],
      :dynflow_coordinator_records => [:data],
      :dynflow_delayed_plans => [:serialized_args, :data],
@@ -67,24 +11,13 @@ Sequel.migration do
      :dynflow_execution_plans => [:run_flow, :finalize_flow, :execution_history, :step_ids],
      :dynflow_steps => [:error, :children],
      :dynflow_output_chunks => [:chunk]
-   }
+   })
 
    up do
-     TABLES.each do |table, columns|
-       new_columns = columns.map { |c| "#{c}_blob" }
-
-       migrate_table table, columns, new_columns, File do |data|
-         ::Sequel.blob(MessagePack.pack(MultiJson.load(data)))
-       end
-     end
+     helper.up(self)
    end
 
    down do
-     TABLES.each do |table, columns|
-       new_columns = columns.map { |c| c + '_text' }
-       migrate_table table, columns, new_columns, String do |data|
-         MultiJson.dump(MessagePack.unpack(data))
-       end
-     end
+     helper.down(self)
    end
  end
data/lib/dynflow/persistence_adapters/sequel_migrations/024_store_execution_plan_data_as_msgpack.rb ADDED
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ require_relative 'msgpack_migration_helper'
+
+ Sequel.migration do
+   helper = MsgpackMigrationHelper.new({
+     :dynflow_execution_plans => [:data],
+     :dynflow_steps => [:data]
+   })
+
+   up do
+     helper.up(self)
+   end
+
+   down do
+     helper.down(self)
+   end
+ end
data/lib/dynflow/persistence_adapters/sequel_migrations/msgpack_migration_helper.rb ADDED
@@ -0,0 +1,85 @@
+ # frozen_string_literal: true
+
+ require 'json'
+ require 'msgpack'
+
+ class MsgpackMigrationHelper
+   def initialize(tables)
+     @tables = tables
+   end
+
+   def up(migration)
+     @tables.each do |table, columns|
+       new_columns = columns.map { |c| "#{c}_blob" }
+
+       migrate_table migration, table, columns, new_columns, File do |data|
+         ::Sequel.blob(MessagePack.pack(JSON.parse(data)))
+       end
+     end
+   end
+
+   def down(migration)
+     @tables.each do |table, columns|
+       new_columns = columns.map { |c| c + '_text' }
+       migrate_table migration, table, columns, new_columns, String do |data|
+         JSON.dump(MessagePack.unpack(data))
+       end
+     end
+   end
+
+   private
+
+   def migrate_table(migration, table, from_names, to_names, new_type)
+     migration.alter_table(table) do
+       to_names.each do |new|
+         add_column new, new_type
+       end
+     end
+
+     relevant_columns = table_pkeys(table) | from_names
+
+     migration.from(table).select(*relevant_columns).each do |row|
+       update = from_names.zip(to_names).reduce({}) do |acc, (from, to)|
+         row[from].nil? ? acc : acc.merge(to => yield(row[from]))
+       end
+       next if update.empty?
+       migration.from(table).where(conditions_for_row(table, row)).update(update)
+     end
+
+     from_names.zip(to_names).each do |old, new|
+       migration.alter_table(table) do
+         drop_column old
+       end
+
+       if migration.database_type == :mysql
+         type = new_type == File ? 'blob' : 'mediumtext'
+         run "ALTER TABLE #{table} CHANGE COLUMN `#{new}` `#{old}` #{type};"
+       else
+         migration.rename_column table, new, old
+       end
+     end
+   end
+
+   def conditions_for_row(table, row)
+     row.slice(*table_pkeys(table))
+   end
+
+   def table_pkeys(table)
+     case table
+     when :dynflow_execution_plans
+       [:uuid]
+     when :dynflow_actions, :dynflow_steps
+       [:execution_plan_uuid, :id]
+     when :dynflow_coordinator_records
+       [:id, :class]
+     when :dynflow_delayed_plans
+       [:execution_plan_uuid]
+     when :dynflow_envelopes
+       [:id]
+     when :dynflow_output_chunks
+       [:id]
+     else
+       raise "Unknown table '#{table}'"
+     end
+   end
+ end
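The heart of the helper is the per-column transcoding: `up` re-encodes each JSON text column as a msgpack blob, and `down` reverses it. A small standalone round-trip sketch of those two blocks (assuming only the `json` and `msgpack` gems):

```ruby
require 'json'
require 'msgpack'

json = '{"name":"delay","time":1618566633}'

packed = MessagePack.pack(JSON.parse(json)) # what `up` writes into the _blob column
packed.bytesize < json.bytesize             # => true, msgpack is more compact here

JSON.dump(MessagePack.unpack(packed)) == json # => true, `down` restores the JSON text
```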
data/lib/dynflow/rails/configuration.rb CHANGED
@@ -158,7 +158,7 @@ module Dynflow
    protected
 
    def default_sequel_adapter_options(world)
-     db_config = ::ActiveRecord::Base.configurations[::Rails.env].dup
+     db_config = ::Rails.application.config.database_configuration[::Rails.env].dup
      db_config['adapter'] = db_config['adapter'].gsub(/_?makara_?/, '')
      db_config['adapter'] = 'postgres' if db_config['adapter'] == 'postgresql'
      db_config['max_connections'] = calculate_db_pool_size(world) if increase_db_pool_size?
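A likely motivation for this change (an assumption; the diff itself doesn't say): on newer Rails, `ActiveRecord::Base.configurations` returns an `ActiveRecord::DatabaseConfigurations` object rather than a plain hash, so indexing it by environment name no longer yields the mutable `Hash` this code expects, while `Rails.application.config.database_configuration` still returns the raw contents of `database.yml`:

```ruby
# Assumed behaviour on Rails >= 6.1:
ActiveRecord::Base.configurations.class
# => ActiveRecord::DatabaseConfigurations (not a Hash)

Rails.application.config.database_configuration.class
# => Hash, keyed by environment name, e.g. config['production']['adapter']
```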
data/lib/dynflow/version.rb CHANGED
@@ -1,4 +1,4 @@
  # frozen_string_literal: true
  module Dynflow
-   VERSION = '1.6.1'
+   VERSION = '1.6.4'
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: dynflow
  version: !ruby/object:Gem::Version
-   version: 1.6.1
+   version: 1.6.4
  platform: ruby
  authors:
  - Ivan Necas
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-09-24 00:00:00.000000000 Z
+ date: 2022-02-28 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: multi_json
@@ -30,6 +30,9 @@ dependencies:
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.3'
+     - - ">="
        - !ruby/object:Gem::Version
          version: 1.3.3
    type: :runtime
@@ -37,6 +40,9 @@ dependencies:
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.3'
+     - - ">="
        - !ruby/object:Gem::Version
          version: 1.3.3
  - !ruby/object:Gem::Dependency
@@ -257,6 +263,7 @@ extensions: []
  extra_rdoc_files: []
  files:
  - ".github/install_dependencies.sh"
+ - ".github/workflows/release.yml"
  - ".github/workflows/ruby.yml"
  - ".gitignore"
  - ".rubocop.yml"
@@ -413,6 +420,10 @@ files:
  - examples/sub_plan_concurrency_control.rb
  - examples/sub_plans.rb
  - examples/termination.rb
+ - extras/expand/Dockerfile
+ - extras/expand/README.md
+ - extras/expand/go.mod
+ - extras/expand/go.sum
  - extras/expand/main.go
  - extras/statsd_mapping.conf
  - lib/dynflow.rb
@@ -536,6 +547,8 @@ files:
  - lib/dynflow/persistence_adapters/sequel_migrations/021_create_output_chunks.rb
  - lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb
  - lib/dynflow/persistence_adapters/sequel_migrations/023_sqlite_workarounds.rb
+ - lib/dynflow/persistence_adapters/sequel_migrations/024_store_execution_plan_data_as_msgpack.rb
+ - lib/dynflow/persistence_adapters/sequel_migrations/msgpack_migration_helper.rb
  - lib/dynflow/rails.rb
  - lib/dynflow/rails/configuration.rb
  - lib/dynflow/rails/daemon.rb