dynflow 1.6.2 → 1.6.3
- checksums.yaml +4 -4
- data/.github/workflows/release.yml +48 -0
- data/Gemfile +1 -1
- data/extras/expand/Dockerfile +9 -0
- data/extras/expand/README.md +25 -0
- data/extras/expand/go.mod +5 -0
- data/extras/expand/go.sum +11 -0
- data/extras/expand/main.go +11 -125
- data/lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb +3 -3
- data/lib/dynflow/version.rb +1 -1
- metadata +7 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 40e7ab36f1ef2943d1cbebccde747a2c89186a620493a74bfefb06a810e8bf13
+  data.tar.gz: c2d4da5a39382f4df50ac0a1664edab9b772f94fc6da4f7f5eec51c24b91e256
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 955818d3401e8641df7ad06ad0307791502437ac3b2549dee9d1161c8dd29d76e83f03b378b6c451547d2e1a00f0b8bbdfa282ea9b6e3fc37005e1fde59ebff6
+  data.tar.gz: 2cb868cab752e95928521d1276bca3091f3ebbd55437e80a82baf980c636880dea3bdb3b345252e2735ffe236960c9d9c6d78a471acc9462c8341581ec91ff98
data/.github/workflows/release.yml
ADDED

@@ -0,0 +1,48 @@
+# workflow name
+name: Generate release-artifacts
+
+# on events
+on:
+  push:
+    tags:
+      - '*'
+
+# workflow tasks
+jobs:
+  generate:
+    name: Generate build artifacts
+    runs-on: ubuntu-latest
+    steps:
+      - uses: olegtarasov/get-tag@v2.1
+        id: tagName
+        with:
+          tagRegex: "v(.*)" # Optional. Returns specified group text as tag name. Full tag string is returned if regex is not defined.
+          tagRegexGroup: 1 # Optional. Default is 1.
+      - name: Checkout the repository
+        uses: actions/checkout@v2
+      - name: Generate build files
+        run: |
+          mkdir -p dist
+          cd extras/expand
+          go build -o ../../dist/dynflow-expand-${VERSION}-x86_64
+        env:
+          VERSION: '${{ steps.tagName.outputs.tag }}'
+      - name: Generate distribution tarball
+        run: |
+          cd extras/expand
+          go mod vendor
+          tar --create \
+            --gzip \
+            --file ../../dist/dynflow-expand-${VERSION}.tar.gz \
+            --transform s/^\./dynflow-expand-${VERSION}/ \
+            .
+        env:
+          VERSION: '${{ steps.tagName.outputs.tag }}'
+      - name: Upload binaries to release
+        uses: svenstaro/upload-release-action@v2
+        with:
+          repo_token: ${{ secrets.GITHUB_TOKEN }}
+          file: dist/*
+          tag: ${{ github.ref }}
+          overwrite: true
+          file_glob: true
data/extras/expand/README.md
ADDED
@@ -0,0 +1,25 @@
+# expand
+
+For a long time, Dynflow's database schema remained stable. To optimize Dynflow
+a bit, we started changing it. One of the changes was changing how we encode
+flows, resulting in flows taking roughly 10x less space.
+
+The other change is not merged yet, but has a potentially bigger impact. We
+store certain columns as JSON objects. The upcoming change uses msgpack instead
+of JSON, resulting in faster encoding and decoding times and a smaller storage
+footprint when encoded. The drawback is that it is a binary format, so if someone
+dumps the tables from the DB as CSV, they won't be human readable.
+
+This tool processes CSV DB dumps and decodes msgpack to JSON.
+
+## Usage
+
+```shell
+# cat dynflow_execution_plans.csv
+2065cc55-6b03-44b7-947a-e999dcb9057f,,stopped,error,,2021-04-16 09:50:33.826,0,0,,Dynflow::ExecutionPlan,1,\x91a143,\x91a153,\x9283a474696d65ce60795de9a46e616d65a564656c6179a8776f726c645f6964d92435626536643435662d363732342d343666652d393035662d34363565316466346561306183a474696d65ce60795de9a46e616d65a774696d656f7574a8776f726c645f6964d92435626536643435662d363732342d343666652d393035662d343635653164663465613061,\x9101
+6667374a-beab-4b0b-80c8-3d0392cdde40,,scheduled,pending,,,0,,,Dynflow::ExecutionPlan,1,\x91a143,\x91a153,\x9183a474696d65ce60795de9a46e616d65a564656c6179a8776f726c645f6964d92435626536643435662d363732342d343666652d393035662d343635653164663465613061,\x9101
+
+# expand < dynflow_execution_plans.csv
+2065cc55-6b03-44b7-947a-e999dcb9057f,,stopped,error,,2021-04-16 09:50:33.826,0,0,,Dynflow::ExecutionPlan,1,"[""C""]","[""S""]","[{""name"":""delay"",""time"":1618566633,""world_id"":""5be6d45f-6724-46fe-905f-465e1df4ea0a""},{""name"":""timeout"",""time"":1618566633,""world_id"":""5be6d45f-6724-46fe-905f-465e1df4ea0a""}]",[1]
+6667374a-beab-4b0b-80c8-3d0392cdde40,,scheduled,pending,,,0,,,Dynflow::ExecutionPlan,1,"[""C""]","[""S""]","[{""name"":""delay"",""time"":1618566633,""world_id"":""5be6d45f-6724-46fe-905f-465e1df4ea0a""}]",[1]
+```
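The re-encoding step the tool performs can be seen in isolation. Below is a minimal, self-contained sketch — written for this page, not part of the gem — that decodes the `\x9101` step_ids field from the sample dump above, assuming the same github.com/vmihailenco/msgpack/v5 library the tool itself imports:

```go
package main

import (
	"encoding/hex"
	"encoding/json"
	"fmt"

	"github.com/vmihailenco/msgpack/v5"
)

func main() {
	// PostgreSQL dumps bytea columns in hex form with a literal "\x" prefix.
	field := `\x9101` // msgpack encoding of [1], taken from the sample dump above

	raw, err := hex.DecodeString(field[2:]) // strip the "\x" prefix
	if err != nil {
		panic(err)
	}

	var value interface{}
	if err := msgpack.Unmarshal(raw, &value); err != nil {
		panic(err)
	}

	out, err := json.Marshal(value) // re-encode as human-readable JSON
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // prints: [1]
}
```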
data/extras/expand/go.sum
ADDED

@@ -0,0 +1,11 @@
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/vmihailenco/msgpack v3.3.3+incompatible h1:wapg9xDUZDzGCNFlwc5SqI1rvcciqcxEHac4CYj89xI=
+github.com/vmihailenco/msgpack/v5 v5.3.5 h1:5gO0H1iULLWGhs2H5tbAHIZTV8/cYafcFOr9znI5mJU=
+github.com/vmihailenco/msgpack/v5 v5.3.5/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc=
+github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
+github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
data/extras/expand/main.go
CHANGED
@@ -4,47 +4,17 @@ import (
 	"encoding/csv"
 	"encoding/hex"
 	"encoding/json"
-	"github.com/vmihailenco/msgpack"
+	"github.com/vmihailenco/msgpack/v5"
 	"io"
 	"os"
 )
 
-// dynflow_steps
-// 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
-// execution_plan_uuid,id,action_id,data,state,started_at,ended_at,real_time,execution_time,progress_done,progress_weight,class,error,action_class,children,queue
-//
-// encoded columns are:
-// 3 - data
-// 12 - error
-// 14 - children
-
-// dynflow_actions
-// 0 1 2 3 4 5 6 7 8 9 10
-// execution_plan_uuid,id,data,caller_execution_plan_id,caller_action_id,class,input,output,plan_step_id,run_step_id,finalize_step_id
-//
-// encoded columns are:
-// 2 - data
-// 6 - input
-// 7 - output
-
-// dynflow_execution_plans
-// Without msgpack
-// 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
-// uuid,data,state,result,started_at,ended_at,real_time,execution_time,label,class,run_flow,finalize_flow,execution_history,root_plan_step_id,step_ids
-
-// With msgpack
-// 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
-// uuid,data,state,result,started_at,ended_at,real_time,execution_time,label,class,root_plan_step_id,run_flow,finalize_flow,execution_history,step_ids
-//
-// 1 - data
-// 11 - run_flow
-// 12 - finalize_flow
-// 13 - execution_history
-// 14 - step_ids
-
 func main() {
 	reader := csv.NewReader(os.Stdin)
+	defer os.Stdin.Close()
+
 	writer := csv.NewWriter(os.Stdout)
+	defer os.Stdout.Close()
 	defer writer.Flush()
 
 	for {
@@ -58,31 +28,12 @@ func main() {
 }
 
 func processRow(record []string) []string {
-	// Execution plan exports have 15 fields, other exports have different counts
-	if len(record) == 15 {
-		record = expandExecutionPlan(record)
-	}
-
 	for i, r := range record {
-		record[i] = reencodeField(r)
-	}
-
-	return record
-}
-
-func expandExecutionPlan(record []string) []string {
-	var flow_columns [2]int
-
-	// The step_ids field should be a safe indicator
-	if isHexEncoded(record[14]) {
-		flow_columns = [...]int{11, 12}
-	} else {
-		flow_columns = [...]int{10, 11}
+		if isHexEncoded(r) {
+			record[i] = reencodeField(r)
+		}
 	}
 
-	for _, i := range flow_columns {
-		record[i] = expandFlow(record[i])
-	}
 	return record
 }
 
@@ -91,38 +42,18 @@ func isHexEncoded(field string) bool {
 }
 
 func reencodeField(field string) string {
-	decoded, err := decode(field)
+	decoded_bytes, err := hex.DecodeString(field[2:])
 	if err != nil {
 		return field
 	}
 
-	return encode(decoded)
-}
-
-func decode(field string) (interface{}, error) {
 	var intermediate interface{}
-	bytes := []byte(field)
-
-	if isHexEncoded(field) {
-		decoded_bytes, err := hex.DecodeString(field[2:])
-		if err != nil {
-			return "", err
-		}
-
-		err = msgpack.Unmarshal(decoded_bytes, &intermediate)
-		if err != nil {
-			return "", err
-		}
-
-		return intermediate, nil
-	}
-
-	err := json.Unmarshal(bytes, &intermediate)
+	err = msgpack.Unmarshal(decoded_bytes, &intermediate)
 	if err != nil {
-		return "", err
+		return field
 	}
 
-	return intermediate, nil
+	return encode(intermediate)
 }
 
 func encode(data interface{}) string {
@@ -133,48 +64,3 @@ func encode(data interface{}) string {
 
 	return string(result)
 }
-
-func expandFlow(field string) string {
-	intermediate, err := decode(field)
-	if err != nil {
-		return field
-	}
-
-	var result map[string]interface{}
-	switch intermediate.(type) {
-	// old style hash
-	case map[string]interface{}:
-		result = intermediate.(map[string]interface{})
-	// newer compact S-expression like representation
-	case []interface{}, float64:
-		result = expandCompactFlow(intermediate)
-	}
-
-	return encode(result)
-}
-
-func expandCompactFlow(flow interface{}) map[string]interface{} {
-	result := make(map[string]interface{})
-	switch flow.(type) {
-	case []interface{}:
-		switch flow.([]interface{})[0] {
-		case "S":
-			result["class"] = "Dynflow::Flows::Sequence"
-		case "C":
-			result["class"] = "Dynflow::Flows::Concurrence"
-		default:
-			panic("Unknown flow type")
-		}
-		var subflows []interface{}
-		for subflow := range flow.([]interface{})[1:] {
-			subflows = append(subflows, expandCompactFlow(subflow))
-		}
-		result["flows"] = subflows
-	case float64, int:
-		result["class"] = "Dynflow::Flows::Atom"
-		result["step_id"] = flow
-	default:
-		panic("Unknown flow type")
-	}
-	return result
-}
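The net effect of this rewrite: the old tool hard-coded which columns of which table held flows and expanded the compact S-expression flow format itself, while the new one simply re-encodes any field that looks hex-encoded, which works uniformly for steps, actions, and execution plans. The body of isHexEncoded sits outside the hunks above; a plausible implementation — an assumption for illustration, not the actual source — would be:

```go
package main

import "strings"

// Hypothetical sketch of isHexEncoded; the real body is not shown in the
// diff above. PostgreSQL CSV dumps render bytea columns as "\x"-prefixed
// hex strings, which is also why reencodeField strips two characters with
// field[2:] before hex-decoding.
func isHexEncoded(field string) bool {
	return strings.HasPrefix(field, `\x`)
}
```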
data/lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb
CHANGED

@@ -1,6 +1,6 @@
 # frozen_string_literal: true
 
-require '
+require 'json'
 require 'msgpack'
 
 def table_pkeys(table)
@@ -74,7 +74,7 @@ Sequel.migration do
     new_columns = columns.map { |c| "#{c}_blob" }
 
     migrate_table table, columns, new_columns, File do |data|
-      ::Sequel.blob(MessagePack.pack(
+      ::Sequel.blob(MessagePack.pack(JSON.parse(data)))
     end
   end
 end
@@ -83,7 +83,7 @@ Sequel.migration do
   TABLES.each do |table, columns|
     new_columns = columns.map { |c| c + '_text' }
     migrate_table table, columns, new_columns, String do |data|
-
+      JSON.dump(MessagePack.unpack(data))
    end
  end
end
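Restated for clarity: the migration's up direction parses each JSON text column and packs it into a msgpack blob, and the down direction reverses that. A sketch of the same round-trip in Go, for symmetry with the expand tool — illustrative only, the shipped migration is the Ruby above, and the function names are mine:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/vmihailenco/msgpack/v5"
)

// up mirrors ::Sequel.blob(MessagePack.pack(JSON.parse(data))).
func up(data string) ([]byte, error) {
	var value interface{}
	if err := json.Unmarshal([]byte(data), &value); err != nil {
		return nil, err
	}
	return msgpack.Marshal(value) // stored as a blob column
}

// down mirrors JSON.dump(MessagePack.unpack(data)).
func down(blob []byte) (string, error) {
	var value interface{}
	if err := msgpack.Unmarshal(blob, &value); err != nil {
		return "", err
	}
	out, err := json.Marshal(value)
	return string(out), err
}

func main() {
	blob, _ := up(`{"name":"delay","time":1618566633}`)
	text, _ := down(blob)
	fmt.Println(text) // {"name":"delay","time":1618566633}
}
```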
data/lib/dynflow/version.rb
CHANGED
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: dynflow
 version: !ruby/object:Gem::Version
-  version: 1.6.2
+  version: 1.6.3
 platform: ruby
 authors:
 - Ivan Necas
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2022-01-20 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: multi_json
@@ -263,6 +263,7 @@ extensions: []
 extra_rdoc_files: []
 files:
 - ".github/install_dependencies.sh"
+- ".github/workflows/release.yml"
 - ".github/workflows/ruby.yml"
 - ".gitignore"
 - ".rubocop.yml"
@@ -419,6 +420,10 @@ files:
 - examples/sub_plan_concurrency_control.rb
 - examples/sub_plans.rb
 - examples/termination.rb
+- extras/expand/Dockerfile
+- extras/expand/README.md
+- extras/expand/go.mod
+- extras/expand/go.sum
 - extras/expand/main.go
 - extras/statsd_mapping.conf
 - lib/dynflow.rb
|