dynflow 1.5.0 → 1.6.0
- checksums.yaml +4 -4
- data/dynflow.gemspec +1 -0
- data/examples/chunked_output_benchmark.rb +77 -0
- data/extras/expand/main.go +180 -0
- data/lib/dynflow/action.rb +11 -1
- data/lib/dynflow/delayed_executors/abstract_core.rb +11 -9
- data/lib/dynflow/director.rb +37 -4
- data/lib/dynflow/dispatcher/client_dispatcher.rb +1 -1
- data/lib/dynflow/dispatcher/executor_dispatcher.rb +8 -0
- data/lib/dynflow/dispatcher.rb +5 -1
- data/lib/dynflow/execution_plan/hooks.rb +1 -1
- data/lib/dynflow/execution_plan/steps/abstract_flow_step.rb +1 -0
- data/lib/dynflow/execution_plan.rb +4 -1
- data/lib/dynflow/executors/abstract/core.rb +9 -0
- data/lib/dynflow/executors/parallel.rb +4 -0
- data/lib/dynflow/persistence.rb +10 -0
- data/lib/dynflow/persistence_adapters/sequel.rb +49 -15
- data/lib/dynflow/persistence_adapters/sequel_migrations/021_create_output_chunks.rb +30 -0
- data/lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb +85 -0
- data/lib/dynflow/testing/dummy_coordinator.rb +10 -0
- data/lib/dynflow/testing/dummy_planned_action.rb +4 -0
- data/lib/dynflow/testing/dummy_world.rb +2 -1
- data/lib/dynflow/testing.rb +1 -0
- data/lib/dynflow/version.rb +1 -1
- data/lib/dynflow/world.rb +12 -0
- data/test/execution_plan_hooks_test.rb +36 -0
- data/test/future_execution_test.rb +6 -3
- data/web/views/flow_step.erb +1 -0
- metadata +21 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 52a7b75f226c65a193d6c86e84f46402ba2d22fa34d2be611c1e23eb7275027c
+  data.tar.gz: 34f06ed940ddfd241fe9496ed23b7b27046361af5d1c1b4f21c0294afdb972bd
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 78655398a5d1c246c37d9187e8d1b3d04de4891f49404ec0cf9001c3c87254c079cce67012a79ce27848e95b436b4142d70ae36bb3c9cac4a972163a58c7efb8
+  data.tar.gz: fcebf584181e0b14acb7681ef71496189283e2f38ce3595ba3310babc754d1ad07ee7f99c92df23ac64db47d6a57ab24c64866161a1af059c16aab31a7cfa0eb
data/dynflow.gemspec
CHANGED
@@ -20,6 +20,7 @@ Gem::Specification.new do |s|
   s.required_ruby_version = '>= 2.3.0'
 
   s.add_dependency "multi_json"
+  s.add_dependency "msgpack", '~> 1.3.3'
   s.add_dependency "apipie-params"
   s.add_dependency "algebrick", '~> 0.7.0'
   s.add_dependency "concurrent-ruby", '~> 1.1.3'
data/examples/chunked_output_benchmark.rb
ADDED
@@ -0,0 +1,77 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require_relative 'example_helper'
+require 'benchmark'
+
+WORDS = File.readlines('/usr/share/dict/words').map(&:chomp).freeze
+COUNT = WORDS.count
+
+module Common
+  def main_loop
+    if output[:current] < input[:limit]
+      consumed = yield
+      output[:current] += consumed
+      plan_event(nil)
+      suspend
+    end
+  end
+
+  def batch
+    WORDS.drop(output[:current]).take(input[:chunk])
+  end
+end
+
+class Regular < ::Dynflow::Action
+  include Common
+
+  def run(event = nil)
+    output[:current] ||= 0
+    output[:words] ||= []
+
+    main_loop do
+      words = batch
+      output[:words] << words
+      words.count
+    end
+  end
+end
+
+class Chunked < ::Dynflow::Action
+  include Common
+
+  def run(event = nil)
+    output[:current] ||= 0
+
+    main_loop do
+      words = batch
+      output_chunk(words)
+      words.count
+    end
+  end
+end
+
+if $0 == __FILE__
+  ExampleHelper.world.action_logger.level = 4
+  ExampleHelper.world.logger.level = 4
+
+  Benchmark.bm do |bm|
+    bm.report('regular 1000 by 100') { ExampleHelper.world.trigger(Regular, limit: 1000, chunk: 100).finished.wait }
+    bm.report('chunked 1000 by 100') { ExampleHelper.world.trigger(Chunked, limit: 1000, chunk: 100).finished.wait }
+
+    bm.report('regular 10_000 by 100') { ExampleHelper.world.trigger(Regular, limit: 10_000, chunk: 100).finished.wait }
+    bm.report('chunked 10_000 by 100') { ExampleHelper.world.trigger(Chunked, limit: 10_000, chunk: 100).finished.wait }
+
+    bm.report('regular 10_000 by 1000') { ExampleHelper.world.trigger(Regular, limit: 10_000, chunk: 1000).finished.wait }
+    bm.report('chunked 10_000 by 1000') { ExampleHelper.world.trigger(Chunked, limit: 10_000, chunk: 1000).finished.wait }
+
+    bm.report('regular 100_000 by 100') { ExampleHelper.world.trigger(Regular, limit: 100_000, chunk: 100).finished.wait }
+    bm.report('chunked 100_000 by 100') { ExampleHelper.world.trigger(Chunked, limit: 100_000, chunk: 100).finished.wait }
+
+    bm.report('regular 100_000 by 1000') { ExampleHelper.world.trigger(Regular, limit: 100_000, chunk: 1000).finished.wait }
+    bm.report('chunked 100_000 by 1000') { ExampleHelper.world.trigger(Chunked, limit: 100_000, chunk: 1000).finished.wait }
+
+    bm.report('regular 100_000 by 10_000') { ExampleHelper.world.trigger(Regular, limit: 100_000, chunk: 10_000).finished.wait }
+    bm.report('chunked 100_000 by 10_000') { ExampleHelper.world.trigger(Chunked, limit: 100_000, chunk: 10_000).finished.wait }
+  end
+end
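The benchmark contrasts accumulating results in output (rewritten on every update) with streaming them through the new output_chunk API added in lib/dynflow/action.rb below. A minimal sketch of reading the persisted chunks back after a run, assuming a world configured as in the bundled example_helper and that the single run action of such a plan has id 1 (both assumptions, not guaranteed by the API):

  triggered = ExampleHelper.world.trigger(Chunked, limit: 1_000, chunk: 100)
  triggered.finished.wait
  # Each persisted record carries :chunk, :kind and :timestamp
  # (see the sequel adapter changes below).
  chunks = ExampleHelper.world.persistence.load_output_chunks(triggered.execution_plan_id, 1)
  puts "#{chunks.count} chunks, #{chunks.sum { |c| c[:chunk].count }} words total"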
@@ -0,0 +1,180 @@
|
|
1
|
+
package main
|
2
|
+
|
3
|
+
import (
|
4
|
+
"encoding/csv"
|
5
|
+
"encoding/hex"
|
6
|
+
"encoding/json"
|
7
|
+
"github.com/vmihailenco/msgpack"
|
8
|
+
"io"
|
9
|
+
"os"
|
10
|
+
)
|
11
|
+
|
12
|
+
// dynflow_steps
|
13
|
+
// 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
|
14
|
+
// execution_plan_uuid,id,action_id,data,state,started_at,ended_at,real_time,execution_time,progress_done,progress_weight,class,error,action_class,children,queue
|
15
|
+
//
|
16
|
+
// encoded columns are:
|
17
|
+
// 3 - data
|
18
|
+
// 12 - error
|
19
|
+
// 14 - children
|
20
|
+
|
21
|
+
// dynflow_actions
|
22
|
+
// 0 1 2 3 4 5 6 7 8 9 10
|
23
|
+
// execution_plan_uuid,id,data,caller_execution_plan_id,caller_action_id,class,input,output,plan_step_id,run_step_id,finalize_step_id
|
24
|
+
//
|
25
|
+
// encoded columns are:
|
26
|
+
// 2 - data
|
27
|
+
// 6 - input
|
28
|
+
// 7 - output
|
29
|
+
|
30
|
+
// dynflow_execution_plans
|
31
|
+
// Without msgpack
|
32
|
+
// 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
|
33
|
+
// uuid,data,state,result,started_at,ended_at,real_time,execution_time,label,class,run_flow,finalize_flow,execution_history,root_plan_step_id,step_ids
|
34
|
+
|
35
|
+
// With msgpack
|
36
|
+
// 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
|
37
|
+
// uuid,data,state,result,started_at,ended_at,real_time,execution_time,label,class,root_plan_step_id,run_flow,finalize_flow,execution_history,step_ids
|
38
|
+
//
|
39
|
+
// 1 - data
|
40
|
+
// 11 - run_flow
|
41
|
+
// 12 - finalize_flow
|
42
|
+
// 13 - execution_history
|
43
|
+
// 14 - step_ids
|
44
|
+
|
45
|
+
func main() {
|
46
|
+
reader := csv.NewReader(os.Stdin)
|
47
|
+
writer := csv.NewWriter(os.Stdout)
|
48
|
+
defer writer.Flush()
|
49
|
+
|
50
|
+
for {
|
51
|
+
record, err := reader.Read()
|
52
|
+
if err == io.EOF {
|
53
|
+
break
|
54
|
+
}
|
55
|
+
|
56
|
+
writer.Write(processRow(record))
|
57
|
+
}
|
58
|
+
}
|
59
|
+
|
60
|
+
func processRow(record []string) []string {
|
61
|
+
// Execution plan exports have 15 fields, other exports have different counts
|
62
|
+
if len(record) == 15 {
|
63
|
+
record = expandExecutionPlan(record)
|
64
|
+
}
|
65
|
+
|
66
|
+
for i, r := range record {
|
67
|
+
record[i] = reencodeField(r)
|
68
|
+
}
|
69
|
+
|
70
|
+
return record
|
71
|
+
}
|
72
|
+
|
73
|
+
func expandExecutionPlan(record []string) []string {
|
74
|
+
var flow_columns [2]int
|
75
|
+
|
76
|
+
// The step_ids field should be a safe indicator
|
77
|
+
if isHexEncoded(record[14]) {
|
78
|
+
flow_columns = [...]int{11, 12}
|
79
|
+
} else {
|
80
|
+
flow_columns = [...]int{10, 11}
|
81
|
+
}
|
82
|
+
|
83
|
+
for _, i := range flow_columns {
|
84
|
+
record[i] = expandFlow(record[i])
|
85
|
+
}
|
86
|
+
return record
|
87
|
+
}
|
88
|
+
|
89
|
+
func isHexEncoded(field string) bool {
|
90
|
+
return len(field) >= 2 && field[0:2] == "\\x"
|
91
|
+
}
|
92
|
+
|
93
|
+
func reencodeField(field string) string {
|
94
|
+
decoded, err := decode(field)
|
95
|
+
if err != nil {
|
96
|
+
return field
|
97
|
+
}
|
98
|
+
|
99
|
+
return encode(decoded)
|
100
|
+
}
|
101
|
+
|
102
|
+
func decode(field string) (interface{}, error) {
|
103
|
+
var intermediate interface{}
|
104
|
+
bytes := []byte(field)
|
105
|
+
|
106
|
+
if isHexEncoded(field) {
|
107
|
+
decoded_bytes, err := hex.DecodeString(field[2:])
|
108
|
+
if err != nil {
|
109
|
+
return "", err
|
110
|
+
}
|
111
|
+
|
112
|
+
err = msgpack.Unmarshal(decoded_bytes, &intermediate)
|
113
|
+
if err != nil {
|
114
|
+
return "", err
|
115
|
+
}
|
116
|
+
|
117
|
+
return intermediate, nil
|
118
|
+
}
|
119
|
+
|
120
|
+
err := json.Unmarshal(bytes, &intermediate)
|
121
|
+
if err != nil {
|
122
|
+
return "", err
|
123
|
+
}
|
124
|
+
|
125
|
+
return intermediate, nil
|
126
|
+
}
|
127
|
+
|
128
|
+
func encode(data interface{}) string {
|
129
|
+
result, err := json.Marshal(data)
|
130
|
+
if err != nil {
|
131
|
+
panic(err)
|
132
|
+
}
|
133
|
+
|
134
|
+
return string(result)
|
135
|
+
}
|
136
|
+
|
137
|
+
func expandFlow(field string) string {
|
138
|
+
intermediate, err := decode(field)
|
139
|
+
if err != nil {
|
140
|
+
return field
|
141
|
+
}
|
142
|
+
|
143
|
+
var result map[string]interface{}
|
144
|
+
switch intermediate.(type) {
|
145
|
+
// old style hash
|
146
|
+
case map[string]interface{}:
|
147
|
+
result = intermediate.(map[string]interface{})
|
148
|
+
// newer compact S-expression like representation
|
149
|
+
case []interface{}, float64:
|
150
|
+
result = expandCompactFlow(intermediate)
|
151
|
+
}
|
152
|
+
|
153
|
+
return encode(result)
|
154
|
+
}
|
155
|
+
|
156
|
+
func expandCompactFlow(flow interface{}) map[string]interface{} {
|
157
|
+
result := make(map[string]interface{})
|
158
|
+
switch flow.(type) {
|
159
|
+
case []interface{}:
|
160
|
+
switch flow.([]interface{})[0] {
|
161
|
+
case "S":
|
162
|
+
result["class"] = "Dynflow::Flows::Sequence"
|
163
|
+
case "C":
|
164
|
+
result["class"] = "Dynflow::Flows::Concurrence"
|
165
|
+
default:
|
166
|
+
panic("Unknown flow type")
|
167
|
+
}
|
168
|
+
var subflows []interface{}
|
169
|
+
for subflow := range flow.([]interface{})[1:] {
|
170
|
+
subflows = append(subflows, expandCompactFlow(subflow))
|
171
|
+
}
|
172
|
+
result["flows"] = subflows
|
173
|
+
case float64, int:
|
174
|
+
result["class"] = "Dynflow::Flows::Atom"
|
175
|
+
result["step_id"] = flow
|
176
|
+
default:
|
177
|
+
panic("Unknown flow type")
|
178
|
+
}
|
179
|
+
return result
|
180
|
+
}
|
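The Go helper re-encodes hex/msgpack CSV fields back to JSON when importing old backups. For readers who want the same transformation without Go, a rough Ruby equivalent of its reencodeField path, under the same assumption of Postgres-style "\x"-prefixed hex fields (the method name is illustrative, not part of the gem):

  require 'msgpack'
  require 'multi_json'

  def reencode_field(field)
    # pass through anything that is not a hex-encoded msgpack payload
    return field unless field.start_with?('\x')
    MultiJson.dump(MessagePack.unpack([field[2..-1]].pack('H*')))
  rescue MessagePack::MalformedFormatError
    field
  end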
data/lib/dynflow/action.rb
CHANGED
@@ -105,7 +105,8 @@ module Dynflow
 
     attr_reader :world, :phase, :execution_plan_id, :id, :input,
                 :plan_step_id, :run_step_id, :finalize_step_id,
-                :caller_execution_plan_id, :caller_action_id
+                :caller_execution_plan_id, :caller_action_id,
+                :pending_output_chunks
 
     middleware.use Action::Progress::Calculate
 
@@ -133,6 +134,7 @@ module Dynflow
 
       @input = OutputReference.deserialize getter.(:input, phase?(Run, Finalize, Present))
       @output = OutputReference.deserialize getter.(:output, false) if phase? Run, Finalize, Present
+      @pending_output_chunks = [] if phase? Run, Finalize
     end
 
     def phase?(*phases)
@@ -169,6 +171,14 @@ module Dynflow
       end
     end
 
+    def output_chunk(chunk, kind: nil, timestamp: Time.now)
+      @pending_output_chunks << { chunk: chunk, kind: kind, timestamp: timestamp }
+    end
+
+    def stored_output_chunks
+      @output_chunks ||= world.persistence.load_output_chunks(@execution_plan_id, @id)
+    end
+
     def caller_action
       phase! Present
       return nil if @caller_action_id
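output_chunk only appends to the pending_output_chunks buffer; the executor is expected to flush it through the persistence layer (save_output_chunks below) when the action suspends or finishes. A minimal sketch of an action that streams its progress, modelled on the bundled benchmark; fetch_next_lines is a hypothetical helper, not part of dynflow:

  class StreamingAction < ::Dynflow::Action
    def run(event = nil)
      output[:done] ||= 0
      if output[:done] < input[:total]
        lines = fetch_next_lines               # hypothetical helper
        output_chunk(lines, kind: 'stdout')    # buffered, persisted on suspend
        output[:done] += lines.count
        plan_event(nil)                        # schedule another run iteration
        suspend
      end
    end
  end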
data/lib/dynflow/delayed_executors/abstract_core.rb
CHANGED
@@ -46,24 +46,21 @@ module Dynflow
 
       def process(delayed_plans, check_time)
        processed_plan_uuids = []
+        dispatched_plan_uuids = []
+        planning_locks = world.coordinator.find_records(class: Coordinator::PlanningLock.name)
        delayed_plans.each do |plan|
-          next if plan.frozen
+          next if plan.frozen || locked_for_planning?(planning_locks, plan)
          fix_plan_state(plan)
          with_error_handling do
            if plan.execution_plan.state != :scheduled
              # in case the previous process was terminated after running the plan, but before deleting the delayed plan record.
              @logger.info("Execution plan #{plan.execution_plan_uuid} is expected to be in 'scheduled' state, was '#{plan.execution_plan.state}', skipping")
-
-              @logger.debug "Failing plan #{plan.execution_plan_uuid}"
-              plan.timeout
+              processed_plan_uuids << plan.execution_plan_uuid
            else
              @logger.debug "Executing plan #{plan.execution_plan_uuid}"
-
-
-              plan.execute
-            end
+              world.plan_request(plan.execution_plan_uuid)
+              dispatched_plan_uuids << plan.execution_plan_uuid
            end
-            processed_plan_uuids << plan.execution_plan_uuid
          end
        end
        world.persistence.delete_delayed_plans(:execution_plan_uuid => processed_plan_uuids) unless processed_plan_uuids.empty?
@@ -72,6 +69,7 @@ module Dynflow
      private
 
      # handle the case, where the process was termintated while planning was in progress before
+      # TODO: Doing execution plan updates in orchestrator is bad
      def fix_plan_state(plan)
        if plan.execution_plan.state == :planning
          @logger.info("Execution plan #{plan.execution_plan_uuid} is expected to be in 'scheduled' state, was '#{plan.execution_plan.state}', auto-fixing")
@@ -79,6 +77,10 @@ module Dynflow
          plan.execution_plan.save
        end
      end
+
+      def locked_for_planning?(planning_locks, plan)
+        planning_locks.any? { |lock| lock.execution_plan_id == plan.execution_plan_uuid }
+      end
    end
  end
 end
data/lib/dynflow/director.rb
CHANGED
@@ -53,7 +53,7 @@ module Dynflow
     end
 
     def self.new_from_hash(hash, *_args)
-      self.new(hash[:execution_plan_id], hash[:queue])
+      self.new(hash[:execution_plan_id], hash[:queue], hash[:sender_orchestrator_id])
     end
   end
 
@@ -108,6 +108,26 @@ module Dynflow
       end
     end
 
+    class PlanningWorkItem < WorkItem
+      def execute
+        plan = world.persistence.load_delayed_plan(execution_plan_id)
+        return if plan.nil? || plan.execution_plan.state != :scheduled
+
+        if !plan.start_before.nil? && plan.start_before < Time.now.utc()
+          plan.timeout
+          return
+        end
+
+        world.coordinator.acquire(Coordinator::PlanningLock.new(world, plan.execution_plan_uuid)) do
+          plan.plan
+        end
+        plan.execute
+      rescue => e
+        world.logger.warn e.message
+        world.logger.debug e.backtrace.join("\n")
+      end
+    end
+
     class FinalizeWorkItem < WorkItem
       attr_reader :finalize_steps_data
 
@@ -147,12 +167,18 @@ module Dynflow
       @logger = world.logger
       @execution_plan_managers = {}
       @rescued_steps = {}
+      @planning_plans = []
     end
 
     def current_execution_plan_ids
       @execution_plan_managers.keys
     end
 
+    def handle_planning(execution_plan_uuid)
+      @planning_plans << execution_plan_uuid
+      [PlanningWorkItem.new(execution_plan_uuid, :default, @world.id)]
+    end
+
     def start_execution(execution_plan_id, finished)
       manager = track_execution_plan(execution_plan_id, finished)
       return [] unless manager
@@ -176,9 +202,16 @@ module Dynflow
     end
 
     def work_finished(work)
-
-
-
+      case work
+      when PlanningWorkItem
+        @planning_plans.delete(work.execution_plan_id)
+        @world.persistence.delete_delayed_plans(:execution_plan_uuid => work.execution_plan_id)
+        []
+      else
+        manager = @execution_plan_managers[work.execution_plan_id]
+        return [] unless manager # skip case when getting event from execution plan that is not running anymore
+        unless_done(manager, manager.what_is_next(work))
+      end
     end
 
     # called when there was an unhandled exception during the execution
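Taken together with the delayed-executor and dispatcher changes in this release, planning of delayed plans now flows through the work queue instead of happening inline. A condensed view of the path, inferred from the hunks in this diff rather than from documented API:

  # world.plan_request(uuid)                 # publishes Dispatcher::Planning[uuid]
  # -> ClientDispatcher: on ~Execution | ~Planning -> AnyExecutor
  # -> ExecutorDispatcher#perform_planning   # world.executor.plan(uuid)
  # -> Core#handle_planning -> Director#handle_planning (PlanningWorkItem enqueued)
  # -> PlanningWorkItem#execute              # plan.plan under PlanningLock, then plan.execute
  # -> Director#work_finished                # deletes the delayed-plan record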
data/lib/dynflow/dispatcher/client_dispatcher.rb
CHANGED
@@ -134,7 +134,7 @@ module Dynflow
     def dispatch_request(request, client_world_id, request_id)
       ignore_unknown = false
       executor_id = match request,
-                          (on ~Execution do |execution|
+                          (on ~Execution | ~Planning do |execution|
                             AnyExecutor
                           end),
                           (on ~Event do |event|
data/lib/dynflow/dispatcher/executor_dispatcher.rb
CHANGED
@@ -9,6 +9,7 @@ module Dynflow
 
     def handle_request(envelope)
       match(envelope.message,
+            on(Planning) { perform_planning(envelope, envelope.message) },
             on(Execution) { perform_execution(envelope, envelope.message) },
             on(Event) { perform_event(envelope, envelope.message) },
             on(Status) { get_execution_status(envelope, envelope.message) })
@@ -16,6 +17,13 @@ module Dynflow
 
     protected
 
+    def perform_planning(envelope, planning)
+      @world.executor.plan(planning.execution_plan_id)
+      respond(envelope, Accepted)
+    rescue Dynflow::Error => e
+      respond(envelope, Failed[e.message])
+    end
+
     def perform_execution(envelope, execution)
       allocate_executor(execution.execution_plan_id, envelope.sender_id, envelope.request_id)
       execution_lock = Coordinator::ExecutionLock.new(@world, execution.execution_plan_id, envelope.sender_id, envelope.request_id)
data/lib/dynflow/dispatcher.rb
CHANGED
@@ -14,6 +14,10 @@ module Dynflow
       fields! execution_plan_id: String
     end
 
+    Planning = type do
+      fields! execution_plan_id: String
+    end
+
     Ping = type do
       fields! receiver_id: String,
               use_cache: type { variants TrueClass, FalseClass }
@@ -24,7 +28,7 @@ module Dynflow
               execution_plan_id: type { variants String, NilClass }
     end
 
-    variants Event, Execution, Ping, Status
+    variants Event, Execution, Ping, Status, Planning
   end
 
   Response = Algebrick.type do
data/lib/dynflow/execution_plan/hooks.rb
CHANGED
@@ -21,7 +21,7 @@ module Dynflow
       # @param class_name [Class] class of the hook to be run
       # @param on [Symbol, Array<Symbol>] when should the hook be run, one of {HOOK_KINDS}
       # @return [void]
-      def use(class_name, on:
+      def use(class_name, on: ExecutionPlan.states)
         on = Array[on] unless on.kind_of?(Array)
         validate_kinds!(on)
         if hooks[class_name]
data/lib/dynflow/execution_plan.rb
CHANGED
@@ -254,6 +254,7 @@ module Dynflow
     def delay(caller_action, action_class, delay_options, *args)
       save
       @root_plan_step = add_scheduling_step(action_class, caller_action)
+      run_hooks(:pending)
       serializer = root_plan_step.delay(delay_options, args)
       delayed_plan = DelayedPlan.new(@world,
                                      id,
@@ -276,7 +277,9 @@ module Dynflow
       raise "Unexpected options #{options.keys.inspect}" unless options.empty?
       save
       @root_plan_step = add_plan_step(action_class, caller_action)
-      @root_plan_step.save
+      step = @root_plan_step.save
+      run_hooks(:pending)
+      step
     end
 
     def plan(*args)
data/lib/dynflow/executors/abstract/core.rb
CHANGED
@@ -35,6 +35,15 @@ module Dynflow
       handle_work(@director.handle_event(event))
     end
 
+    def handle_planning(execution_plan_id)
+      if terminating?
+        raise Dynflow::Error,
+              "cannot accept event: #{event} core is terminating"
+      end
+
+      handle_work(@director.handle_planning(execution_plan_id))
+    end
+
     def plan_events(delayed_events)
       delayed_events.each do |event|
         @world.plan_event(event.execution_plan_id, event.step_id, event.event, event.time, optional: event.optional)
data/lib/dynflow/persistence.rb
CHANGED
@@ -46,6 +46,16 @@ module Dynflow
       adapter.save_action(execution_plan_id, action.id, action.to_hash)
     end
 
+    def save_output_chunks(execution_plan_id, action_id, chunks)
+      return if chunks.empty?
+
+      adapter.save_output_chunks(execution_plan_id, action_id, chunks)
+    end
+
+    def load_output_chunks(execution_plan_id, action_id)
+      adapter.load_output_chunks(execution_plan_id, action_id)
+    end
+
     def find_execution_plans(options)
       adapter.find_execution_plans(options).map do |execution_plan_hash|
         ExecutionPlan.new_from_hash(execution_plan_hash, @world)
data/lib/dynflow/persistence_adapters/sequel.rb
CHANGED
@@ -1,9 +1,10 @@
 # frozen_string_literal: true
 require 'sequel'
-require '
+require 'msgpack'
 require 'fileutils'
 require 'csv'
 
+# rubocop:disable Metrics/ClassLength
 module Dynflow
   module PersistenceAdapters
 
@@ -37,12 +38,14 @@ module Dynflow
                       class action_class execution_plan_uuid queue),
              envelope: %w(receiver_id),
              coordinator_record: %w(id owner_id class),
-             delayed: %w(execution_plan_uuid start_at start_before args_serializer frozen)
+             delayed: %w(execution_plan_uuid start_at start_before args_serializer frozen),
+             output_chunk: %w(execution_plan_uuid action_id kind timestamp) }
 
     SERIALIZABLE_COLUMNS = { action: %w(input output),
                              delayed: %w(serialized_args),
                              execution_plan: %w(run_flow finalize_flow execution_history step_ids),
-                             step: %w(error children)
+                             step: %w(error children),
+                             output_chunk: %w(chunk) }
 
     def initialize(config)
       migrate = true
@@ -83,15 +86,17 @@ module Dynflow
         table(:delayed).where(execution_plan_uuid: uuids).delete
 
         steps = table(:step).where(execution_plan_uuid: uuids)
-        backup_to_csv(steps, backup_dir, 'steps.csv') if backup_dir
+        backup_to_csv(:step, steps, backup_dir, 'steps.csv') if backup_dir
         steps.delete
 
+        output_chunks = table(:output_chunk).where(execution_plan_uuid: uuids).delete
+
         actions = table(:action).where(execution_plan_uuid: uuids)
-        backup_to_csv(actions, backup_dir, 'actions.csv') if backup_dir
+        backup_to_csv(:action, actions, backup_dir, 'actions.csv') if backup_dir
         actions.delete
 
         execution_plans = table(:execution_plan).where(uuid: uuids)
-        backup_to_csv(execution_plans, backup_dir, 'execution_plans.csv') if backup_dir
+        backup_to_csv(:execution_plan, execution_plans, backup_dir, 'execution_plans.csv') if backup_dir
         count += execution_plans.delete
       end
     end
@@ -173,6 +178,18 @@ module Dynflow
       save :action, { execution_plan_uuid: execution_plan_id, id: action_id }, value, with_data: false
     end
 
+    def save_output_chunks(execution_plan_id, action_id, chunks)
+      chunks.each do |chunk|
+        chunk[:execution_plan_uuid] = execution_plan_id
+        chunk[:action_id] = action_id
+        save :output_chunk, {}, chunk, with_data: false
+      end
+    end
+
+    def load_output_chunks(execution_plan_id, action_id)
+      load_records :output_chunk, { execution_plan_uuid: execution_plan_id, action_id: action_id }, [:timestamp, :kind, :chunk]
+    end
+
     def connector_feature!
       unless @additional_responsibilities[:connector]
         raise "The sequel persistence adapter connector feature used but not enabled in additional_features"
@@ -265,7 +282,8 @@ module Dynflow
               step: :dynflow_steps,
               envelope: :dynflow_envelopes,
               coordinator_record: :dynflow_coordinator_records,
-              delayed: :dynflow_delayed_plans
+              delayed: :dynflow_delayed_plans,
+              output_chunk: :dynflow_output_chunks }
 
     def table(which)
       db[TABLES.fetch(which)]
@@ -281,10 +299,15 @@ module Dynflow
 
     def prepare_record(table_name, value, base = {}, with_data = true)
       record = base.dup
-
+      has_data_column = table(table_name).columns.include?(:data)
+      if with_data && has_data_column
        record[:data] = dump_data(value)
      else
-
+        if has_data_column
+          record[:data] = nil
+        else
+          record.delete(:data)
+        end
       record.merge! serialize_columns(table_name, value)
     end
 
@@ -339,7 +362,11 @@ module Dynflow
       records = with_retry do
         filtered = table.filter(Utils.symbolize_keys(condition))
         # Filter out requested columns which the table doesn't have, load data just in case
-
+        unless keys.nil?
+          columns = table.columns & keys
+          columns |= [:data] if table.columns.include?(:data)
+          filtered = filtered.select(*columns)
+        end
         filtered.all
       end
       records = records.map { |record| load_data(record, what) }
@@ -355,11 +382,11 @@ module Dynflow
       hash = if record[:data].nil?
                SERIALIZABLE_COLUMNS.fetch(what, []).each do |key|
                  key = key.to_sym
-                 record[key] =
+                 record[key] = MessagePack.unpack(record[key]) unless record[key].nil?
                end
                record
              else
-
+               MessagePack.unpack(record[:data])
             end
       Utils.indifferent_hash(hash)
     end
@@ -368,7 +395,7 @@ module Dynflow
       FileUtils.mkdir_p(backup_dir) unless File.directory?(backup_dir)
     end
 
-    def backup_to_csv(dataset, backup_dir, file_name)
+    def backup_to_csv(table_name, dataset, backup_dir, file_name)
       ensure_backup_dir(backup_dir)
       csv_file = File.join(backup_dir, file_name)
       appending = File.exist?(csv_file)
@@ -376,7 +403,12 @@ module Dynflow
       File.open(csv_file, 'a') do |csv|
         csv << columns.to_csv unless appending
         dataset.each do |row|
-
+          values = columns.map do |col|
+            value = row[col]
+            value = value.unpack('H*').first if value && SERIALIZABLE_COLUMNS.fetch(table_name, []).include?(col.to_s)
+            value
+          end
+          csv << values.to_csv
         end
       end
       dataset
@@ -394,7 +426,8 @@ module Dynflow
 
     def dump_data(value)
       return if value.nil?
-
+      packed = MessagePack.pack(Type!(value, Hash, Array, Integer, String))
+      ::Sequel.blob(packed)
     end
 
     def paginate(data_set, options)
@@ -477,3 +510,4 @@ module Dynflow
     end
   end
 end
+# rubocop:enable Metrics/ClassLength
data/lib/dynflow/persistence_adapters/sequel_migrations/021_create_output_chunks.rb
ADDED
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+Sequel.migration do
+  up do
+    type = database_type
+    create_table(:dynflow_output_chunks) do
+      primary_key :id
+
+      column_properties = if type.to_s.include?('postgres')
+                            {type: :uuid}
+                          else
+                            {type: String, size: 36, fixed: true, null: false}
+                          end
+      foreign_key :execution_plan_uuid, :dynflow_execution_plans, **column_properties
+      index :execution_plan_uuid
+
+      column :action_id, Integer, null: false
+      foreign_key [:execution_plan_uuid, :action_id], :dynflow_actions,
+                  name: :dynflow_output_chunks_execution_plan_uuid_fkey1
+      index [:execution_plan_uuid, :action_id]
+
+      column :chunk, String, text: true
+      column :kind, String
+      column :timestamp, Time, null: false
+    end
+  end
+
+  down do
+    drop_table(:dynflow_output_chunks)
+  end
+end
data/lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb
ADDED
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'multi_json'
+require 'msgpack'
+
+def table_pkeys(table)
+  case table
+  when :dynflow_actions, :dynflow_steps
+    [:execution_plan_uuid, :id]
+  when :dynflow_coordinator_records
+    [:id, :class]
+  when :dynflow_delayed_plans
+    [:execution_plan_uuid]
+  when :dynflow_envelopes
+    [:id]
+  when :dynflow_execution_plans
+    [:uuid]
+  end
+end
+
+def conditions_for_row(table, row)
+  row.slice(*table_pkeys(table))
+end
+
+def migrate_table(table, from_names, to_names, new_type)
+  alter_table(table) do
+    to_names.each do |new|
+      add_column new, new_type
+    end
+  end
+
+  relevant_columns = table_pkeys(table) | from_names
+
+  from(table).select(*relevant_columns).each do |row|
+    update = from_names.zip(to_names).reduce({}) do |acc, (from, to)|
+      row[from].nil? ? acc : acc.merge(to => yield(row[from]))
+    end
+    next if update.empty?
+    from(table).where(conditions_for_row(table, row)).update(update)
+  end
+
+  from_names.zip(to_names).each do |old, new|
+    alter_table(table) do
+      drop_column old
+    end
+
+    if database_type == :mysql
+      type = new_type == File ? 'blob' : 'mediumtext'
+      run "ALTER TABLE #{table} CHANGE COLUMN `#{new}` `#{old}` #{type};"
+    else
+      rename_column table, new, old
+    end
+  end
+end
+
+Sequel.migration do
+
+  TABLES = {
+    :dynflow_actions => [:data, :input, :output],
+    :dynflow_coordinator_records => [:data],
+    :dynflow_delayed_plans => [:serialized_args, :data],
+    :dynflow_envelopes => [:data],
+    :dynflow_execution_plans => [:run_flow, :finalize_flow, :execution_history, :step_ids],
+    :dynflow_steps => [:error, :children]
+  }
+
+  up do
+    TABLES.each do |table, columns|
+      new_columns = columns.map { |c| "#{c}_blob" }
+
+      migrate_table table, columns, new_columns, File do |data|
+        ::Sequel.blob(MessagePack.pack(MultiJson.load(data)))
+      end
+    end
+  end
+
+  down do
+    TABLES.each do |table, columns|
+      new_columns = columns.map { |c| c + '_text' }
+      migrate_table table, columns, new_columns, String do |data|
+        MultiJson.dump(MessagePack.unpack(data))
+      end
+    end
+  end
+end
data/lib/dynflow/testing/dummy_world.rb
CHANGED
@@ -5,7 +5,7 @@ module Dynflow
     extend Mimic
     mimic! World
 
-    attr_reader :clock, :executor, :middleware
+    attr_reader :clock, :executor, :middleware, :coordinator
     attr_accessor :action
 
     def initialize(_config = nil)
@@ -13,6 +13,7 @@ module Dynflow
       @clock = ManagedClock.new
       @executor = DummyExecutor.new(self)
       @middleware = Middleware::World.new
+      @coordinator = DummyCoordinator.new
     end
 
     def action_logger
data/lib/dynflow/testing.rb
CHANGED
@@ -19,6 +19,7 @@ module Dynflow
 
 require 'dynflow/testing/mimic'
 require 'dynflow/testing/managed_clock'
+require 'dynflow/testing/dummy_coordinator'
 require 'dynflow/testing/dummy_world'
 require 'dynflow/testing/dummy_executor'
 require 'dynflow/testing/dummy_execution_plan'
data/lib/dynflow/version.rb
CHANGED
data/lib/dynflow/world.rb
CHANGED
@@ -200,6 +200,14 @@ module Dynflow
       Scheduled[execution_plan.id]
     end
 
+    def plan_elsewhere(action_class, *args)
+      execution_plan = ExecutionPlan.new(self, nil)
+      execution_plan.delay(nil, action_class, {}, *args)
+      plan_request(execution_plan.id)
+
+      Scheduled[execution_plan.id]
+    end
+
     def plan(action_class, *args)
       plan_with_options(action_class: action_class, args: args)
     end
@@ -227,6 +235,10 @@ module Dynflow
       publish_request(Dispatcher::Event[execution_plan_id, step_id, event, time, optional], accepted, false)
     end
 
+    def plan_request(execution_plan_id, done = Concurrent::Promises.resolvable_future)
+      publish_request(Dispatcher::Planning[execution_plan_id], done, false)
+    end
+
     def ping(world_id, timeout, done = Concurrent::Promises.resolvable_future)
       publish_request(Dispatcher::Ping[world_id, true], done, false, timeout)
     end
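plan_elsewhere amounts to delay plus an immediate plan_request, so planning happens on whichever executor the dispatcher picks rather than in the calling process. A usage sketch, where MyAction and its argument are placeholders:

  scheduled = world.plan_elsewhere(MyAction, 'some-arg')
  scheduled.execution_plan_id  # the plan is planned and executed remotely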
data/test/execution_plan_hooks_test.rb
CHANGED
@@ -60,6 +60,18 @@ module Dynflow
       execution_plan_hooks.use :raise_flag_root_only, :on => :stopped
     end
 
+    class PendingAction < ::Dynflow::Action
+      include FlagHook
+
+      execution_plan_hooks.use :raise_flag, :on => :pending
+    end
+
+    class AllTransitionsAction < ::Dynflow::Action
+      include FlagHook
+
+      execution_plan_hooks.use :raise_flag
+    end
+
     class ComposedAction < RootOnlyAction
       def plan
         plan_action(RootOnlyAction)
@@ -161,6 +173,30 @@ module Dynflow
         plan.finished.wait!
         _(Flag.raised_count).must_equal 1
       end
+
+      it 'runs the pending hooks when execution plan is created' do
+        refute Flag.raised?
+        plan = world.trigger(PendingAction)
+        plan.finished.wait!
+        _(Flag.raised_count).must_equal 1
+      end
+
+      it 'runs the pending hooks when execution plan is created' do
+        refute Flag.raised?
+        delay = world.delay(PendingAction, { :start_at => Time.now.utc + 180 })
+        delayed_plan = world.persistence.load_delayed_plan(delay.execution_plan_id)
+        delayed_plan.execution_plan.cancel.each(&:wait)
+        _(Flag.raised_count).must_equal 1
+      end
+
+      it 'runs the hook on every state transition' do
+        refute Flag.raised?
+        plan = world.trigger(AllTransitionsAction)
+        plan.finished.wait!
+        # There should be 5 transitions
+        # nothing -> pending -> planning -> planned -> running -> stopped
+        _(Flag.raised_count).must_equal 5
+      end
     end
   end
 end
data/test/future_execution_test.rb
CHANGED
@@ -29,14 +29,17 @@ module Dynflow
       describe 'abstract executor' do
         let(:abstract_delayed_executor) { DelayedExecutors::AbstractCore.new(world) }
 
-        it 'handles
+        it 'handles plan in planning state' do
           delayed_plan.execution_plan.state = :planning
           abstract_delayed_executor.send(:process, [delayed_plan], @start_at)
-          _(delayed_plan.execution_plan.state).must_equal :
+          _(delayed_plan.execution_plan.state).must_equal :scheduled
+        end
 
+        it 'handles plan in running state' do
           delayed_plan.execution_plan.set_state(:running, true)
           abstract_delayed_executor.send(:process, [delayed_plan], @start_at)
           _(delayed_plan.execution_plan.state).must_equal :running
+          _(world.persistence.load_delayed_plan(delayed_plan.execution_plan_uuid)).must_be :nil?
         end
       end
 
@@ -55,7 +58,7 @@ module Dynflow
 
         it 'delays the action' do
           _(execution_plan.steps.count).must_equal 1
-          _(delayed_plan.start_at).
+          _(delayed_plan.start_at.to_i).must_equal(@start_at.to_i)
           _(history_names.call(execution_plan)).must_equal ['delay']
         end
 
data/web/views/flow_step.erb
CHANGED
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: dynflow
 version: !ruby/object:Gem::Version
-  version: 1.5.0
+  version: 1.6.0
 platform: ruby
 authors:
 - Ivan Necas
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-
+date: 2021-09-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: multi_json
@@ -25,6 +25,20 @@ dependencies:
     - - ">="
     - !ruby/object:Gem::Version
       version: '0'
+- !ruby/object:Gem::Dependency
+  name: msgpack
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+    - !ruby/object:Gem::Version
+      version: 1.3.3
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+    - !ruby/object:Gem::Version
+      version: 1.3.3
 - !ruby/object:Gem::Dependency
   name: apipie-params
   requirement: !ruby/object:Gem::Requirement
@@ -387,6 +401,7 @@ files:
 - doc/pages/source/projects/index.md
 - docker-compose.yml
 - dynflow.gemspec
+- examples/chunked_output_benchmark.rb
 - examples/clock_benchmark.rb
 - examples/example_helper.rb
 - examples/future_execution.rb
@@ -398,6 +413,7 @@ files:
 - examples/sub_plan_concurrency_control.rb
 - examples/sub_plans.rb
 - examples/termination.rb
+- extras/expand/main.go
 - extras/statsd_mapping.conf
 - lib/dynflow.rb
 - lib/dynflow/action.rb
@@ -515,6 +531,8 @@ files:
 - lib/dynflow/persistence_adapters/sequel_migrations/018_add_uuid_column.rb
 - lib/dynflow/persistence_adapters/sequel_migrations/019_update_mysql_time_precision.rb
 - lib/dynflow/persistence_adapters/sequel_migrations/020_drop_duplicate_indices.rb
+- lib/dynflow/persistence_adapters/sequel_migrations/021_create_output_chunks.rb
+- lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb
 - lib/dynflow/rails.rb
 - lib/dynflow/rails/configuration.rb
 - lib/dynflow/rails/daemon.rb
@@ -536,6 +554,7 @@ files:
 - lib/dynflow/telemetry_adapters/statsd.rb
 - lib/dynflow/testing.rb
 - lib/dynflow/testing/assertions.rb
+- lib/dynflow/testing/dummy_coordinator.rb
 - lib/dynflow/testing/dummy_execution_plan.rb
 - lib/dynflow/testing/dummy_executor.rb
 - lib/dynflow/testing/dummy_planned_action.rb