dynflow 1.5.0 → 1.6.1

Files changed (35)
  1. checksums.yaml +4 -4
  2. data/dynflow.gemspec +1 -0
  3. data/examples/chunked_output_benchmark.rb +77 -0
  4. data/extras/expand/main.go +180 -0
  5. data/lib/dynflow/action.rb +11 -1
  6. data/lib/dynflow/delayed_executors/abstract_core.rb +11 -9
  7. data/lib/dynflow/director.rb +37 -4
  8. data/lib/dynflow/dispatcher/client_dispatcher.rb +1 -1
  9. data/lib/dynflow/dispatcher/executor_dispatcher.rb +8 -0
  10. data/lib/dynflow/dispatcher.rb +5 -1
  11. data/lib/dynflow/execution_plan/hooks.rb +1 -1
  12. data/lib/dynflow/execution_plan/steps/abstract_flow_step.rb +1 -0
  13. data/lib/dynflow/execution_plan.rb +4 -1
  14. data/lib/dynflow/executors/abstract/core.rb +9 -0
  15. data/lib/dynflow/executors/parallel.rb +4 -0
  16. data/lib/dynflow/extensions/msgpack.rb +41 -0
  17. data/lib/dynflow/extensions.rb +6 -0
  18. data/lib/dynflow/persistence.rb +10 -0
  19. data/lib/dynflow/persistence_adapters/sequel.rb +51 -16
  20. data/lib/dynflow/persistence_adapters/sequel_migrations/021_create_output_chunks.rb +30 -0
  21. data/lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb +90 -0
  22. data/lib/dynflow/persistence_adapters/sequel_migrations/023_sqlite_workarounds.rb +19 -0
  23. data/lib/dynflow/serializable.rb +2 -2
  24. data/lib/dynflow/testing/dummy_coordinator.rb +10 -0
  25. data/lib/dynflow/testing/dummy_planned_action.rb +4 -0
  26. data/lib/dynflow/testing/dummy_world.rb +2 -1
  27. data/lib/dynflow/testing.rb +1 -0
  28. data/lib/dynflow/version.rb +1 -1
  29. data/lib/dynflow/world.rb +12 -0
  30. data/lib/dynflow.rb +1 -0
  31. data/test/execution_plan_hooks_test.rb +36 -0
  32. data/test/extensions_test.rb +42 -0
  33. data/test/future_execution_test.rb +6 -3
  34. data/web/views/flow_step.erb +1 -0
  35. metadata +26 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 5b33bcad7864102ad9f0f3bb654c7990b3037b7b0620b770d5f34f1495402855
-  data.tar.gz: f4089f0cb793384a764f4aa042268c19d174c989c5d348f63e126eac5fb94716
+  metadata.gz: f660ffddfd3e3c7ea4b8414d2e6f27f48d897d34280ff3bec3c8ab6e360b0bfe
+  data.tar.gz: 46cd643c84f09640a07f00eafc3f4d3feda65746d3c353338d8141277fd77e03
 SHA512:
-  metadata.gz: c119f0ce0d3605012206b083b829bc85194d676f762d61a394be6257d376c56388efd72f4a77f78445ece78942c5eb561728031e9a703767113eab7780b9d0e3
-  data.tar.gz: 136dafb81d0766e9c1041a9a52896b4cbe0d300482be729900fd9fe8f83e6095bbc084a6d43d0aa9012b436544690d3c2d774d46f0b9c573775b3707daec3428
+  metadata.gz: 99e59816fbf809403a87bbb1f96ff39958d3d059572de7a66f6205df7c48867baa50d77636d287aebb8c69d73db2cef686860520c6e7db27dbfa8217f5570715
+  data.tar.gz: 315b37f302e1c29eae52d00acabd82b605d88b7982ba3e01c920dd18619967115875ec5a1facffc6299280f01aaa61db8162ce3e02bbbd0ba06a95add1b88d64
data/dynflow.gemspec CHANGED
@@ -20,6 +20,7 @@ Gem::Specification.new do |s|
   s.required_ruby_version = '>= 2.3.0'
 
   s.add_dependency "multi_json"
+  s.add_dependency "msgpack", '~> 1.3.3'
   s.add_dependency "apipie-params"
   s.add_dependency "algebrick", '~> 0.7.0'
   s.add_dependency "concurrent-ruby", '~> 1.1.3'
data/examples/chunked_output_benchmark.rb ADDED
@@ -0,0 +1,77 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require_relative 'example_helper'
+require 'benchmark'
+
+WORDS = File.readlines('/usr/share/dict/words').map(&:chomp).freeze
+COUNT = WORDS.count
+
+module Common
+  def main_loop
+    if output[:current] < input[:limit]
+      consumed = yield
+      output[:current] += consumed
+      plan_event(nil)
+      suspend
+    end
+  end
+
+  def batch
+    WORDS.drop(output[:current]).take(input[:chunk])
+  end
+end
+
+class Regular < ::Dynflow::Action
+  include Common
+
+  def run(event = nil)
+    output[:current] ||= 0
+    output[:words] ||= []
+
+    main_loop do
+      words = batch
+      output[:words] << words
+      words.count
+    end
+  end
+end
+
+class Chunked < ::Dynflow::Action
+  include Common
+
+  def run(event = nil)
+    output[:current] ||= 0
+
+    main_loop do
+      words = batch
+      output_chunk(words)
+      words.count
+    end
+  end
+end
+
+if $0 == __FILE__
+  ExampleHelper.world.action_logger.level = 4
+  ExampleHelper.world.logger.level = 4
+
+  Benchmark.bm do |bm|
+    bm.report('regular 1000 by 100') { ExampleHelper.world.trigger(Regular, limit: 1000, chunk: 100).finished.wait }
+    bm.report('chunked 1000 by 100') { ExampleHelper.world.trigger(Chunked, limit: 1000, chunk: 100).finished.wait }
+
+    bm.report('regular 10_000 by 100') { ExampleHelper.world.trigger(Regular, limit: 10_000, chunk: 100).finished.wait }
+    bm.report('chunked 10_000 by 100') { ExampleHelper.world.trigger(Chunked, limit: 10_000, chunk: 100).finished.wait }
+
+    bm.report('regular 10_000 by 1000') { ExampleHelper.world.trigger(Regular, limit: 10_000, chunk: 1000).finished.wait }
+    bm.report('chunked 10_000 by 1000') { ExampleHelper.world.trigger(Chunked, limit: 10_000, chunk: 1000).finished.wait }
+
+    bm.report('regular 100_000 by 100') { ExampleHelper.world.trigger(Regular, limit: 100_000, chunk: 100).finished.wait }
+    bm.report('chunked 100_000 by 100') { ExampleHelper.world.trigger(Chunked, limit: 100_000, chunk: 100).finished.wait }
+
+    bm.report('regular 100_000 by 1000') { ExampleHelper.world.trigger(Regular, limit: 100_000, chunk: 1000).finished.wait }
+    bm.report('chunked 100_000 by 1000') { ExampleHelper.world.trigger(Chunked, limit: 100_000, chunk: 1000).finished.wait }
+
+    bm.report('regular 100_000 by 10_000') { ExampleHelper.world.trigger(Regular, limit: 100_000, chunk: 10_000).finished.wait }
+    bm.report('chunked 100_000 by 10_000') { ExampleHelper.world.trigger(Chunked, limit: 100_000, chunk: 10_000).finished.wait }
+  end
+end
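The benchmark contrasts the two styles this release supports: Regular re-saves an ever-growing output[:words] array on every iteration, while Chunked streams each batch through the new output_chunk API, so the persisted output stays flat. It guards on $0 == __FILE__, so presumably it is run directly from a checkout (assuming /usr/share/dict/words exists, which the script reads at load time):

    ruby examples/chunked_output_benchmark.rb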
data/extras/expand/main.go ADDED
@@ -0,0 +1,180 @@
+package main
+
+import (
+    "encoding/csv"
+    "encoding/hex"
+    "encoding/json"
+    "github.com/vmihailenco/msgpack"
+    "io"
+    "os"
+)
+
+// dynflow_steps
+// 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
+// execution_plan_uuid,id,action_id,data,state,started_at,ended_at,real_time,execution_time,progress_done,progress_weight,class,error,action_class,children,queue
+//
+// encoded columns are:
+// 3 - data
+// 12 - error
+// 14 - children
+
+// dynflow_actions
+// 0 1 2 3 4 5 6 7 8 9 10
+// execution_plan_uuid,id,data,caller_execution_plan_id,caller_action_id,class,input,output,plan_step_id,run_step_id,finalize_step_id
+//
+// encoded columns are:
+// 2 - data
+// 6 - input
+// 7 - output
+
+// dynflow_execution_plans
+// Without msgpack
+// 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
+// uuid,data,state,result,started_at,ended_at,real_time,execution_time,label,class,run_flow,finalize_flow,execution_history,root_plan_step_id,step_ids
+
+// With msgpack
+// 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
+// uuid,data,state,result,started_at,ended_at,real_time,execution_time,label,class,root_plan_step_id,run_flow,finalize_flow,execution_history,step_ids
+//
+// 1 - data
+// 11 - run_flow
+// 12 - finalize_flow
+// 13 - execution_history
+// 14 - step_ids
+
+func main() {
+    reader := csv.NewReader(os.Stdin)
+    writer := csv.NewWriter(os.Stdout)
+    defer writer.Flush()
+
+    for {
+        record, err := reader.Read()
+        if err == io.EOF {
+            break
+        }
+
+        writer.Write(processRow(record))
+    }
+}
+
+func processRow(record []string) []string {
+    // Execution plan exports have 15 fields, other exports have different counts
+    if len(record) == 15 {
+        record = expandExecutionPlan(record)
+    }
+
+    for i, r := range record {
+        record[i] = reencodeField(r)
+    }
+
+    return record
+}
+
+func expandExecutionPlan(record []string) []string {
+    var flow_columns [2]int
+
+    // The step_ids field should be a safe indicator
+    if isHexEncoded(record[14]) {
+        flow_columns = [...]int{11, 12}
+    } else {
+        flow_columns = [...]int{10, 11}
+    }
+
+    for _, i := range flow_columns {
+        record[i] = expandFlow(record[i])
+    }
+    return record
+}
+
+func isHexEncoded(field string) bool {
+    return len(field) >= 2 && field[0:2] == "\\x"
+}
+
+func reencodeField(field string) string {
+    decoded, err := decode(field)
+    if err != nil {
+        return field
+    }
+
+    return encode(decoded)
+}
+
+func decode(field string) (interface{}, error) {
+    var intermediate interface{}
+    bytes := []byte(field)
+
+    if isHexEncoded(field) {
+        decoded_bytes, err := hex.DecodeString(field[2:])
+        if err != nil {
+            return "", err
+        }
+
+        err = msgpack.Unmarshal(decoded_bytes, &intermediate)
+        if err != nil {
+            return "", err
+        }
+
+        return intermediate, nil
+    }
+
+    err := json.Unmarshal(bytes, &intermediate)
+    if err != nil {
+        return "", err
+    }
+
+    return intermediate, nil
+}
+
+func encode(data interface{}) string {
+    result, err := json.Marshal(data)
+    if err != nil {
+        panic(err)
+    }
+
+    return string(result)
+}
+
+func expandFlow(field string) string {
+    intermediate, err := decode(field)
+    if err != nil {
+        return field
+    }
+
+    var result map[string]interface{}
+    switch intermediate.(type) {
+    // old style hash
+    case map[string]interface{}:
+        result = intermediate.(map[string]interface{})
+    // newer compact S-expression like representation
+    case []interface{}, float64:
+        result = expandCompactFlow(intermediate)
+    }
+
+    return encode(result)
+}
+
+func expandCompactFlow(flow interface{}) map[string]interface{} {
+    result := make(map[string]interface{})
+    switch flow.(type) {
+    case []interface{}:
+        switch flow.([]interface{})[0] {
+        case "S":
+            result["class"] = "Dynflow::Flows::Sequence"
+        case "C":
+            result["class"] = "Dynflow::Flows::Concurrence"
+        default:
+            panic("Unknown flow type")
+        }
+        var subflows []interface{}
+        for subflow := range flow.([]interface{})[1:] {
+            subflows = append(subflows, expandCompactFlow(subflow))
+        }
+        result["flows"] = subflows
+    case float64, int:
+        result["class"] = "Dynflow::Flows::Atom"
+        result["step_id"] = flow
+    default:
+        panic("Unknown flow type")
+    }
+    return result
+}
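The tool is a plain stdin-to-stdout filter: it reads a CSV export of one of the dynflow tables, hex-decodes and msgpack-decodes any encoded field it recognizes, and re-emits the row with those fields as JSON, which is what the backup CSVs below hex-encode. A hedged usage sketch (build flags and file names are illustrative):

    go build -o expand extras/expand/main.go
    ./expand < steps.csv > steps_expanded.csv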
data/lib/dynflow/action.rb CHANGED
@@ -105,7 +105,8 @@ module Dynflow
 
     attr_reader :world, :phase, :execution_plan_id, :id, :input,
                 :plan_step_id, :run_step_id, :finalize_step_id,
-                :caller_execution_plan_id, :caller_action_id
+                :caller_execution_plan_id, :caller_action_id,
+                :pending_output_chunks
 
     middleware.use Action::Progress::Calculate
 
@@ -133,6 +134,7 @@
 
       @input = OutputReference.deserialize getter.(:input, phase?(Run, Finalize, Present))
      @output = OutputReference.deserialize getter.(:output, false) if phase? Run, Finalize, Present
+      @pending_output_chunks = [] if phase? Run, Finalize
     end
 
     def phase?(*phases)
@@ -169,6 +171,14 @@
       end
     end
 
+    def output_chunk(chunk, kind: nil, timestamp: Time.now)
+      @pending_output_chunks << { chunk: chunk, kind: kind, timestamp: timestamp }
+    end
+
+    def stored_output_chunks
+      @output_chunks ||= world.persistence.load_output_chunks(@execution_plan_id, @id)
+    end
+
     def caller_action
       phase! Present
       return nil if @caller_action_id
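The whole chunked-output surface on the action side is the pair above: output_chunk queues a chunk while the action runs, and stored_output_chunks loads what persistence saved. A minimal sketch (the Tail action and its strings are illustrative, not part of this diff):

    class Tail < ::Dynflow::Action
      def run
        # Queued chunks are persisted by the flow step together with the
        # action, instead of growing the action's output hash on every save.
        output_chunk("line 1\n", kind: 'stdout')
        output_chunk("line 2\n", kind: 'stdout')
      end
    end

    # later, e.g. in the Present phase or the web console:
    #   action.stored_output_chunks
    #   # => [{ chunk: "line 1\n", kind: "stdout", timestamp: ... }, ...]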
data/lib/dynflow/delayed_executors/abstract_core.rb CHANGED
@@ -46,24 +46,21 @@ module Dynflow
 
       def process(delayed_plans, check_time)
         processed_plan_uuids = []
+        dispatched_plan_uuids = []
+        planning_locks = world.coordinator.find_records(class: Coordinator::PlanningLock.name)
         delayed_plans.each do |plan|
-          next if plan.frozen
+          next if plan.frozen || locked_for_planning?(planning_locks, plan)
           fix_plan_state(plan)
           with_error_handling do
             if plan.execution_plan.state != :scheduled
               # in case the previous process was terminated after running the plan, but before deleting the delayed plan record.
               @logger.info("Execution plan #{plan.execution_plan_uuid} is expected to be in 'scheduled' state, was '#{plan.execution_plan.state}', skipping")
-            elsif !plan.start_before.nil? && plan.start_before < check_time
-              @logger.debug "Failing plan #{plan.execution_plan_uuid}"
-              plan.timeout
+              processed_plan_uuids << plan.execution_plan_uuid
             else
               @logger.debug "Executing plan #{plan.execution_plan_uuid}"
-              Executors.run_user_code do
-                plan.plan
-                plan.execute
-              end
+              world.plan_request(plan.execution_plan_uuid)
+              dispatched_plan_uuids << plan.execution_plan_uuid
             end
-            processed_plan_uuids << plan.execution_plan_uuid
           end
         end
         world.persistence.delete_delayed_plans(:execution_plan_uuid => processed_plan_uuids) unless processed_plan_uuids.empty?
@@ -72,6 +69,7 @@
       private
 
       # handle the case, where the process was termintated while planning was in progress before
+      # TODO: Doing execution plan updates in orchestrator is bad
       def fix_plan_state(plan)
         if plan.execution_plan.state == :planning
           @logger.info("Execution plan #{plan.execution_plan_uuid} is expected to be in 'scheduled' state, was '#{plan.execution_plan.state}', auto-fixing")
@@ -79,6 +77,10 @@
           plan.execution_plan.save
         end
       end
+
+      def locked_for_planning?(planning_locks, plan)
+        planning_locks.any? { |lock| lock.execution_plan_id == plan.execution_plan_uuid }
+      end
     end
   end
 end
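Taken together with the dispatcher and director changes below, this moves planning of delayed plans off the delayed executor: it now only publishes a planning request and records which plans were dispatched. The call chain introduced in this release, sketched with the methods it adds:

    # DelayedExecutors::AbstractCore#process
    #   -> world.plan_request(uuid)          # publishes Dispatcher::Planning[uuid]
    #   -> ClientDispatcher routes Planning to AnyExecutor
    #   -> ExecutorDispatcher#perform_planning
    #   -> world.executor.plan(uuid)         # Executors::Parallel#plan
    #   -> Core#handle_planning -> Director#handle_planning
    #   -> PlanningWorkItem#execute          # plans under a Coordinator::PlanningLock,
    #                                        #   then calls plan.execute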
data/lib/dynflow/director.rb CHANGED
@@ -53,7 +53,7 @@ module Dynflow
       end
 
       def self.new_from_hash(hash, *_args)
-        self.new(hash[:execution_plan_id], hash[:queue])
+        self.new(hash[:execution_plan_id], hash[:queue], hash[:sender_orchestrator_id])
       end
     end
 
@@ -108,6 +108,26 @@
       end
     end
 
+    class PlanningWorkItem < WorkItem
+      def execute
+        plan = world.persistence.load_delayed_plan(execution_plan_id)
+        return if plan.nil? || plan.execution_plan.state != :scheduled
+
+        if !plan.start_before.nil? && plan.start_before < Time.now.utc()
+          plan.timeout
+          return
+        end
+
+        world.coordinator.acquire(Coordinator::PlanningLock.new(world, plan.execution_plan_uuid)) do
+          plan.plan
+        end
+        plan.execute
+      rescue => e
+        world.logger.warn e.message
+        world.logger.debug e.backtrace.join("\n")
+      end
+    end
+
     class FinalizeWorkItem < WorkItem
       attr_reader :finalize_steps_data
 
@@ -147,12 +167,18 @@
       @logger = world.logger
       @execution_plan_managers = {}
       @rescued_steps = {}
+      @planning_plans = []
     end
 
     def current_execution_plan_ids
       @execution_plan_managers.keys
     end
 
+    def handle_planning(execution_plan_uuid)
+      @planning_plans << execution_plan_uuid
+      [PlanningWorkItem.new(execution_plan_uuid, :default, @world.id)]
+    end
+
     def start_execution(execution_plan_id, finished)
       manager = track_execution_plan(execution_plan_id, finished)
       return [] unless manager
@@ -176,9 +202,16 @@
     end
 
     def work_finished(work)
-      manager = @execution_plan_managers[work.execution_plan_id]
-      return [] unless manager # skip case when getting event from execution plan that is not running anymore
-      unless_done(manager, manager.what_is_next(work))
+      case work
+      when PlanningWorkItem
+        @planning_plans.delete(work.execution_plan_id)
+        @world.persistence.delete_delayed_plans(:execution_plan_uuid => work.execution_plan_id)
+        []
+      else
+        manager = @execution_plan_managers[work.execution_plan_id]
+        return [] unless manager # skip case when getting event from execution plan that is not running anymore
+        unless_done(manager, manager.what_is_next(work))
+      end
     end
 
     # called when there was an unhandled exception during the execution
data/lib/dynflow/dispatcher/client_dispatcher.rb CHANGED
@@ -134,7 +134,7 @@ module Dynflow
       def dispatch_request(request, client_world_id, request_id)
         ignore_unknown = false
         executor_id = match request,
-                            (on ~Execution do |execution|
+                            (on ~Execution | ~Planning do |execution|
                               AnyExecutor
                             end),
                             (on ~Event do |event|
data/lib/dynflow/dispatcher/executor_dispatcher.rb CHANGED
@@ -9,6 +9,7 @@ module Dynflow
 
       def handle_request(envelope)
         match(envelope.message,
+              on(Planning) { perform_planning(envelope, envelope.message)},
               on(Execution) { perform_execution(envelope, envelope.message) },
               on(Event) { perform_event(envelope, envelope.message) },
               on(Status) { get_execution_status(envelope, envelope.message) })
@@ -16,6 +17,13 @@
 
       protected
 
+      def perform_planning(envelope, planning)
+        @world.executor.plan(planning.execution_plan_id)
+        respond(envelope, Accepted)
+      rescue Dynflow::Error => e
+        respond(envelope, Failed[e.message])
+      end
+
       def perform_execution(envelope, execution)
         allocate_executor(execution.execution_plan_id, envelope.sender_id, envelope.request_id)
         execution_lock = Coordinator::ExecutionLock.new(@world, execution.execution_plan_id, envelope.sender_id, envelope.request_id)
data/lib/dynflow/dispatcher.rb CHANGED
@@ -14,6 +14,10 @@ module Dynflow
       fields! execution_plan_id: String
     end
 
+    Planning = type do
+      fields! execution_plan_id: String
+    end
+
     Ping = type do
       fields! receiver_id: String,
               use_cache: type { variants TrueClass, FalseClass }
@@ -24,7 +28,7 @@
           execution_plan_id: type { variants String, NilClass }
     end
 
-    variants Event, Execution, Ping, Status
+    variants Event, Execution, Ping, Status, Planning
   end
 
   Response = Algebrick.type do
data/lib/dynflow/execution_plan/hooks.rb CHANGED
@@ -21,7 +21,7 @@ module Dynflow
       # @param class_name [Class] class of the hook to be run
       # @param on [Symbol, Array<Symbol>] when should the hook be run, one of {HOOK_KINDS}
       # @return [void]
-      def use(class_name, on: HOOK_KINDS)
+      def use(class_name, on: ExecutionPlan.states)
         on = Array[on] unless on.kind_of?(Array)
         validate_kinds!(on)
         if hooks[class_name]
data/lib/dynflow/execution_plan/steps/abstract_flow_step.rb CHANGED
@@ -31,6 +31,7 @@ module Dynflow
           action = persistence.load_action(self)
           yield action
           persistence.save_action(execution_plan_id, action)
+          persistence.save_output_chunks(execution_plan_id, action.id, action.pending_output_chunks)
           save
 
           return self
data/lib/dynflow/execution_plan.rb CHANGED
@@ -254,6 +254,7 @@ module Dynflow
     def delay(caller_action, action_class, delay_options, *args)
       save
       @root_plan_step = add_scheduling_step(action_class, caller_action)
+      run_hooks(:pending)
       serializer = root_plan_step.delay(delay_options, args)
       delayed_plan = DelayedPlan.new(@world,
                                      id,
@@ -276,7 +277,9 @@
       raise "Unexpected options #{options.keys.inspect}" unless options.empty?
       save
       @root_plan_step = add_plan_step(action_class, caller_action)
-      @root_plan_step.save
+      step = @root_plan_step.save
+      run_hooks(:pending)
+      step
     end
 
     def plan(*args)
data/lib/dynflow/executors/abstract/core.rb CHANGED
@@ -35,6 +35,15 @@ module Dynflow
         handle_work(@director.handle_event(event))
       end
 
+      def handle_planning(execution_plan_id)
+        if terminating?
+          raise Dynflow::Error,
+                "cannot accept event: #{event} core is terminating"
+        end
+
+        handle_work(@director.handle_planning(execution_plan_id))
+      end
+
       def plan_events(delayed_events)
         delayed_events.each do |event|
           @world.plan_event(event.execution_plan_id, event.step_id, event.event, event.time, optional: event.optional)
data/lib/dynflow/executors/parallel.rb CHANGED
@@ -38,6 +38,10 @@ module Dynflow
       future
     end
 
+    def plan(execution_plan_id)
+      @core.ask([:handle_planning, execution_plan_id])
+    end
+
     def delayed_event(director_event)
       @core.ask([:handle_event, director_event])
       director_event.result
data/lib/dynflow/extensions/msgpack.rb ADDED
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+require 'msgpack'
+
+module Dynflow
+  module Extensions
+    module MsgPack
+      module Time
+        def to_msgpack(out = ''.dup)
+          ::MessagePack.pack(self, out)
+          out
+        end
+      end
+
+      ::Time.include ::Dynflow::Extensions::MsgPack::Time
+      ::MessagePack::DefaultFactory.register_type(0x00, Time, packer: MessagePack::Time::Packer, unpacker: MessagePack::Time::Unpacker)
+
+      begin
+        require 'active_support/time_with_zone'
+        unpacker = ->(payload) do
+          tv = MessagePack::Timestamp.from_msgpack_ext(payload)
+          ::Time.zone.at(tv.sec, tv.nsec, :nanosecond)
+        end
+        ::ActiveSupport::TimeWithZone.include ::Dynflow::Extensions::MsgPack::Time
+        ::MessagePack::DefaultFactory.register_type(0x01, ActiveSupport::TimeWithZone, packer: MessagePack::Time::Packer, unpacker: unpacker)
+
+        ::DateTime.include ::Dynflow::Extensions::MsgPack::Time
+        ::MessagePack::DefaultFactory.register_type(0x02, DateTime,
+                                                    packer: ->(datetime) { MessagePack::Time::Packer.(datetime.to_time) },
+                                                    unpacker: ->(payload) { unpacker.(payload).to_datetime })
+
+        ::Date.include ::Dynflow::Extensions::MsgPack::Time
+        ::MessagePack::DefaultFactory.register_type(0x03, Date,
+                                                    packer: ->(date) { MessagePack::Time::Packer.(date.to_time) },
+                                                    unpacker: ->(payload) { unpacker.(payload).to_date })
+      rescue LoadError
+        # This is fine
+        nil
+      end
+    end
+  end
+end
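With these ext types registered on MessagePack::DefaultFactory, time-like values survive a pack/unpack round trip with their class intact, which the msgpack-backed persistence below relies on. A quick round-trip sketch, mirroring the tests added in this release:

    require 'dynflow'

    time = Time.now
    packed = time.to_msgpack            # uses ext type 0x00 registered above
    MessagePack.unpack(packed) == time  # => true, and the result is a Time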
data/lib/dynflow/extensions.rb ADDED
@@ -0,0 +1,6 @@
+# frozen_string_literal: true
+module Dynflow
+  module Extensions
+    require 'dynflow/extensions/msgpack'
+  end
+end
data/lib/dynflow/persistence.rb CHANGED
@@ -46,6 +46,16 @@ module Dynflow
       adapter.save_action(execution_plan_id, action.id, action.to_hash)
     end
 
+    def save_output_chunks(execution_plan_id, action_id, chunks)
+      return if chunks.empty?
+
+      adapter.save_output_chunks(execution_plan_id, action_id, chunks)
+    end
+
+    def load_output_chunks(execution_plan_id, action_id)
+      adapter.load_output_chunks(execution_plan_id, action_id)
+    end
+
     def find_execution_plans(options)
       adapter.find_execution_plans(options).map do |execution_plan_hash|
         ExecutionPlan.new_from_hash(execution_plan_hash, @world)
data/lib/dynflow/persistence_adapters/sequel.rb CHANGED
@@ -1,9 +1,10 @@
 # frozen_string_literal: true
 require 'sequel'
-require 'multi_json'
+require 'msgpack'
 require 'fileutils'
 require 'csv'
 
+# rubocop:disable Metrics/ClassLength
 module Dynflow
   module PersistenceAdapters
 
@@ -37,12 +38,14 @@ module Dynflow
                              class action_class execution_plan_uuid queue),
                   envelope: %w(receiver_id),
                   coordinator_record: %w(id owner_id class),
-                  delayed: %w(execution_plan_uuid start_at start_before args_serializer frozen)}
+                  delayed: %w(execution_plan_uuid start_at start_before args_serializer frozen),
+                  output_chunk: %w(execution_plan_uuid action_id kind timestamp) }
 
     SERIALIZABLE_COLUMNS = { action: %w(input output),
                              delayed: %w(serialized_args),
                              execution_plan: %w(run_flow finalize_flow execution_history step_ids),
-                             step: %w(error children) }
+                             step: %w(error children),
+                             output_chunk: %w(chunk) }
 
     def initialize(config)
       migrate = true
@@ -83,15 +86,17 @@
           table(:delayed).where(execution_plan_uuid: uuids).delete
 
           steps = table(:step).where(execution_plan_uuid: uuids)
-          backup_to_csv(steps, backup_dir, 'steps.csv') if backup_dir
+          backup_to_csv(:step, steps, backup_dir, 'steps.csv') if backup_dir
           steps.delete
 
+          output_chunks = table(:output_chunk).where(execution_plan_uuid: uuids).delete
+
           actions = table(:action).where(execution_plan_uuid: uuids)
-          backup_to_csv(actions, backup_dir, 'actions.csv') if backup_dir
+          backup_to_csv(:action, actions, backup_dir, 'actions.csv') if backup_dir
           actions.delete
 
           execution_plans = table(:execution_plan).where(uuid: uuids)
-          backup_to_csv(execution_plans, backup_dir, 'execution_plans.csv') if backup_dir
+          backup_to_csv(:execution_plan, execution_plans, backup_dir, 'execution_plans.csv') if backup_dir
           count += execution_plans.delete
         end
       end
@@ -173,6 +178,18 @@
       save :action, { execution_plan_uuid: execution_plan_id, id: action_id }, value, with_data: false
     end
 
+    def save_output_chunks(execution_plan_id, action_id, chunks)
+      chunks.each do |chunk|
+        chunk[:execution_plan_uuid] = execution_plan_id
+        chunk[:action_id] = action_id
+        save :output_chunk, {}, chunk, with_data: false
+      end
+    end
+
+    def load_output_chunks(execution_plan_id, action_id)
+      load_records :output_chunk, { execution_plan_uuid: execution_plan_id, action_id: action_id }, [:timestamp, :kind, :chunk]
+    end
+
     def connector_feature!
       unless @additional_responsibilities[:connector]
         raise "The sequel persistence adapter connector feature used but not enabled in additional_features"
@@ -265,14 +282,16 @@
                step: :dynflow_steps,
                envelope: :dynflow_envelopes,
                coordinator_record: :dynflow_coordinator_records,
-               delayed: :dynflow_delayed_plans }
+               delayed: :dynflow_delayed_plans,
+               output_chunk: :dynflow_output_chunks }
 
     def table(which)
       db[TABLES.fetch(which)]
     end
 
     def initialize_db(db_path)
-      ::Sequel.connect db_path
+      logger = Logger.new($stderr) if ENV['DYNFLOW_SQL_LOG']
+      ::Sequel.connect db_path, logger: logger
    end
 
     def self.migrations_path
@@ -281,10 +300,15 @@
 
     def prepare_record(table_name, value, base = {}, with_data = true)
       record = base.dup
-      if with_data && table(table_name).columns.include?(:data)
+      has_data_column = table(table_name).columns.include?(:data)
+      if with_data && has_data_column
         record[:data] = dump_data(value)
       else
-        record[:data] = nil
+        if has_data_column
+          record[:data] = nil
+        else
+          record.delete(:data)
+        end
         record.merge! serialize_columns(table_name, value)
       end
 
@@ -339,7 +363,11 @@
       records = with_retry do
         filtered = table.filter(Utils.symbolize_keys(condition))
         # Filter out requested columns which the table doesn't have, load data just in case
-        filtered = filtered.select(:data, *(table.columns & keys)) unless keys.nil?
+        unless keys.nil?
+          columns = table.columns & keys
+          columns |= [:data] if table.columns.include?(:data)
+          filtered = filtered.select(*columns)
+        end
         filtered.all
       end
       records = records.map { |record| load_data(record, what) }
@@ -355,11 +383,11 @@
       hash = if record[:data].nil?
                SERIALIZABLE_COLUMNS.fetch(what, []).each do |key|
                  key = key.to_sym
-                 record[key] = MultiJson.load(record[key]) unless record[key].nil?
+                 record[key] = MessagePack.unpack((record[key])) unless record[key].nil?
                end
               record
             else
-              MultiJson.load(record[:data])
+              MessagePack.unpack(record[:data])
             end
       Utils.indifferent_hash(hash)
     end
@@ -368,7 +396,7 @@
       FileUtils.mkdir_p(backup_dir) unless File.directory?(backup_dir)
     end
 
-    def backup_to_csv(dataset, backup_dir, file_name)
+    def backup_to_csv(table_name, dataset, backup_dir, file_name)
       ensure_backup_dir(backup_dir)
       csv_file = File.join(backup_dir, file_name)
       appending = File.exist?(csv_file)
@@ -376,7 +404,12 @@
       File.open(csv_file, 'a') do |csv|
         csv << columns.to_csv unless appending
         dataset.each do |row|
-          csv << columns.collect { |col| row[col] }.to_csv
+          values = columns.map do |col|
+            value = row[col]
+            value = value.unpack('H*').first if value && SERIALIZABLE_COLUMNS.fetch(table_name, []).include?(col.to_s)
+            value
+          end
+          csv << values.to_csv
         end
       end
       dataset
@@ -394,7 +427,8 @@
 
     def dump_data(value)
       return if value.nil?
-      MultiJson.dump Type!(value, Hash, Array, Integer)
+      packed = MessagePack.pack(Type!(value, Hash, Array, Integer, String))
+      ::Sequel.blob(packed)
     end
 
     def paginate(data_set, options)
@@ -477,3 +511,4 @@
     end
   end
 end
+# rubocop:enable Metrics/ClassLength
data/lib/dynflow/persistence_adapters/sequel_migrations/021_create_output_chunks.rb ADDED
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+Sequel.migration do
+  up do
+    type = database_type
+    create_table(:dynflow_output_chunks) do
+      primary_key :id
+
+      column_properties = if type.to_s.include?('postgres')
+                            {type: :uuid}
+                          else
+                            {type: String, size: 36, fixed: true, null: false}
+                          end
+      foreign_key :execution_plan_uuid, :dynflow_execution_plans, **column_properties
+      index :execution_plan_uuid
+
+      column :action_id, Integer, null: false
+      foreign_key [:execution_plan_uuid, :action_id], :dynflow_actions,
+                  name: :dynflow_output_chunks_execution_plan_uuid_fkey1
+      index [:execution_plan_uuid, :action_id]
+
+      column :chunk, String, text: true
+      column :kind, String
+      column :timestamp, Time, null: false
+    end
+  end
+
+  down do
+    drop_table(:dynflow_output_chunks)
+  end
+end
data/lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb ADDED
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'multi_json'
+require 'msgpack'
+
+def table_pkeys(table)
+  case table
+  when :dynflow_execution_plans
+    [:uuid]
+  when :dynflow_actions, :dynflow_steps
+    [:execution_plan_uuid, :id]
+  when :dynflow_coordinator_records
+    [:id, :class]
+  when :dynflow_delayed_plans
+    [:execution_plan_uuid]
+  when :dynflow_envelopes
+    [:id]
+  when :dynflow_output_chunks
+    [:chunk]
+  else
+    raise "Unknown table '#{table}'"
+  end
+end
+
+def conditions_for_row(table, row)
+  row.slice(*table_pkeys(table))
+end
+
+def migrate_table(table, from_names, to_names, new_type)
+  alter_table(table) do
+    to_names.each do |new|
+      add_column new, new_type
+    end
+  end
+
+  relevant_columns = table_pkeys(table) | from_names
+
+  from(table).select(*relevant_columns).each do |row|
+    update = from_names.zip(to_names).reduce({}) do |acc, (from, to)|
+      row[from].nil? ? acc : acc.merge(to => yield(row[from]))
+    end
+    next if update.empty?
+    from(table).where(conditions_for_row(table, row)).update(update)
+  end
+
+  from_names.zip(to_names).each do |old, new|
+    alter_table(table) do
+      drop_column old
+    end
+
+    if database_type == :mysql
+      type = new_type == File ? 'blob' : 'mediumtext'
+      run "ALTER TABLE #{table} CHANGE COLUMN `#{new}` `#{old}` #{type};"
+    else
+      rename_column table, new, old
+    end
+  end
+end
+
+Sequel.migration do
+
+  TABLES = {
+    :dynflow_actions => [:data, :input, :output],
+    :dynflow_coordinator_records => [:data],
+    :dynflow_delayed_plans => [:serialized_args, :data],
+    :dynflow_envelopes => [:data],
+    :dynflow_execution_plans => [:run_flow, :finalize_flow, :execution_history, :step_ids],
+    :dynflow_steps => [:error, :children],
+    :dynflow_output_chunks => [:chunk]
+  }
+
+  up do
+    TABLES.each do |table, columns|
+      new_columns = columns.map { |c| "#{c}_blob" }
+
+      migrate_table table, columns, new_columns, File do |data|
+        ::Sequel.blob(MessagePack.pack(MultiJson.load(data)))
+      end
+    end
+  end
+
+  down do
+    TABLES.each do |table, columns|
+      new_columns = columns.map { |c| c + '_text' }
+      migrate_table table, columns, new_columns, String do |data|
+        MultiJson.dump(MessagePack.unpack(data))
+      end
+    end
+  end
+end
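The conversion at the heart of the migration is a plain re-encode of each serialized column from JSON text to a MessagePack blob; the down block is its exact inverse. In isolation (Sequel.blob only tags the string as binary for the database driver):

    require 'sequel'
    require 'multi_json'
    require 'msgpack'

    json = MultiJson.dump('class' => 'Dynflow::Flows::Atom', 'step_id' => 1)
    blob = Sequel.blob(MessagePack.pack(MultiJson.load(json)))  # up
    MultiJson.dump(MessagePack.unpack(blob)) == json            # down; => true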
data/lib/dynflow/persistence_adapters/sequel_migrations/023_sqlite_workarounds.rb ADDED
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+tables = [:dynflow_actions, :dynflow_delayed_plans, :dynflow_steps, :dynflow_output_chunks]
+Sequel.migration do
+  up do
+    if database_type == :sqlite && Gem::Version.new(SQLite3::SQLITE_VERSION) <= Gem::Version.new('3.7.17')
+      tables.each do |table|
+        alter_table(table) { drop_foreign_key [:execution_plan_uuid] }
+      end
+    end
+  end
+
+  down do
+    if database_type == :sqlite && Gem::Version.new(SQLite3::SQLITE_VERSION) <= Gem::Version.new('3.7.17')
+      tables.each do |table|
+        alter_table(table) { add_foreign_key [:execution_plan_uuid], :dynflow_execution_plans }
+      end
+    end
+  end
+end
data/lib/dynflow/serializable.rb CHANGED
@@ -45,12 +45,12 @@ module Dynflow
       if values.size == 1
         value = values.first
         case value
-        when String, Numeric, Symbol, TrueClass, FalseClass, NilClass, Time
-          value
         when Hash
           value.inject({}) { |h, (k, v)| h.update k => recursive_to_hash(v) }
         when Array
           value.map { |v| recursive_to_hash v }
+        when ->(v) { v.respond_to?(:to_msgpack) }
+          value
         else
           value.to_hash
         end
data/lib/dynflow/testing/dummy_coordinator.rb ADDED
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+module Dynflow
+  module Testing
+    class DummyCoordinator
+      def find_records(*args)
+        []
+      end
+    end
+  end
+end
data/lib/dynflow/testing/dummy_planned_action.rb CHANGED
@@ -15,6 +15,10 @@ module Dynflow
         @plan_input = args
         self
       end
+
+      def run_step_id
+        @run_step_id ||= Testing.get_id
+      end
     end
   end
 end
data/lib/dynflow/testing/dummy_world.rb CHANGED
@@ -5,7 +5,7 @@ module Dynflow
     extend Mimic
     mimic! World
 
-    attr_reader :clock, :executor, :middleware
+    attr_reader :clock, :executor, :middleware, :coordinator
     attr_accessor :action
 
     def initialize(_config = nil)
@@ -13,6 +13,7 @@
       @clock = ManagedClock.new
       @executor = DummyExecutor.new(self)
       @middleware = Middleware::World.new
+      @coordinator = DummyCoordinator.new
     end
 
     def action_logger
data/lib/dynflow/testing.rb CHANGED
@@ -19,6 +19,7 @@ module Dynflow
 
     require 'dynflow/testing/mimic'
     require 'dynflow/testing/managed_clock'
+    require 'dynflow/testing/dummy_coordinator'
     require 'dynflow/testing/dummy_world'
     require 'dynflow/testing/dummy_executor'
     require 'dynflow/testing/dummy_execution_plan'
data/lib/dynflow/version.rb CHANGED
@@ -1,4 +1,4 @@
 # frozen_string_literal: true
 module Dynflow
-  VERSION = '1.5.0'
+  VERSION = '1.6.1'
 end
data/lib/dynflow/world.rb CHANGED
@@ -200,6 +200,14 @@ module Dynflow
       Scheduled[execution_plan.id]
     end
 
+    def plan_elsewhere(action_class, *args)
+      execution_plan = ExecutionPlan.new(self, nil)
+      execution_plan.delay(nil, action_class, {}, *args)
+      plan_request(execution_plan.id)
+
+      Scheduled[execution_plan.id]
+    end
+
     def plan(action_class, *args)
       plan_with_options(action_class: action_class, args: args)
     end
@@ -227,6 +235,10 @@
       publish_request(Dispatcher::Event[execution_plan_id, step_id, event, time, optional], accepted, false)
     end
 
+    def plan_request(execution_plan_id, done = Concurrent::Promises.resolvable_future)
+      publish_request(Dispatcher::Planning[execution_plan_id], done, false)
+    end
+
     def ping(world_id, timeout, done = Concurrent::Promises.resolvable_future)
       publish_request(Dispatcher::Ping[world_id, true], done, false, timeout)
     end
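plan_elsewhere is the client-side entry point for the remote planning above: it persists an execution plan without planning it locally, then publishes a Dispatcher::Planning request so any executor world can pick the planning up. A hedged usage sketch (MyAction is hypothetical):

    scheduled = world.plan_elsewhere(MyAction, 'some', 'args')
    scheduled  # => Scheduled[...] carrying the id of the plan to be planned remotely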
data/lib/dynflow.rb CHANGED
@@ -72,6 +72,7 @@ module Dynflow
   require 'dynflow/throttle_limiter'
   require 'dynflow/telemetry'
   require 'dynflow/config'
+  require 'dynflow/extensions'
 
   if defined? ::ActiveJob
     require 'dynflow/active_job/queue_adapter'
data/test/execution_plan_hooks_test.rb CHANGED
@@ -60,6 +60,18 @@ module Dynflow
       execution_plan_hooks.use :raise_flag_root_only, :on => :stopped
     end
 
+    class PendingAction < ::Dynflow::Action
+      include FlagHook
+
+      execution_plan_hooks.use :raise_flag, :on => :pending
+    end
+
+    class AllTransitionsAction < ::Dynflow::Action
+      include FlagHook
+
+      execution_plan_hooks.use :raise_flag
+    end
+
     class ComposedAction < RootOnlyAction
       def plan
         plan_action(RootOnlyAction)
@@ -161,6 +173,30 @@
         plan.finished.wait!
         _(Flag.raised_count).must_equal 1
       end
+
+      it 'runs the pending hooks when execution plan is created' do
+        refute Flag.raised?
+        plan = world.trigger(PendingAction)
+        plan.finished.wait!
+        _(Flag.raised_count).must_equal 1
+      end
+
+      it 'runs the pending hooks when execution plan is created' do
+        refute Flag.raised?
+        delay = world.delay(PendingAction, { :start_at => Time.now.utc + 180 })
+        delayed_plan = world.persistence.load_delayed_plan(delay.execution_plan_id)
+        delayed_plan.execution_plan.cancel.each(&:wait)
+        _(Flag.raised_count).must_equal 1
+      end
+
+      it 'runs the hook on every state transition' do
+        refute Flag.raised?
+        plan = world.trigger(AllTransitionsAction)
+        plan.finished.wait!
+        # There should be 5 transitions
+        # nothing -> pending -> planning -> planned -> running -> stopped
+        _(Flag.raised_count).must_equal 5
+      end
     end
   end
 end
data/test/extensions_test.rb ADDED
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+require_relative 'test_helper'
+require 'active_support/time'
+
+module Dynflow
+  module ExtensionsTest
+    describe 'msgpack extensions' do
+      before do
+        Thread.current[:time_zone] = ActiveSupport::TimeZone['Europe/Prague']
+      end
+      after { Thread.current[:time_zone] = nil }
+
+      it 'allows {de,}serializing Time' do
+        time = Time.now
+        transformed = MessagePack.unpack(time.to_msgpack)
+        assert_equal transformed, time
+        assert_equal transformed.class, time.class
+      end
+
+      it 'allows {de,}serializing ActiveSupport::TimeWithZone' do
+        time = Time.zone.now
+        transformed = MessagePack.unpack(time.to_msgpack)
+        assert_equal transformed, time
+        assert_equal transformed.class, time.class
+      end
+
+      it 'allows {de,}serializing DateTime' do
+        time = DateTime.now
+        transformed = MessagePack.unpack(time.to_msgpack)
+        assert_equal transformed, time
+        assert_equal transformed.class, time.class
+      end
+
+      it 'allows {de,}serializing Date' do
+        date = DateTime.current
+        transformed = MessagePack.unpack(date.to_msgpack)
+        assert_equal transformed, date
+        assert_equal transformed.class, date.class
+      end
+    end
+  end
+end
data/test/future_execution_test.rb CHANGED
@@ -29,14 +29,17 @@ module Dynflow
     describe 'abstract executor' do
       let(:abstract_delayed_executor) { DelayedExecutors::AbstractCore.new(world) }
 
-      it 'handles wrong plan state' do
+      it 'handles plan in planning state' do
         delayed_plan.execution_plan.state = :planning
         abstract_delayed_executor.send(:process, [delayed_plan], @start_at)
-        _(delayed_plan.execution_plan.state).must_equal :planned
+        _(delayed_plan.execution_plan.state).must_equal :scheduled
+      end
 
+      it 'handles plan in running state' do
         delayed_plan.execution_plan.set_state(:running, true)
         abstract_delayed_executor.send(:process, [delayed_plan], @start_at)
         _(delayed_plan.execution_plan.state).must_equal :running
+        _(world.persistence.load_delayed_plan(delayed_plan.execution_plan_uuid)).must_be :nil?
       end
     end
 
@@ -55,7 +58,7 @@
 
       it 'delays the action' do
         _(execution_plan.steps.count).must_equal 1
-        _(delayed_plan.start_at).must_be_within_delta(@start_at, 0.5)
+        _(delayed_plan.start_at.to_i).must_equal(@start_at.to_i)
         _(history_names.call(execution_plan)).must_equal ['delay']
       end
 
data/web/views/flow_step.erb CHANGED
@@ -31,6 +31,7 @@
 <% end %>
 <%= show_action_data("Input:", action.input) %>
 <%= show_action_data("Output:", action.output) %>
+<%= show_action_data("Chunked output:", action.stored_output_chunks) %>
 <% if step.error %>
   <p>
     <b>Error:</b>
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: dynflow
 version: !ruby/object:Gem::Version
-  version: 1.5.0
+  version: 1.6.1
 platform: ruby
 authors:
 - Ivan Necas
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-05-13 00:00:00.000000000 Z
+date: 2021-09-24 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: multi_json
@@ -25,6 +25,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: msgpack
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 1.3.3
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 1.3.3
 - !ruby/object:Gem::Dependency
   name: apipie-params
   requirement: !ruby/object:Gem::Requirement
@@ -387,6 +401,7 @@ files:
 - doc/pages/source/projects/index.md
 - docker-compose.yml
 - dynflow.gemspec
+- examples/chunked_output_benchmark.rb
 - examples/clock_benchmark.rb
 - examples/example_helper.rb
 - examples/future_execution.rb
@@ -398,6 +413,7 @@ files:
 - examples/sub_plan_concurrency_control.rb
 - examples/sub_plans.rb
 - examples/termination.rb
+- extras/expand/main.go
 - extras/statsd_mapping.conf
 - lib/dynflow.rb
 - lib/dynflow/action.rb
@@ -470,6 +486,8 @@ files:
 - lib/dynflow/executors/sidekiq/redis_locking.rb
 - lib/dynflow/executors/sidekiq/serialization.rb
 - lib/dynflow/executors/sidekiq/worker_jobs.rb
+- lib/dynflow/extensions.rb
+- lib/dynflow/extensions/msgpack.rb
 - lib/dynflow/flows.rb
 - lib/dynflow/flows/abstract.rb
 - lib/dynflow/flows/abstract_composed.rb
@@ -515,6 +533,9 @@ files:
 - lib/dynflow/persistence_adapters/sequel_migrations/018_add_uuid_column.rb
 - lib/dynflow/persistence_adapters/sequel_migrations/019_update_mysql_time_precision.rb
 - lib/dynflow/persistence_adapters/sequel_migrations/020_drop_duplicate_indices.rb
+- lib/dynflow/persistence_adapters/sequel_migrations/021_create_output_chunks.rb
+- lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb
+- lib/dynflow/persistence_adapters/sequel_migrations/023_sqlite_workarounds.rb
 - lib/dynflow/rails.rb
 - lib/dynflow/rails/configuration.rb
 - lib/dynflow/rails/daemon.rb
@@ -536,6 +557,7 @@ files:
 - lib/dynflow/telemetry_adapters/statsd.rb
 - lib/dynflow/testing.rb
 - lib/dynflow/testing/assertions.rb
+- lib/dynflow/testing/dummy_coordinator.rb
 - lib/dynflow/testing/dummy_execution_plan.rb
 - lib/dynflow/testing/dummy_executor.rb
 - lib/dynflow/testing/dummy_planned_action.rb
@@ -578,6 +600,7 @@ files:
 - test/execution_plan_hooks_test.rb
 - test/execution_plan_test.rb
 - test/executor_test.rb
+- test/extensions_test.rb
 - test/flows_test.rb
 - test/future_execution_test.rb
 - test/memory_cosumption_watcher_test.rb
@@ -661,6 +684,7 @@ test_files:
 - test/execution_plan_hooks_test.rb
 - test/execution_plan_test.rb
 - test/executor_test.rb
+- test/extensions_test.rb
 - test/flows_test.rb
 - test/future_execution_test.rb
 - test/memory_cosumption_watcher_test.rb