taskinator 0.0.18 → 0.2.0

Files changed (44)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +25 -0
  3. data/Gemfile.lock +28 -28
  4. data/README.md +29 -0
  5. data/Rakefile +5 -0
  6. data/bin/console +5 -0
  7. data/lib/taskinator/create_process_worker.rb +5 -2
  8. data/lib/taskinator/definition/builder.rb +12 -7
  9. data/lib/taskinator/definition.rb +36 -28
  10. data/lib/taskinator/executor.rb +4 -4
  11. data/lib/taskinator/job_worker.rb +5 -10
  12. data/lib/taskinator/logger.rb +1 -0
  13. data/lib/taskinator/persistence.rb +74 -36
  14. data/lib/taskinator/process.rb +75 -49
  15. data/lib/taskinator/queues/delayed_job.rb +1 -11
  16. data/lib/taskinator/queues/resque.rb +0 -15
  17. data/lib/taskinator/queues/sidekiq.rb +1 -14
  18. data/lib/taskinator/queues.rb +0 -5
  19. data/lib/taskinator/redis_connection.rb +1 -0
  20. data/lib/taskinator/task.rb +57 -57
  21. data/lib/taskinator/task_worker.rb +1 -8
  22. data/lib/taskinator/version.rb +1 -1
  23. data/lib/taskinator.rb +7 -6
  24. data/spec/examples/queue_adapter_examples.rb +0 -10
  25. data/spec/support/test_definition.rb +4 -0
  26. data/spec/support/test_flow.rb +2 -0
  27. data/spec/support/test_flows.rb +54 -3
  28. data/spec/support/test_queue.rb +41 -6
  29. data/spec/taskinator/create_process_worker_spec.rb +12 -3
  30. data/spec/taskinator/definition/builder_spec.rb +39 -9
  31. data/spec/taskinator/definition_spec.rb +19 -27
  32. data/spec/taskinator/executor_spec.rb +19 -1
  33. data/spec/taskinator/job_worker_spec.rb +0 -11
  34. data/spec/taskinator/persistence_spec.rb +122 -7
  35. data/spec/taskinator/process_spec.rb +39 -23
  36. data/spec/taskinator/queues/delayed_job_spec.rb +1 -19
  37. data/spec/taskinator/queues/resque_spec.rb +1 -22
  38. data/spec/taskinator/queues/sidekiq_spec.rb +1 -20
  39. data/spec/taskinator/task_spec.rb +160 -52
  40. data/spec/taskinator/task_worker_spec.rb +0 -17
  41. data/spec/taskinator/test_flows_spec.rb +43 -0
  42. metadata +2 -5
  43. data/lib/taskinator/process_worker.rb +0 -21
  44. data/spec/taskinator/process_worker_spec.rb +0 -51
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: bd4b644d746cddfe50ed5d3c9d984bcd978d3fee
- data.tar.gz: 45f8166dbd3f7f1b5e27d52c31c7e0384a433cc7
+ metadata.gz: 1905dc0aad01e4bd348ad38a40100df9649e3a18
+ data.tar.gz: d1e4aa938c9d3a0d56fc4090f71024dec84e32fd
  SHA512:
- metadata.gz: 34d036f6fedd66b61b73c6530716882a9b019a769431c719273e0b684e216838571cc3c49a7317292662f8dc41b1ff379c0c41480effb3f705ee0e8752a551de
- data.tar.gz: 9fa6364a483ec5d361881b07f26e266ebebbfba07f1b44cb73470c5658e048448f38eece7d531cbb3785219253f83233b75e5589a05eedf06c1eadaeff74fff3
+ metadata.gz: 6cd0c7d2fa8d1f133ddce568ae969fe488401ba9f6311c39143456749b4eea1429edbc63ec080a5a8b60fb84bf312bdcd5758084269b16ae83b7b437877e15d1
+ data.tar.gz: dfcbeced7346f8c0bdcc70e5814e2b2e943ecd10182ac0285a88e8e0bdfc0eb1a227c900390328861bb4012e4feffbf8fe7d41033583d68eb1f25d198f1ef027
data/CHANGELOG.md CHANGED
@@ -1,3 +1,28 @@
+ v0.2.0 - 31 Jul 2015
+ ---
+ Bug fix for `create_process_remotely` so that it returns the process uuid instead of nil.
+ Removed reload functionality, since it isn't used anymore
+ Added missing instrumentation events for task, job and subprocess completed events.
+ Bug fix for when `sequential` or `concurrent` steps don't have any tasks to still continue processing.
+ Refactoring to remove dead code and "reload" functionality.
+ Improvements to console and rake to use console instrumenter.
+ Consolidation of instrumentation events. Added `type` to payload.
+ Improvements to error handling.
+
+ v0.1.1 - 23 Jul 2015 [Yanked]
+ ---
+ Bug fix for option parameter handling.
+
+ v0.1.0 - 23 Jul 2015 [Yanked]
+ ---
+ Fixed issue with persistence of options passed to `create_process` on the respective `Process` and `Task` instances.
+ Improvements to process creation logic.
+ Namespaced instrumentation event names.
+ Added process completed, cancelled and failed instrumentation events.
+ Include additional data in the instrumentation payload. E.g. Process options and percentages.
+ Refactored the way processes/tasks get queued, to prevent unnecessary queuing of contained processes/tasks.
+ Removed `ProcessWorker` since it isn't needed anymore.
+
  v0.0.18 - 14 Jul 2015
  ---
  Fixed issue with `Taskinator::Api::Processes#each` method, which was causing a Segmentation fault.
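To illustrate the `create_process_remotely` fix noted above — a minimal sketch only, in which `MyDefinition` is a hypothetical definition module rather than anything shipped with the gem — the call now returns the uuid of the process being created, which can later be fetched via `Taskinator::Process.fetch`:

```ruby
require 'taskinator'

# Hypothetical definition module, shown only to illustrate the new return value.
module MyDefinition
  extend Taskinator::Definition

  define_process do
    # tasks for the process would be defined here
  end
end

# As of v0.2.0 this returns the uuid of the process that the
# CreateProcessWorker will build, instead of nil.
uuid = MyDefinition.create_process_remotely

# Later, once the process has been created, it can be retrieved by uuid.
process = Taskinator::Process.fetch(uuid)
```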
data/Gemfile.lock CHANGED
@@ -8,7 +8,7 @@ GIT
  PATH
  remote: .
  specs:
- taskinator (0.0.18)
+ taskinator (0.2.0)
  connection_pool (>= 2.2.0)
  json (>= 1.8.2)
  redis (>= 3.2.1)
@@ -17,20 +17,20 @@ PATH
  GEM
  remote: https://rubygems.org/
  specs:
- activesupport (4.2.1)
+ activesupport (4.2.3)
  i18n (~> 0.7)
  json (~> 1.7, >= 1.7.7)
  minitest (~> 5.1)
  thread_safe (~> 0.3, >= 0.3.4)
  tzinfo (~> 1.1)
- byebug (4.0.5)
+ byebug (5.0.0)
  columnize (= 0.9.0)
  celluloid (0.16.0)
  timers (~> 4.0.0)
  coderay (1.1.0)
  columnize (0.9.0)
  connection_pool (2.2.0)
- coveralls (0.8.1)
+ coveralls (0.8.2)
  json (~> 1.8)
  rest-client (>= 1.6.8, < 2)
  simplecov (~> 0.10.0)
@@ -46,21 +46,21 @@ GEM
  http-cookie (1.0.2)
  domain_name (~> 0.5)
  i18n (0.7.0)
- json (1.8.2)
+ json (1.8.3)
  method_source (0.8.2)
  mime-types (2.6.1)
  minitest (5.7.0)
  mono_logger (1.1.0)
- multi_json (1.11.0)
+ multi_json (1.11.2)
  netrc (0.10.3)
  pry (0.10.1)
  coderay (~> 1.1.0)
  method_source (~> 0.8.1)
  slop (~> 3.4)
- pry-byebug (3.1.0)
- byebug (~> 4.0)
+ pry-byebug (3.2.0)
+ byebug (~> 5.0)
  pry (~> 0.10)
- rack (1.6.1)
+ rack (1.6.4)
  rack-protection (1.5.3)
  rack
  rake (10.4.2)
@@ -82,28 +82,28 @@ GEM
  http-cookie (>= 1.0.2, < 2.0)
  mime-types (>= 1.16, < 3.0)
  netrc (~> 0.7)
- rspec (3.2.0)
- rspec-core (~> 3.2.0)
- rspec-expectations (~> 3.2.0)
- rspec-mocks (~> 3.2.0)
- rspec-core (3.2.3)
- rspec-support (~> 3.2.0)
- rspec-expectations (3.2.1)
+ rspec (3.3.0)
+ rspec-core (~> 3.3.0)
+ rspec-expectations (~> 3.3.0)
+ rspec-mocks (~> 3.3.0)
+ rspec-core (3.3.2)
+ rspec-support (~> 3.3.0)
+ rspec-expectations (3.3.1)
  diff-lcs (>= 1.2.0, < 2.0)
- rspec-support (~> 3.2.0)
- rspec-mocks (3.2.1)
+ rspec-support (~> 3.3.0)
+ rspec-mocks (3.3.2)
  diff-lcs (>= 1.2.0, < 2.0)
- rspec-support (~> 3.2.0)
+ rspec-support (~> 3.3.0)
  rspec-sidekiq (2.0.0)
  rspec (~> 3.0, >= 3.0.0)
  sidekiq (>= 2.4.0)
- rspec-support (3.2.2)
- sidekiq (3.3.4)
- celluloid (>= 0.16.0)
- connection_pool (>= 2.1.1)
- json
- redis (>= 3.0.6)
- redis-namespace (>= 1.3.1)
+ rspec-support (3.3.0)
+ sidekiq (3.4.2)
+ celluloid (~> 0.16.0)
+ connection_pool (~> 2.2, >= 2.2.0)
+ json (~> 1.0)
+ redis (~> 3.2, >= 3.2.1)
+ redis-namespace (~> 1.5, >= 1.5.2)
  simplecov (0.10.0)
  docile (~> 1.1.0)
  json (~> 1.8)
@@ -114,14 +114,14 @@ GEM
  rack-protection (~> 1.4)
  tilt (>= 1.3, < 3)
  slop (3.6.0)
- term-ansicolor (1.3.0)
+ term-ansicolor (1.3.2)
  tins (~> 1.0)
  thor (0.19.1)
  thread_safe (0.3.5)
  tilt (2.0.1)
  timers (4.0.1)
  hitimes
- tins (1.5.2)
+ tins (1.5.4)
  tzinfo (1.2.2)
  thread_safe (~> 0.1)
  unf (0.1.4)
data/README.md CHANGED
@@ -606,6 +606,35 @@ Taskinator.configure do |config|
  end
  ```

+ The following instrumentation events are issued:
+
+ | Event | When |
+ |------------------------------------|---------------------------------------------|
+ | `taskinator.process.created` | After a process gets created |
+ | `taskinator.process.saved` | After a process has been persisted to Redis |
+ | `taskinator.process.enqueued` | After a process is enqueued for processing |
+ | `taskinator.process.completed` | After a process has completed processing |
+ | `taskinator.process.cancelled` | After a process has been cancelled |
+ | `taskinator.process.failed` | After a process has failed |
+ | `taskinator.task.enqueued` | After a task has been enqueued |
+ | `taskinator.task.executed` | After a task has executed |
+ | `taskinator.job.enqueued` | After a job has been enqueued |
+ | `taskinator.job.executed` | After a job has executed |
+ | `taskinator.subprocess.enqueued` | After a sub process has been enqueued |
+ | `taskinator.subprocess.executed` | After a sub process has executed |
+
+ For all events, the data included contains the following information:
+
+ | Key | Value |
+ |--------------------------|-------------------------------------------------------|
+ | `:process_uuid` | The UUID of the root process |
+ | `:process_options` | Options hash of the root process |
+ | `:uuid` | The UUID of the respective task, job or sub process |
+ | `:tasks_count` | The total count of tasks for the given process |
+ | `:percentage_completed` | The percentage of completed tasks |
+ | `:percentage_failed` | The percentage of failed tasks |
+ | `:percentage_cancelled` | The percentage of cancelled tasks |
+
  ## Notes

  The persistence logic is decoupled from the implementation, so it is possible to implement another backing store if required.
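As the Rakefile and console changes below show, the instrumenter is pluggable via `Taskinator.configure`. A minimal sketch of a custom subscriber — `LoggingInstrumenter` is hypothetical, not part of the gem — only needs to match the `instrument(event, payload) { ... }` call pattern used throughout this release:

```ruby
require 'taskinator'

# Hypothetical instrumenter: any object responding to
# #instrument(event, payload, &block) can be assigned to config.instrumenter.
class LoggingInstrumenter
  def instrument(event, payload={})
    result = block_given? ? yield : nil
    Taskinator.logger.info("#{event} #{payload.inspect}")
    result
  end
end

Taskinator.configure do |config|
  config.instrumenter = LoggingInstrumenter.new
end
```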
data/Rakefile CHANGED
@@ -9,3 +9,8 @@ task :default => :spec
  require 'resque'
  require 'resque/tasks'
  require 'taskinator'
+
+ Taskinator.configure do |config|
+ config.logger.level = 0 # DEBUG
+ config.instrumenter = Taskinator::ConsoleInstrumenter.new
+ end
data/bin/console CHANGED
@@ -7,5 +7,10 @@ require "taskinator"
  # You can add fixtures and/or initialization code here to make experimenting
  # with your gem easier. You can also use a different console, if you like.

+ Taskinator.configure do |config|
+ config.logger.level = 0 # DEBUG
+ config.instrumenter = Taskinator::ConsoleInstrumenter.new
+ end
+
  require "pry"
  Pry.start
data/lib/taskinator/create_process_worker.rb CHANGED
@@ -19,13 +19,16 @@ module Taskinator
  end

  def perform
- @definition._create_process_(*@args, :uuid => @uuid).enqueue!
+ @definition._create_process_(false, *@args, :uuid => @uuid).enqueue!
  end

  private

- # borrowed from activesupport/lib/active_support/inflector/methods.rb
+ # :nocov:
  def constantize(camel_cased_word)
+
+ # borrowed from activesupport/lib/active_support/inflector/methods.rb
+
  names = camel_cased_word.split('::')

  # Trigger a built-in NameError exception including the ill-formed constant in the message.
data/lib/taskinator/definition/builder.rb CHANGED
@@ -5,18 +5,18 @@ module Taskinator
  attr_reader :process
  attr_reader :definition
  attr_reader :args
- attr_reader :options
+ attr_reader :builder_options

  def initialize(process, definition, *args)
  @process = process
  @definition = definition
+ @builder_options = args.last.is_a?(Hash) ? args.pop : {}
  @args = args
- @options = args.last.is_a?(Hash) ? args.last : {}
  @executor = Taskinator::Executor.new(@definition)
  end

  def option?(key, &block)
- yield if @options.key?(key) && @options[key]
+ yield if builder_options.key?(key) && builder_options[key]
  end

  # defines a sub process of tasks which are executed sequentially
@@ -81,7 +81,7 @@ module Taskinator

  # TODO: decide whether the sub process to dynamically receive arguments

- sub_process = definition.create_sub_process(*@args)
+ sub_process = definition.create_sub_process(*@args, combine_options(options))
  Builder.new(define_sub_process_task(@process, sub_process, options), definition, *@args)
  end

@@ -89,19 +89,19 @@

  def define_step_task(process, method, args, options={})
  define_task(process) {
- Task.define_step_task(process, method, args, options)
+ Task.define_step_task(process, method, args, combine_options(options))
  }
  end

  def define_job_task(process, job, args, options={})
  define_task(process) {
- Task.define_job_task(process, job, args, options)
+ Task.define_job_task(process, job, args, combine_options(options))
  }
  end

  def define_sub_process_task(process, sub_process, options={})
  define_task(process) {
- Task.define_sub_process_task(process, sub_process, options)
+ Task.define_sub_process_task(process, sub_process, combine_options(options))
  }
  sub_process
  end
@@ -110,6 +110,11 @@ module Taskinator
  process.tasks << task = yield
  task
  end
+
+ def combine_options(options={})
+ builder_options.merge(options)
+ end
+
  end
  end
  end
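In effect, a trailing options hash passed to `create_process` is now captured by the builder as `builder_options` and merged, via `combine_options`, into the options of every task it defines. A rough sketch of what that means for a caller — the definition module and the `:priority` option are hypothetical:

```ruby
require 'taskinator'

# Hypothetical definition, used only to illustrate the builder_options change.
module NotificationProcess
  extend Taskinator::Definition

  define_process do
    # via combine_options, this task's options now also include
    # the { :priority => :high } hash passed to create_process below
    task :send_notification
  end

  def send_notification
    # ...
  end
end

# The trailing hash is popped off the argument list by the Builder and
# merged into each task's options rather than being treated as a task argument.
process = NotificationProcess.create_process(:priority => :high)
```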
data/lib/taskinator/definition.rb CHANGED
@@ -33,42 +33,49 @@ module Taskinator
  Process.define_sequential_process_for(definition, options)
  }

- define_singleton_method :_create_process_ do |*args|
+ # called from respective "create_process" methods
+ # parameters can contain options as the last parameter
+ define_singleton_method :_create_process_ do |subprocess, *args|
+ begin

- options = args.last.is_a?(Hash) ? args.pop : {}
+ # TODO: better validation of arguments

- # TODO: better validation of arguments
+ # FIXME: arg_list should only contain an array of symbols

- # FIXME: arg_list should only contain an array of symbols
+ raise ArgumentError, "wrong number of arguments (#{args.length} for #{arg_list.length})" if args.length < arg_list.length

- raise ArgumentError, "wrong number of arguments (#{args.length} for #{arg_list.length})" if args.length < arg_list.length
+ options = (args.last.is_a?(Hash) ? args.last : {})
+ process = factory.call(self, options)

- process = factory.call(self, options)
+ # this may take long... up to users definition
+ Taskinator.instrumenter.instrument('taskinator.process.created', :uuid => process.uuid) do
+ Builder.new(process, self, *args).instance_eval(&block)
+ end

- # this may take long... up to users definition
- Taskinator.instrumenter.instrument(:create_process, :uuid => process.uuid) do
- Builder.new(process, self, *args).instance_eval(&block)
- end
+ # only save "root processes"
+ unless subprocess

- # only save "root processes"
- unless options[:subprocess]
+ # instrument separately
+ Taskinator.instrumenter.instrument('taskinator.process.saved', :uuid => process.uuid) do

- # instrument separately
- Taskinator.instrumenter.instrument(:save_process, :uuid => process.uuid) do
+ # this will visit "sub processes" and persist them too
+ process.save

- # this will visit "sub processes" and persist them too
- process.save
+ # add it to the list of "root processes"
+ Persistence.add_process_to_list(process)

- # add it to the list of "root processes"
- Persistence.add_process_to_list(process)
+ end

  end

- end
-
- # this is the "root" process
- process
+ # this is the "root" process
+ process

+ rescue => e
+ Taskinator.logger.error(e)
+ Taskinator.logger.debug(e.backtrace)
+ raise e
+ end
  end
  end

@@ -80,25 +87,26 @@ module Taskinator
  #
  def create_process(*args)
  assert_valid_process_module
- _create_process_(*args, :sub_process => false)
+ _create_process_(false, *args)
  end

  #
- # returns a placeholder process, with the uuid attribute of the
- # actual process. the callee can call `reload` if required to
- # get the actual process, once it has been built by the CreateProcessWorker
+ # returns the process uuid of the process to be created
+ # the process can be retrieved using this uuid by using
+ # Taskinator::Process.fetch(uuid)
  #
  def create_process_remotely(*args)
  assert_valid_process_module
  uuid = SecureRandom.uuid
+
  Taskinator.queue.enqueue_create_process(self, uuid, args)

- Taskinator::Persistence::LazyLoader.new(Taskinator::Process, uuid, uuid)
+ return uuid
  end

  def create_sub_process(*args)
  assert_valid_process_module
- _create_process_(*args, :subprocess => true)
+ _create_process_(true, *args)
  end

  private
data/lib/taskinator/executor.rb CHANGED
@@ -13,16 +13,16 @@ module Taskinator
  eigen.send(:include, definition)
  end

- def root_key
- @root_key ||= task.root_key
+ def process_uuid
+ task.process_uuid if task
  end

  def uuid
- task.uuid
+ task.uuid if task
  end

  def options
- task.options
+ task.options if task
  end

  end
data/lib/taskinator/job_worker.rb CHANGED
@@ -1,22 +1,17 @@
  module Taskinator
  class JobWorker
+ attr_reader :uuid
+
  def initialize(uuid)
  @uuid = uuid
  end

- # NB: must be provided a block for the implmentation of the job execution
+ # NB: must be provided a block for the implementation of the job execution
  def perform(&block)
  task = Taskinator::Task.fetch(@uuid)
  return if task.paused? || task.cancelled?
- begin
- task.start!
- task.perform(&block)
- task.complete!
- rescue => e
- Taskinator.logger.error(e)
- task.fail!(e)
- raise e
- end
+ task.start!
+ task.perform(&block)
  end
  end
  end
data/lib/taskinator/logger.rb CHANGED
@@ -8,6 +8,7 @@
  require 'time'
  require 'logger'

+ # :nocov:
  module Taskinator
  module Logging

data/lib/taskinator/persistence.rb CHANGED
@@ -40,10 +40,10 @@ module Taskinator
  # querying for the status of an instance
  def state_for(uuid)
  key = key_for(uuid)
- Taskinator.redis do |conn|
- state = conn.hget(key, :state) || 'initial'
- state.to_sym
+ state = Taskinator.redis do |conn|
+ conn.hget(key, :state) || 'initial'
  end
+ state.to_sym
  end

  # fetches the instance for given identifier
@@ -67,7 +67,7 @@
  conn.multi do
  visitor = RedisSerializationVisitor.new(conn, self).visit
  conn.hmset(
- "taskinator:#{self.key}",
+ Taskinator::Process.key_for(uuid),
  :tasks_count, visitor.task_count,
  :tasks_failed, 0,
  :tasks_completed, 0,
@@ -78,19 +78,23 @@
  end
  end

- # this is the persistence key
+ # the persistence key
  def key
  @key ||= self.class.key_for(self.uuid)
  end

- # retrieves the root key associated
- # with the process or task
- def root_key
- @root_key ||= Taskinator.redis do |conn|
- conn.hget(self.key, :root_key)
+ # the root process uuid associated with this process or task
+ def process_uuid
+ @process_uuid ||= Taskinator.redis do |conn|
+ conn.hget(self.key, :process_uuid)
  end
  end

+ # the root process persistence key associated with this process or task
+ def process_key
+ @process_key ||= Taskinator::Process.key_for(process_uuid)
+ end
+
  # retrieves the workflow state
  # this method is called from the workflow gem
  def load_workflow_state
@@ -135,9 +139,10 @@

  def tasks_count
  @tasks_count ||= begin
- Taskinator.redis do |conn|
- conn.hget "taskinator:#{self.root_key}", :tasks_count
- end.to_i
+ count = Taskinator.redis do |conn|
+ conn.hget self.process_key, :tasks_count
+ end
+ count.to_i
  end
  end

@@ -148,14 +153,15 @@
  ).each do |status|

  define_method "count_#{status}" do
- Taskinator.redis do |conn|
- conn.hget "taskinator:#{self.root_key}", status
- end.to_i
+ count = Taskinator.redis do |conn|
+ conn.hget self.process_key, status
+ end
+ count.to_i
  end

  define_method "incr_#{status}" do
  Taskinator.redis do |conn|
- conn.hincrby "taskinator:#{self.root_key}", status, 1
+ conn.hincrby self.process_key, status, 1
  end
  end

@@ -165,6 +171,46 @@

  end

+ # retrieves the process options of the root process
+ # this is so that meta data of the process can be maintained
+ # and accessible to instrumentation subscribers
+ def process_options
+ @process_options ||= begin
+ yaml = Taskinator.redis do |conn|
+ conn.hget(self.process_key, :options)
+ end
+ yaml ? Taskinator::Persistence.deserialize(yaml) : {}
+ end
+ end
+
+ # prepairs the meta data for instrumentation events
+ def instrumentation_payload(additional={})
+
+ # need to cache here, since this method hits redis, so can't be part of multi statement following
+ process_key = self.process_key
+
+ tasks_count, completed_count, cancelled_count, failed_count = Taskinator.redis do |conn|
+ conn.hmget process_key, :tasks_count, :completed, :cancelled, :failed
+ end
+
+ tasks_count = tasks_count.to_f
+ completed_percent = tasks_count > 0 ? (completed_count.to_i / tasks_count) * 100.0 : 0.0
+ cancelled_percent = tasks_count > 0 ? (cancelled_count.to_i / tasks_count) * 100.0 : 0.0
+ failed_percent = tasks_count > 0 ? (failed_count.to_i / tasks_count) * 100.0 : 0.0
+
+ return {
+ :type => self.class.name,
+ :process_uuid => process_uuid,
+ :process_options => process_options,
+ :uuid => uuid,
+ :percentage_failed => failed_percent,
+ :percentage_cancelled => cancelled_percent,
+ :percentage_completed => completed_percent,
+ :tasks_count => tasks_count
+ }.merge(additional)
+
+ end
+
  end

  class RedisSerializationVisitor < Visitor::Base
@@ -181,7 +227,7 @@
  @conn = conn
  @instance = instance
  @key = instance.key
- @root_key = base_visitor.instance.key
+ @root = base_visitor.instance
  @base_visitor = base_visitor
  @task_count = 0
  end
@@ -195,8 +241,8 @@

  @instance.accept(self)

- # add the root key, for easy access later!
- @hmset += [:root_key, @root_key]
+ # add the process uuid and root key, for easy access later!
+ @hmset += [:process_uuid, @root.uuid]

  # NB: splat args
  @conn.hmset(*@hmset)
@@ -359,13 +405,11 @@
  # arbitrary instance to perform it's work
  #
  def lazy_instance_for(base, uuid)
- Taskinator.redis do |conn|
- type = conn.hget(base.key_for(uuid), :type)
- root_key = conn.hget(base.key_for(uuid), :root_key)
-
- klass = Kernel.const_get(type)
- LazyLoader.new(klass, uuid, root_key, @instance_cache)
+ type, process_uuid = Taskinator.redis do |conn|
+ conn.hmget(base.key_for(uuid), :type, :process_uuid)
  end
+ klass = Kernel.const_get(type)
+ LazyLoader.new(klass, uuid, process_uuid, @instance_cache)
  end
  end

@@ -380,29 +424,23 @@
  # E.g. this is useful for tasks which refer to their parent processes
  #

- def initialize(type, uuid, root_key, instance_cache={})
+ def initialize(type, uuid, process_uuid, instance_cache={})
  @type = type
  @uuid = uuid
- @root_key = root_key
+ @process_uuid = process_uuid
  @instance_cache = instance_cache
  end

  # shadows the real methods, but will be the same!
+ attr_reader :process_uuid
  attr_reader :uuid
- attr_reader :root_key
-
- # attempts to reload the actual process
- def reload
- @instance = nil
- __getobj__
- @instance ? true : false
- end

  def __getobj__
  # only fetch the object as needed
  # and memoize for subsequent calls
  @instance ||= @type.fetch(@uuid, @instance_cache)
  end
+
  end

  class << self