pallets 0.4.0 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
- SHA1:
3
- metadata.gz: 6bd6ebdb788c39b7d3cb434b60f94d927dacd47c
4
- data.tar.gz: b463178c06885308a2192bbcf3d549c52391acbd
2
+ SHA256:
3
+ metadata.gz: 46a631b6e48f2f1c4efe5bce09c5d74fa2cb064a78d5e546492c6c8ce1dec843
4
+ data.tar.gz: 61f6414e16941de41defb30ac8dc9121703f75251e47422239c311a8b7749787
5
5
  SHA512:
6
- metadata.gz: 7e976084d28bba01327e0a8c41b8a948e27f6b3041d6a3a31665825d66b57225867ec410e9bf4f24afcf13241f41b855e1150601f37de37425fc72ee9582b5a4
7
- data.tar.gz: c05e17653dac7716b9f52958b0bfb295517d614f5f97a00ffb91ad7100e83fbbf20f526f08f8315a1344829d01bd992c4920a7b0881aec4fea9b68eb0d259ff3
6
+ metadata.gz: 4b6f77bd93576dc7dc2c5cf945d52c425a63178740c12fa958f0c18c4ca870da7b02d303c7b8e871504d84a13461f79edc52da2869069322f5a81943bdbd79df
7
+ data.tar.gz: a2a71f4a2343927a94871523b7475f587bb64181e9c6a4654a40111453e2fc2f20e82a02da44adbc32865f68c53a95dd3a9c6f0b641cfd85c7939f1e06d3521f
@@ -0,0 +1 @@
1
+ github: linkyndy
@@ -4,12 +4,8 @@ services:
4
4
  - redis-server
5
5
  cache: bundler
6
6
  rvm:
7
- - 2.3.8
8
- - 2.4.5
9
- - 2.5.3
10
- - 2.6.0
11
- before_install:
12
- # Bundler 2.0 needs a newer RubyGems
13
- - gem update --system
14
- - gem install bundler
7
+ - 2.4.10
8
+ - 2.5.8
9
+ - 2.6.6
10
+ - 2.7.1
15
11
  script: bundle exec rspec
@@ -6,6 +6,36 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
6
6
 
7
7
  ## [Unreleased]
8
8
 
9
+ ## [0.8.0] - 2020-06-09
10
+ ### Added
11
+ - sync output in CLI (#49)
12
+ - support for configuring custom loggers (#50)
13
+
14
+ ### Changed
15
+ - improve job scheduling using jobmasks (#52)
16
+
17
+ ## [0.7.0] - 2020-01-19
18
+ ### Added
19
+ - support for Ruby 2.7 (#46)
20
+
21
+ ## [0.6.0] - 2019-09-02
22
+ ### Added
23
+ - define task aliases in order to reuse tasks within a workflow definition (#44)
24
+ - define anonymous workflows (#45)
25
+
26
+ ## [0.5.1] - 2019-06-01
27
+ ### Changed
28
+ - fix transaction completeness in Appsignal instrumenter (#43)
29
+
30
+ ## [0.5.0] - 2019-05-12
31
+ ### Added
32
+ - wrap job execution with middleware (#38)
33
+ - use `Middleware::JobLogger` for job logging (#39)
34
+ - allow Appsignal instrumentation using `Middleware::AppsignalInstrumenter` (#40)
35
+
36
+ ### Removed
37
+ - support for Ruby 2.3 (#41)
38
+
9
39
  ## [0.4.0] - 2019-04-07
10
40
  ### Added
11
41
  - give up workflow before it finishes by returning `false` in any of its tasks (#25)
@@ -47,6 +77,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
47
77
  ## 0.1.0 - 2018-09-29
48
78
  - Pallets' inception <3
49
79
 
50
- [Unreleased]: https://github.com/linkyndy/pallets/compare/compare/v0.3.0...HEAD
80
+ [Unreleased]: https://github.com/linkyndy/pallets/compare/v0.5.1...HEAD
81
+ [0.5.1]: https://github.com/linkyndy/pallets/compare/v0.5.0...v0.5.1
82
+ [0.5.0]: https://github.com/linkyndy/pallets/compare/v0.4.0...v0.5.0
83
+ [0.4.0]: https://github.com/linkyndy/pallets/compare/v0.3.0...v0.4.0
51
84
  [0.3.0]: https://github.com/linkyndy/pallets/compare/v0.2.0...v0.3.0
52
85
  [0.2.0]: https://github.com/linkyndy/pallets/compare/v0.1.0...v0.2.0
data/Gemfile CHANGED
@@ -3,6 +3,7 @@ source 'https://rubygems.org'
3
3
  gemspec
4
4
 
5
5
  gem 'rake'
6
+ gem 'appsignal'
6
7
 
7
8
  group :test do
8
9
  gem 'rspec'
@@ -0,0 +1,28 @@
1
+ require 'pallets'
2
+
3
+ class Aliases < Pallets::Workflow
4
+ task 'StartSmtpServer'
5
+ task 'SendEmail', as: 'SayHello', depends_on: 'StartSmtpServer'
6
+ task 'SendEmail', as: 'SayGoodbye', depends_on: 'StartSmtpServer'
7
+ task 'StopSmtpServer' => ['SayHello', 'SayGoodbye']
8
+ end
9
+
10
+ class StartSmtpServer < Pallets::Task
11
+ def run
12
+ puts "Starting SMTP server..."
13
+ end
14
+ end
15
+
16
+ class SendEmail < Pallets::Task
17
+ def run
18
+ puts "* sending e-mail"
19
+ end
20
+ end
21
+
22
+ class StopSmtpServer < Pallets::Task
23
+ def run
24
+ puts "Stopped SMTP server"
25
+ end
26
+ end
27
+
28
+ Aliases.new.run
@@ -0,0 +1,13 @@
1
+ require 'pallets'
2
+
3
+ class Anonymous < Pallets::Task
4
+ def run
5
+ puts 'This is anonymous!'
6
+ end
7
+ end
8
+
9
+ workflow = Pallets::Workflow.build do
10
+ task 'Anonymous'
11
+ end
12
+
13
+ workflow.new.run
@@ -0,0 +1,32 @@
1
+ require 'pallets'
2
+ require 'pallets/middleware/appsignal_instrumenter'
3
+
4
+ Appsignal.config = Appsignal::Config.new(
5
+ File.expand_path(File.dirname(__FILE__)),
6
+ "development"
7
+ )
8
+ Appsignal.start
9
+ Appsignal.start_logger
10
+
11
+ Pallets.configure do |c|
12
+ c.middleware << Pallets::Middleware::AppsignalInstrumenter
13
+ end
14
+
15
+ class Appsignaling < Pallets::Workflow
16
+ task 'Signaling'
17
+ task 'ReturningSignal' => 'Signaling'
18
+ end
19
+
20
+ class Signaling < Pallets::Task
21
+ def run
22
+ puts context['signal']
23
+ end
24
+ end
25
+
26
+ class ReturningSignal < Pallets::Task
27
+ def run
28
+ puts 'Ho!'
29
+ end
30
+ end
31
+
32
+ Appsignaling.new(signal: 'Hey').run
@@ -0,0 +1,12 @@
1
+ default: &defaults
2
+ push_api_key: "<%= ENV['APPSIGNAL_PUSH_API_KEY'] %>"
3
+ name: "Pallets"
4
+ debug: true
5
+
6
+ development:
7
+ <<: *defaults
8
+ active: true
9
+
10
+ production:
11
+ <<: *defaults
12
+ active: true
@@ -1,5 +1,13 @@
1
+ require 'logger'
1
2
  require 'pallets'
2
3
 
4
+ class AnnounceProcessing
5
+ def self.call(worker, job, context)
6
+ puts "Starting to process job..."
7
+ yield
8
+ end
9
+ end
10
+
3
11
  Pallets.configure do |c|
4
12
  # Harness 4 Pallets workers per process
5
13
  c.concurrency = 4
@@ -23,6 +31,12 @@ Pallets.configure do |c|
23
31
  # Jobs will be retried up to 5 times upon failure. After that, they will be
24
32
  # given up. Retry times are exponential and happen after: 7, 22, 87, 262, ...
25
33
  c.max_failures = 5
34
+
35
+ # Custom loggers can be used too
36
+ c.logger = Logger.new(STDOUT)
37
+ # Job execution can be wrapped with middleware to provide custom logic.
38
+ # Anything that responds to `call` would do
39
+ c.middleware << AnnounceProcessing
26
40
  end
27
41
 
28
42
  class ConfigSavvy < Pallets::Workflow
@@ -9,6 +9,8 @@ require 'pallets/errors'
9
9
  require 'pallets/graph'
10
10
  require 'pallets/logger'
11
11
  require 'pallets/manager'
12
+ require 'pallets/middleware/job_logger'
13
+ require 'pallets/middleware/stack'
12
14
  require 'pallets/pool'
13
15
  require 'pallets/scheduler'
14
16
  require 'pallets/serializers/base'
@@ -50,18 +52,11 @@ module Pallets
50
52
  end
51
53
  end
52
54
 
53
- def self.logger
54
- @logger ||= begin
55
- logger = Pallets::Logger.new(STDOUT)
56
- # TODO: Ruby 2.4 supports Logger initialization with the arguments below, so
57
- # we can drop this after we drop support for Ruby 2.3
58
- logger.level = Pallets::Logger::INFO
59
- logger.formatter = Pallets::Logger::Formatters::Pretty.new
60
- logger
61
- end
55
+ def self.middleware
56
+ @middleware ||= configuration.middleware
62
57
  end
63
58
 
64
- def self.logger=(logger)
65
- @logger = logger
59
+ def self.logger
60
+ @logger ||= configuration.logger
66
61
  end
67
62
  end
@@ -6,12 +6,12 @@ module Pallets
6
6
  raise NotImplementedError
7
7
  end
8
8
 
9
- def get_context(workflow_id)
9
+ def get_context(wfid)
10
10
  raise NotImplementedError
11
11
  end
12
12
 
13
13
  # Saves a job after successfully processing it
14
- def save(workflow_id, job, context_buffer)
14
+ def save(wfid, jid, job, context_buffer)
15
15
  raise NotImplementedError
16
16
  end
17
17
 
@@ -29,7 +29,7 @@ module Pallets
29
29
  raise NotImplementedError
30
30
  end
31
31
 
32
- def run_workflow(workflow_id, jobs_with_dependencies, context)
32
+ def run_workflow(wfid, jobs, jobmasks, context)
33
33
  raise NotImplementedError
34
34
  end
35
35
  end
@@ -9,6 +9,7 @@ module Pallets
9
9
  RETRY_SET_KEY = 'retry-set'
10
10
  GIVEN_UP_SET_KEY = 'given-up-set'
11
11
  WORKFLOW_QUEUE_KEY = 'workflow-queue:%s'
12
+ JOBMASK_KEY = 'jobmask:%s'
12
13
  CONTEXT_KEY = 'context:%s'
13
14
  REMAINING_KEY = 'remaining:%s'
14
15
 
@@ -41,11 +42,11 @@ module Pallets
41
42
  end
42
43
  end
43
44
 
44
- def save(wfid, job, context_buffer)
45
+ def save(wfid, jid, job, context_buffer)
45
46
  @pool.execute do |client|
46
47
  client.evalsha(
47
48
  @scripts['save'],
48
- [WORKFLOW_QUEUE_KEY % wfid, QUEUE_KEY, RELIABILITY_QUEUE_KEY, RELIABILITY_SET_KEY, CONTEXT_KEY % wfid, REMAINING_KEY % wfid],
49
+ [WORKFLOW_QUEUE_KEY % wfid, QUEUE_KEY, RELIABILITY_QUEUE_KEY, RELIABILITY_SET_KEY, CONTEXT_KEY % wfid, REMAINING_KEY % wfid, JOBMASK_KEY % jid],
49
50
  context_buffer.to_a << job
50
51
  )
51
52
  end
@@ -81,13 +82,14 @@ module Pallets
81
82
  end
82
83
  end
83
84
 
84
- def run_workflow(wfid, jobs_with_order, context_buffer)
85
+ def run_workflow(wfid, jobs, jobmasks, context_buffer)
85
86
  @pool.execute do |client|
86
87
  client.multi do
88
+ jobmasks.each { |jid, jobmask| client.zadd(JOBMASK_KEY % jid, jobmask) }
87
89
  client.evalsha(
88
90
  @scripts['run_workflow'],
89
91
  [WORKFLOW_QUEUE_KEY % wfid, QUEUE_KEY, REMAINING_KEY % wfid],
90
- jobs_with_order
92
+ jobs
91
93
  )
92
94
  client.hmset(CONTEXT_KEY % wfid, *context_buffer.to_a) unless context_buffer.empty?
93
95
  end
@@ -6,9 +6,8 @@ redis.call("SET", KEYS[3], eta)
6
6
 
7
7
  -- Queue jobs that are ready to be processed (their score is 0) and
8
8
  -- remove queued jobs from the sorted set
9
- local count = redis.call("ZCOUNT", KEYS[1], 0, 0)
10
- if count > 0 then
11
- local work = redis.call("ZRANGEBYSCORE", KEYS[1], 0, 0)
9
+ local work = redis.call("ZRANGEBYSCORE", KEYS[1], 0, 0)
10
+ if #work > 0 then
12
11
  redis.call("LPUSH", KEYS[2], unpack(work))
13
12
  redis.call("ZREM", KEYS[1], unpack(work))
14
13
  end
@@ -10,24 +10,21 @@ if #ARGV > 0 then
10
10
  redis.call("HMSET", KEYS[5], unpack(ARGV))
11
11
  end
12
12
 
13
- -- Decrement all jobs from the sorted set
14
- local all_pending = redis.call("ZRANGE", KEYS[1], 0, -1)
15
- for score, task in pairs(all_pending) do
16
- redis.call("ZINCRBY", KEYS[1], -1, task)
17
- end
13
+ -- Decrement jobs from the sorted set by applying a jobmask
14
+ redis.call("ZUNIONSTORE", KEYS[1], 2, KEYS[1], KEYS[7])
15
+ redis.call("DEL", KEYS[7])
18
16
 
19
17
  -- Queue jobs that are ready to be processed (their score is 0) and
20
18
  -- remove queued jobs from sorted set
21
- local count = redis.call("ZCOUNT", KEYS[1], 0, 0)
22
- if count > 0 then
23
- local work = redis.call("ZRANGEBYSCORE", KEYS[1], 0, 0)
19
+ local work = redis.call("ZRANGEBYSCORE", KEYS[1], 0, 0)
20
+ if #work > 0 then
24
21
  redis.call("LPUSH", KEYS[2], unpack(work))
25
22
  redis.call("ZREM", KEYS[1], unpack(work))
26
23
  end
27
24
 
28
25
  -- Decrement ETA and remove it together with the context if all tasks have
29
26
  -- been processed (ETA is 0)
30
- redis.call("DECR", KEYS[6])
31
- if tonumber(redis.call("GET", KEYS[6])) == 0 then
27
+ local remaining = redis.call("DECR", KEYS[6])
28
+ if remaining == 0 then
32
29
  redis.call("DEL", KEYS[5], KEYS[6])
33
30
  end
@@ -1,5 +1,7 @@
1
1
  require 'optparse'
2
2
 
3
+ $stdout.sync = true
4
+
3
5
  module Pallets
4
6
  class CLI
5
7
  def initialize
@@ -20,6 +20,9 @@ module Pallets
20
20
  # period, it is considered failed, and scheduled to be processed again
21
21
  attr_accessor :job_timeout
22
22
 
23
+ # Custom logger used throughout Pallets
24
+ attr_writer :logger
25
+
23
26
  # Maximum number of failures allowed per job. Can also be configured on a
24
27
  # per task basis
25
28
  attr_accessor :max_failures
@@ -30,6 +33,15 @@ module Pallets
30
33
  # Serializer used for jobs
31
34
  attr_accessor :serializer
32
35
 
36
+ # Middleware used to wrap job execution with custom logic. Acts like a stack
37
+ # and accepts callable objects (lambdas, procs, objects that respond to call)
38
+ # that take three arguments: the worker handling the job, the job hash and
39
+ # the context
40
+ #
41
+ # A minimal example of a middleware is:
42
+ # ->(worker, job, context, &b) { puts 'Hello World!'; b.call }
43
+ attr_reader :middleware
44
+
33
45
  def initialize
34
46
  @backend = :redis
35
47
  @backend_args = {}
@@ -39,10 +51,24 @@ module Pallets
39
51
  @job_timeout = 1_800 # 30 minutes
40
52
  @max_failures = 3
41
53
  @serializer = :json
54
+ @middleware = default_middleware
55
+ end
56
+
57
+ def logger
58
+ @logger || Pallets::Logger.new(STDOUT,
59
+ level: Pallets::Logger::INFO,
60
+ formatter: Pallets::Logger::Formatters::Pretty.new
61
+ )
42
62
  end
43
63
 
44
64
  def pool_size
45
65
  @pool_size || @concurrency + 1
46
66
  end
67
+
68
+ def default_middleware
69
+ Middleware::Stack[
70
+ Middleware::JobLogger
71
+ ]
72
+ end
47
73
  end
48
74
  end
@@ -1,7 +1,13 @@
1
1
  module Pallets
2
2
  module DSL
3
3
  module Workflow
4
- def task(arg, depends_on: nil, max_failures: nil, &block)
4
+ def task(arg=nil, as: nil, depends_on: nil, max_failures: nil, **kwargs)
5
+ # Have to work more to keep Pallets' nice DSL valid in Ruby 2.7
6
+ arg = !kwargs.empty? ? kwargs : arg
7
+ raise ArgumentError, 'Task is incorrectly defined. It must receive '\
8
+ 'either a name, or a name => dependencies pair as '\
9
+ 'the first argument' unless arg
10
+
5
11
  klass, dependencies = case arg
6
12
  when Hash
7
13
  # The `task Foo => Bar` notation
@@ -12,10 +18,13 @@ module Pallets
12
18
  end
13
19
 
14
20
  task_class = klass.to_s
21
+ as ||= task_class
22
+
15
23
  dependencies = Array(dependencies).compact.uniq.map(&:to_s)
16
- graph.add(task_class, dependencies)
24
+ graph.add(as, dependencies)
17
25
 
18
- task_config[task_class] = {
26
+ task_config[as] = {
27
+ 'workflow_class' => self.name,
19
28
  'task_class' => task_class,
20
29
  'max_failures' => max_failures || Pallets.configuration.max_failures
21
30
  }
@@ -9,40 +9,39 @@ module Pallets
9
9
  end
10
10
 
11
11
  def add(node, dependencies)
12
- @nodes[node] = dependencies
12
+ raise WorkflowError, "Task #{node} is already defined in this workflow. "\
13
+ "Use `task '#{node}', as: 'FooBar'` to define an "\
14
+ "alias and reuse task" if nodes.key?(node)
15
+
16
+ nodes[node] = dependencies
13
17
  end
14
18
 
15
19
  def parents(node)
16
- @nodes[node]
20
+ nodes[node]
17
21
  end
18
22
 
19
23
  def empty?
20
- @nodes.empty?
24
+ nodes.empty?
21
25
  end
22
26
 
23
- # Returns nodes topologically sorted, together with their order (number of
24
- # nodes that have to be executed prior)
25
- def sorted_with_order
26
- # Identify groups of nodes that can be executed concurrently
27
- groups = tsort_each.slice_when { |a, b| parents(a) != parents(b) }
28
-
29
- # Assign order to each node
30
- i = 0
31
- groups.flat_map do |group|
32
- group_with_order = group.product([i])
33
- i += group.size
34
- group_with_order
27
+ def each
28
+ return enum_for(__method__) unless block_given?
29
+
30
+ tsort_each do |node|
31
+ yield(node, parents(node))
35
32
  end
36
33
  end
37
34
 
38
35
  private
39
36
 
37
+ attr_reader :nodes
38
+
40
39
  def tsort_each_node(&block)
41
- @nodes.each_key(&block)
40
+ nodes.each_key(&block)
42
41
  end
43
42
 
44
43
  def tsort_each_child(node, &block)
45
- @nodes.fetch(node).each(&block)
44
+ nodes.fetch(node).each(&block)
46
45
  rescue KeyError
47
46
  raise WorkflowError, "Task #{node} is marked as a dependency but not defined"
48
47
  end
@@ -0,0 +1,47 @@
1
+ require 'appsignal'
2
+
3
+ module Pallets
4
+ module Middleware
5
+ class AppsignalInstrumenter
6
+ extend Appsignal::Hooks::Helpers
7
+
8
+ def self.call(worker, job, context)
9
+ job_status = nil
10
+ transaction = Appsignal::Transaction.create(
11
+ SecureRandom.uuid,
12
+ Appsignal::Transaction::BACKGROUND_JOB,
13
+ Appsignal::Transaction::GenericRequest.new(queue_start: job['created_at'])
14
+ )
15
+
16
+ Appsignal.instrument('perform_job.pallets') do
17
+ begin
18
+ yield
19
+ rescue Exception => ex
20
+ job_status = :failed
21
+ transaction.set_error(ex)
22
+ raise
23
+ ensure
24
+ transaction.set_action_if_nil("#{job['task_class']}#run (#{job['workflow_class']})")
25
+ transaction.params = filtered_context(context)
26
+ formatted_metadata(job).each { |kv| transaction.set_metadata(*kv) }
27
+ transaction.set_http_or_background_queue_start
28
+ Appsignal.increment_counter('pallets_job_count', 1, status: job_status || :successful)
29
+ end
30
+ end
31
+ ensure
32
+ Appsignal::Transaction.complete_current!
33
+ end
34
+
35
+ def self.filtered_context(context)
36
+ Appsignal::Utils::HashSanitizer.sanitize(
37
+ context,
38
+ Appsignal.config[:filter_parameters]
39
+ )
40
+ end
41
+
42
+ def self.formatted_metadata(job)
43
+ job.map { |k, v| [k, truncate(string_or_inspect(v))] }
44
+ end
45
+ end
46
+ end
47
+ end
@@ -0,0 +1,26 @@
1
+ module Pallets
2
+ module Middleware
3
+ class JobLogger
4
+ def self.call(worker, job, context)
5
+ Pallets.logger.info 'Started', extract_metadata(worker.id, job)
6
+ result = yield
7
+ Pallets.logger.info 'Done', extract_metadata(worker.id, job)
8
+ result
9
+ rescue => ex
10
+ Pallets.logger.warn "#{ex.class.name}: #{ex.message}", extract_metadata(worker.id, job)
11
+ Pallets.logger.warn ex.backtrace.join("\n"), extract_metadata(worker.id, job) unless ex.backtrace.nil?
12
+ raise
13
+ end
14
+
15
+ def self.extract_metadata(wid, job)
16
+ {
17
+ wid: wid,
18
+ wfid: job['wfid'],
19
+ jid: job['jid'],
20
+ wf: job['workflow_class'],
21
+ tsk: job['task_class'],
22
+ }
23
+ end
24
+ end
25
+ end
26
+ end
@@ -0,0 +1,13 @@
1
+ module Pallets
2
+ module Middleware
3
+ # Array-like class that acts like a stack and additionally provides the
4
+ # means to wrap an operation with callable objects
5
+ class Stack < Array
6
+ def invoke(*args, &block)
7
+ reverse.inject(block) do |memo, middleware|
8
+ lambda { middleware.call(*args, &memo) }
9
+ end.call
10
+ end
11
+ end
12
+ end
13
+ end
@@ -4,12 +4,11 @@ module Pallets
4
4
  module Serializers
5
5
  class Json < Base
6
6
  def dump(data)
7
- # TODO: Remove option after dropping support for Ruby 2.3
8
- JSON.generate(data, quirks_mode: true)
7
+ JSON.generate(data)
9
8
  end
10
9
 
11
10
  def load(data)
12
- JSON.parse(data, quirks_mode: true)
11
+ JSON.parse(data)
13
12
  end
14
13
  end
15
14
  end
@@ -1,3 +1,3 @@
1
1
  module Pallets
2
- VERSION = "0.4.0"
2
+ VERSION = "0.8.0"
3
3
  end
@@ -69,8 +69,6 @@ module Pallets
69
69
  return
70
70
  end
71
71
 
72
- Pallets.logger.info "Started", extract_metadata(job_hash)
73
-
74
72
  context = Context[
75
73
  serializer.load_context(backend.get_context(job_hash['wfid']))
76
74
  ]
@@ -78,7 +76,9 @@ module Pallets
78
76
  task_class = Pallets::Util.constantize(job_hash["task_class"])
79
77
  task = task_class.new(context)
80
78
  begin
81
- task_result = task.run
79
+ task_result = middleware.invoke(self, job_hash, context) do
80
+ task.run
81
+ end
82
82
  rescue => ex
83
83
  handle_job_error(ex, job, job_hash)
84
84
  else
@@ -91,8 +91,6 @@ module Pallets
91
91
  end
92
92
 
93
93
  def handle_job_error(ex, job, job_hash)
94
- Pallets.logger.warn "#{ex.class.name}: #{ex.message}", extract_metadata(job_hash)
95
- Pallets.logger.warn ex.backtrace.join("\n"), extract_metadata(job_hash) unless ex.backtrace.nil?
96
94
  failures = job_hash.fetch('failures', 0) + 1
97
95
  new_job = serializer.dump(job_hash.merge(
98
96
  'failures' => failures,
@@ -106,7 +104,6 @@ module Pallets
106
104
  backend.retry(new_job, job, retry_at)
107
105
  else
108
106
  backend.give_up(new_job, job)
109
- Pallets.logger.info "Gave up after #{failures} failed attempts", extract_metadata(job_hash)
110
107
  end
111
108
  end
112
109
 
@@ -116,22 +113,10 @@ module Pallets
116
113
  'reason' => 'returned_false'
117
114
  ))
118
115
  backend.give_up(new_job, job)
119
- Pallets.logger.info "Gave up after returning false", extract_metadata(job_hash)
120
116
  end
121
117
 
122
118
  def handle_job_success(context, job, job_hash)
123
- backend.save(job_hash['wfid'], job, serializer.dump_context(context.buffer))
124
- Pallets.logger.info "Done", extract_metadata(job_hash)
125
- end
126
-
127
- def extract_metadata(job_hash)
128
- {
129
- wid: id,
130
- wfid: job_hash['wfid'],
131
- jid: job_hash['jid'],
132
- wf: job_hash['workflow_class'],
133
- tsk: job_hash['task_class']
134
- }
119
+ backend.save(job_hash['wfid'], job_hash['jid'], job, serializer.dump_context(context.buffer))
135
120
  end
136
121
 
137
122
  def backoff_in_seconds(count)
@@ -145,5 +130,9 @@ module Pallets
145
130
  def serializer
146
131
  @serializer ||= Pallets.serializer
147
132
  end
133
+
134
+ def middleware
135
+ @middleware ||= Pallets.middleware
136
+ end
148
137
  end
149
138
  end
@@ -4,6 +4,12 @@ module Pallets
4
4
 
5
5
  attr_reader :context
6
6
 
7
+ def self.build(&block)
8
+ Class.new(self).tap do |workflow_class|
9
+ workflow_class.instance_eval(&block)
10
+ end
11
+ end
12
+
7
13
  def initialize(context_hash = {})
8
14
  @id = nil
9
15
  # Passed in context hash needs to be buffered
@@ -14,7 +20,7 @@ module Pallets
14
20
  raise WorkflowError, "#{self.class.name} has no tasks. Workflows "\
15
21
  "must contain at least one task" if self.class.graph.empty?
16
22
 
17
- backend.run_workflow(id, jobs_with_order, serializer.dump_context(context.buffer))
23
+ backend.run_workflow(id, *prepare_jobs, serializer.dump_context(context.buffer))
18
24
  id
19
25
  end
20
26
 
@@ -24,20 +30,29 @@ module Pallets
24
30
 
25
31
  private
26
32
 
27
- def jobs_with_order
28
- self.class.graph.sorted_with_order.map do |task_class, order|
29
- job = serializer.dump(construct_job(task_class))
30
- [order, job]
33
+ def prepare_jobs
34
+ jobs = []
35
+ jobmasks = Hash.new { |h, k| h[k] = [] }
36
+ acc = {}
37
+
38
+ self.class.graph.each do |task_alias, dependencies|
39
+ job_hash = construct_job(task_alias)
40
+ acc[task_alias] = job_hash['jid']
41
+ job = serializer.dump(job_hash)
42
+
43
+ jobs << [dependencies.size, job]
44
+ dependencies.each { |d| jobmasks[acc[d]] << [-1, job] }
31
45
  end
46
+
47
+ [jobs, jobmasks]
32
48
  end
33
49
 
34
- def construct_job(task_class)
35
- {}.tap do |job|
50
+ def construct_job(task_alias)
51
+ Hash[self.class.task_config[task_alias]].tap do |job|
36
52
  job['wfid'] = id
37
- job['jid'] = "J#{Pallets::Util.generate_id(task_class)}".upcase
38
- job['workflow_class'] = self.class.name
53
+ job['jid'] = "J#{Pallets::Util.generate_id(job['task_class'])}".upcase
39
54
  job['created_at'] = Time.now.to_f
40
- end.merge(self.class.task_config[task_class])
55
+ end
41
56
  end
42
57
 
43
58
  def backend
@@ -48,6 +63,10 @@ module Pallets
48
63
  Pallets.serializer
49
64
  end
50
65
 
66
+ def self.name
67
+ @name ||= super || '<Anonymous>'
68
+ end
69
+
51
70
  def self.task_config
52
71
  @task_config ||= {}
53
72
  end
@@ -18,7 +18,7 @@ Gem::Specification.new do |spec|
18
18
  spec.executables = ['pallets']
19
19
  spec.require_paths = ['lib']
20
20
 
21
- spec.required_ruby_version = '>= 2.3'
21
+ spec.required_ruby_version = '>= 2.4'
22
22
 
23
23
  spec.add_dependency 'redis'
24
24
  spec.add_dependency 'msgpack'
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: pallets
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.4.0
4
+ version: 0.8.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Andrei Horak
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2019-04-07 00:00:00.000000000 Z
11
+ date: 2020-06-09 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: redis
@@ -46,6 +46,7 @@ executables:
46
46
  extensions: []
47
47
  extra_rdoc_files: []
48
48
  files:
49
+ - ".github/FUNDING.yml"
49
50
  - ".gitignore"
50
51
  - ".rspec"
51
52
  - ".travis.yml"
@@ -56,6 +57,10 @@ files:
56
57
  - README.md
57
58
  - Rakefile
58
59
  - bin/pallets
60
+ - examples/aliases.rb
61
+ - examples/anonymous.rb
62
+ - examples/appsignal.rb
63
+ - examples/config/appsignal.yml
59
64
  - examples/config_savvy.rb
60
65
  - examples/do_groceries.rb
61
66
  - examples/hello_world.rb
@@ -75,6 +80,9 @@ files:
75
80
  - lib/pallets/graph.rb
76
81
  - lib/pallets/logger.rb
77
82
  - lib/pallets/manager.rb
83
+ - lib/pallets/middleware/appsignal_instrumenter.rb
84
+ - lib/pallets/middleware/job_logger.rb
85
+ - lib/pallets/middleware/stack.rb
78
86
  - lib/pallets/pool.rb
79
87
  - lib/pallets/scheduler.rb
80
88
  - lib/pallets/serializers/base.rb
@@ -98,15 +106,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
98
106
  requirements:
99
107
  - - ">="
100
108
  - !ruby/object:Gem::Version
101
- version: '2.3'
109
+ version: '2.4'
102
110
  required_rubygems_version: !ruby/object:Gem::Requirement
103
111
  requirements:
104
112
  - - ">="
105
113
  - !ruby/object:Gem::Version
106
114
  version: '0'
107
115
  requirements: []
108
- rubyforge_project:
109
- rubygems_version: 2.5.2.3
116
+ rubygems_version: 3.1.2
110
117
  signing_key:
111
118
  specification_version: 4
112
119
  summary: Toy workflow engine, written in Ruby