pallets 0.5.0 → 0.9.0
- checksums.yaml +4 -4
- data/.github/FUNDING.yml +1 -0
- data/.travis.yml +4 -7
- data/CHANGELOG.md +35 -1
- data/LICENSE +1 -1
- data/README.md +6 -6
- data/examples/aliases.rb +28 -0
- data/examples/anonymous.rb +13 -0
- data/lib/pallets.rb +1 -4
- data/lib/pallets/backends/base.rb +9 -4
- data/lib/pallets/backends/redis.rb +23 -8
- data/lib/pallets/backends/scripts/discard.lua +11 -0
- data/lib/pallets/backends/scripts/give_up.lua +6 -1
- data/lib/pallets/backends/scripts/run_workflow.lua +2 -3
- data/lib/pallets/backends/scripts/save.lua +10 -13
- data/lib/pallets/cli.rb +6 -0
- data/lib/pallets/configuration.rb +5 -0
- data/lib/pallets/dsl/workflow.rb +12 -3
- data/lib/pallets/graph.rb +16 -17
- data/lib/pallets/logger.rb +10 -2
- data/lib/pallets/middleware/appsignal_instrumenter.rb +2 -1
- data/lib/pallets/middleware/job_logger.rb +19 -8
- data/lib/pallets/version.rb +1 -1
- data/lib/pallets/worker.rb +7 -7
- data/lib/pallets/workflow.rb +29 -10
- data/pallets.gemspec +3 -3
- metadata +16 -12
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ede234db1dfe8c744cf11e6e2fde3eb2e982e7764738715acbbad35710422274
+  data.tar.gz: a55b3a714dcc5db2a294b27ca53724a1236111e08f63078bcef5007bda65532c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 74e23087ba192aeffeb7e167d751bfb6f85de4018fb3c16e992ac6119d7670d60862f41a8e1624817a25944409602e41ee38f13adad7b14a38b185d51142b644
+  data.tar.gz: 96ae4f2aa4edf5d27d366e7a35f16527e9d390a45794d306acc109b1be335682c8b1e13e7d708d7eb3451eb8b6ed96db37ba724da08abb13d7b84f3468869826
data/.github/FUNDING.yml
ADDED
@@ -0,0 +1 @@
+github: linkyndy
data/.travis.yml
CHANGED
@@ -4,11 +4,8 @@ services:
   - redis-server
 cache: bundler
 rvm:
-  - 2.4.
-  - 2.5.
-  - 2.6.
-
-  # Bundler 2.0 needs a newer RubyGems
-  - gem update --system
-  - gem install bundler
+  - 2.4.10
+  - 2.5.8
+  - 2.6.6
+  - 2.7.1
 script: bundle exec rspec
data/CHANGELOG.md
CHANGED
@@ -6,6 +6,39 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+## [0.9.0] - 2020-07-05
+### Added
+- limit number of jobs in given up set by number (#56)
+- job duration and metadata to all task logs (#57)
+
+### Changed
+- remove all related workflow keys when giving up on a job (#55)
+- support redis-rb ~> 4.2 (#58)
+
+### Removed
+- support for configuring custom loggers (#57)
+
+## [0.8.0] - 2020-06-09
+### Added
+- sync output in CLI (#49)
+- support for configuring custom loggers (#50)
+
+### Changed
+- improve job scheduling using jobmasks (#52)
+
+## [0.7.0] - 2020-01-19
+### Added
+- support for Ruby 2.7 (#46)
+
+## [0.6.0] - 2019-09-02
+### Added
+- define task aliases in order to reuse tasks within a workflow definition (#44)
+- define anonymous workflows (#45)
+
+## [0.5.1] - 2019-06-01
+### Changed
+- fix transaction completeness in Appsignal instrumenter (#43)
+
 ## [0.5.0] - 2019-05-12
 ### Added
 - wrap job execution with middleware (#38)
@@ -56,7 +89,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## 0.1.0 - 2018-09-29
 - Pallets' inception <3
 
-[Unreleased]: https://github.com/linkyndy/pallets/compare/compare/v0.5.
+[Unreleased]: https://github.com/linkyndy/pallets/compare/compare/v0.5.1...HEAD
+[0.5.1]: https://github.com/linkyndy/pallets/compare/v0.5.0...v0.5.1
 [0.5.0]: https://github.com/linkyndy/pallets/compare/v0.4.0...v0.5.0
 [0.4.0]: https://github.com/linkyndy/pallets/compare/v0.3.0...v0.5.0
 [0.3.0]: https://github.com/linkyndy/pallets/compare/v0.2.0...v0.3.0
data/LICENSE
CHANGED
data/README.md
CHANGED
@@ -2,7 +2,7 @@
 
 [![Build Status](https://travis-ci.com/linkyndy/pallets.svg?branch=master)](https://travis-ci.com/linkyndy/pallets)
 
-Toy workflow engine, written in Ruby
+Simple and reliable workflow engine, written in Ruby
 
 ## It is plain simple!
 
@@ -11,10 +11,10 @@ Toy workflow engine, written in Ruby
 require 'pallets'
 
 class MyWorkflow < Pallets::Workflow
-  task Foo
-  task Bar => Foo
-  task Baz => Foo
-  task Qux => [Bar, Baz]
+  task 'Foo'
+  task 'Bar' => 'Foo'
+  task 'Baz' => 'Foo'
+  task 'Qux' => ['Bar', 'Baz']
 end
 
 class Foo < Pallets::Task
@@ -120,7 +120,7 @@ end
 
 ## Motivation
 
-The main reason for
+The main reason for Pallets' existence was the need of a fast, simple and reliable workflow engine, one that is easily extensible with various backends and serializer, one that does not lose your data and one that is intelligent enough to concurrently schedule a workflow's tasks.
 
 ## Status
 
data/examples/aliases.rb
ADDED
@@ -0,0 +1,28 @@
+require 'pallets'
+
+class Aliases < Pallets::Workflow
+  task 'StartSmtpServer'
+  task 'SendEmail', as: 'SayHello', depends_on: 'StartSmtpServer'
+  task 'SendEmail', as: 'SayGoodbye', depends_on: 'StartSmtpServer'
+  task 'StopSmtpServer' => ['SayHello', 'SayGoodbye']
+end
+
+class StartSmtpServer < Pallets::Task
+  def run
+    puts "Starting SMTP server..."
+  end
+end
+
+class SendEmail < Pallets::Task
+  def run
+    puts "* sending e-mail"
+  end
+end
+
+class StopSmtpServer < Pallets::Task
+  def run
+    puts "Stopped SMTP server"
+  end
+end
+
+Aliases.new.run
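Note: data/examples/anonymous.rb (+13 lines in this release) is not expanded on this page. A minimal sketch of an anonymous workflow, assuming only the `Pallets::Workflow.build` API added below in data/lib/pallets/workflow.rb — the class name and output are illustrative, not the file's actual contents:

    require 'pallets'

    class SayHello < Pallets::Task
      def run
        puts 'Hello!'
      end
    end

    # Workflow.build (added in 0.6.0, #45) creates a one-off subclass and
    # instance_evals the DSL block on it; self.name falls back to '<Anonymous>'.
    workflow = Pallets::Workflow.build do
      task 'SayHello'
    end

    workflow.new.run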
data/lib/pallets.rb
CHANGED
@@ -38,6 +38,7 @@ module Pallets
       cls.new(
         blocking_timeout: configuration.blocking_timeout,
         failed_job_lifespan: configuration.failed_job_lifespan,
+        failed_job_max_count: configuration.failed_job_max_count,
         job_timeout: configuration.job_timeout,
         pool_size: configuration.pool_size,
         **configuration.backend_args
@@ -62,8 +63,4 @@ module Pallets
       formatter: Pallets::Logger::Formatters::Pretty.new
     )
   end
-
-  def self.logger=(logger)
-    @logger = logger
-  end
 end
data/lib/pallets/backends/base.rb
CHANGED
@@ -6,12 +6,12 @@ module Pallets
       raise NotImplementedError
     end
 
-    def get_context(
+    def get_context(wfid)
       raise NotImplementedError
     end
 
     # Saves a job after successfully processing it
-    def save(
+    def save(wfid, jid, job, context_buffer)
       raise NotImplementedError
     end
 
@@ -20,8 +20,13 @@ module Pallets
       raise NotImplementedError
     end
 
+    # Discards malformed job
+    def discard(job)
+      raise NotImplementedError
+    end
+
     # Gives up job after repeteadly failing to process it
-    def give_up(job, old_job)
+    def give_up(wfid, job, old_job)
       raise NotImplementedError
     end
 
@@ -29,7 +34,7 @@ module Pallets
       raise NotImplementedError
     end
 
-    def run_workflow(
+    def run_workflow(wfid, jobs, jobmasks, context)
       raise NotImplementedError
     end
   end
data/lib/pallets/backends/redis.rb
CHANGED
@@ -9,12 +9,15 @@ module Pallets
     RETRY_SET_KEY = 'retry-set'
     GIVEN_UP_SET_KEY = 'given-up-set'
     WORKFLOW_QUEUE_KEY = 'workflow-queue:%s'
+    JOBMASKS_KEY = 'jobmasks:%s'
+    JOBMASK_KEY = 'jobmask:%s'
     CONTEXT_KEY = 'context:%s'
     REMAINING_KEY = 'remaining:%s'
 
-    def initialize(blocking_timeout:, failed_job_lifespan:, job_timeout:, pool_size:, **options)
+    def initialize(blocking_timeout:, failed_job_lifespan:, failed_job_max_count:, job_timeout:, pool_size:, **options)
       @blocking_timeout = blocking_timeout
       @failed_job_lifespan = failed_job_lifespan
+      @failed_job_max_count = failed_job_max_count
       @job_timeout = job_timeout
       @pool = Pallets::Pool.new(pool_size) { ::Redis.new(options) }
 
@@ -41,11 +44,11 @@ module Pallets
       end
     end
 
-    def save(wfid, job, context_buffer)
+    def save(wfid, jid, job, context_buffer)
       @pool.execute do |client|
         client.evalsha(
           @scripts['save'],
-          [WORKFLOW_QUEUE_KEY % wfid, QUEUE_KEY, RELIABILITY_QUEUE_KEY, RELIABILITY_SET_KEY, CONTEXT_KEY % wfid, REMAINING_KEY % wfid],
+          [WORKFLOW_QUEUE_KEY % wfid, QUEUE_KEY, RELIABILITY_QUEUE_KEY, RELIABILITY_SET_KEY, CONTEXT_KEY % wfid, REMAINING_KEY % wfid, JOBMASK_KEY % jid, JOBMASKS_KEY % wfid],
           context_buffer.to_a << job
         )
       end
@@ -61,12 +64,22 @@ module Pallets
       end
     end
 
-    def
+    def discard(job)
       @pool.execute do |client|
         client.evalsha(
-          @scripts['
+          @scripts['discard'],
           [GIVEN_UP_SET_KEY, RELIABILITY_QUEUE_KEY, RELIABILITY_SET_KEY],
-          [Time.now.to_f, job,
+          [Time.now.to_f, job, Time.now.to_f - @failed_job_lifespan, @failed_job_max_count]
+        )
+      end
+    end
+
+    def give_up(wfid, job, old_job)
+      @pool.execute do |client|
+        client.evalsha(
+          @scripts['give_up'],
+          [GIVEN_UP_SET_KEY, RELIABILITY_QUEUE_KEY, RELIABILITY_SET_KEY, JOBMASKS_KEY % wfid, WORKFLOW_QUEUE_KEY % wfid, REMAINING_KEY % wfid, CONTEXT_KEY % wfid],
+          [Time.now.to_f, job, old_job, Time.now.to_f - @failed_job_lifespan, @failed_job_max_count]
         )
       end
     end
@@ -81,13 +94,15 @@ module Pallets
       end
     end
 
-    def run_workflow(wfid,
+    def run_workflow(wfid, jobs, jobmasks, context_buffer)
       @pool.execute do |client|
         client.multi do
+          jobmasks.each { |jid, jobmask| client.zadd(JOBMASK_KEY % jid, jobmask) }
+          client.sadd(JOBMASKS_KEY % wfid, jobmasks.map { |jid, _| JOBMASK_KEY % jid }) unless jobmasks.empty?
           client.evalsha(
             @scripts['run_workflow'],
             [WORKFLOW_QUEUE_KEY % wfid, QUEUE_KEY, REMAINING_KEY % wfid],
-
+            jobs
           )
           client.hmset(CONTEXT_KEY % wfid, *context_buffer.to_a) unless context_buffer.empty?
         end
data/lib/pallets/backends/scripts/discard.lua
ADDED
@@ -0,0 +1,11 @@
+-- Remove job from reliability queue
+redis.call("LREM", KEYS[2], 0, ARGV[2])
+redis.call("ZREM", KEYS[3], ARGV[2])
+
+-- Add job and its fail time (score) to failed sorted set
+redis.call("ZADD", KEYS[1], ARGV[1], ARGV[2])
+
+-- Remove any jobs that have been given up long enough ago (their score is
+-- below given value) and make sure the number of jobs is capped
+redis.call("ZREMRANGEBYSCORE", KEYS[1], "-inf", ARGV[3])
+redis.call("ZREMRANGEBYRANK", KEYS[1], 0, -ARGV[4] - 1)
data/lib/pallets/backends/scripts/give_up.lua
CHANGED
@@ -5,6 +5,11 @@ redis.call("ZREM", KEYS[3], ARGV[3])
 -- Add job and its fail time (score) to failed sorted set
 redis.call("ZADD", KEYS[1], ARGV[1], ARGV[2])
 
+-- Remove all related workflow keys
+local keys = redis.call("SMEMBERS", KEYS[4])
+redis.call("DEL", KEYS[4], KEYS[5], KEYS[6], KEYS[7], unpack(keys))
+
 -- Remove any jobs that have been given up long enough ago (their score is
--- below given value)
+-- below given value) and make sure the number of jobs is capped
 redis.call("ZREMRANGEBYSCORE", KEYS[1], "-inf", ARGV[4])
+redis.call("ZREMRANGEBYRANK", KEYS[1], 0, -ARGV[5] - 1)
data/lib/pallets/backends/scripts/run_workflow.lua
CHANGED
@@ -6,9 +6,8 @@ redis.call("SET", KEYS[3], eta)
 
 -- Queue jobs that are ready to be processed (their score is 0) and
 -- remove queued jobs from the sorted set
-local
-if
-local work = redis.call("ZRANGEBYSCORE", KEYS[1], 0, 0)
+local work = redis.call("ZRANGEBYSCORE", KEYS[1], 0, 0)
+if #work > 0 then
   redis.call("LPUSH", KEYS[2], unpack(work))
   redis.call("ZREM", KEYS[1], unpack(work))
 end
data/lib/pallets/backends/scripts/save.lua
CHANGED
@@ -10,24 +10,21 @@ if #ARGV > 0 then
   redis.call("HMSET", KEYS[5], unpack(ARGV))
 end
 
--- Decrement
-
-
-redis.call("ZINCRBY", KEYS[1], -1, task)
-end
+-- Decrement jobs from the sorted set by applying a jobmask
+redis.call("ZUNIONSTORE", KEYS[1], 2, KEYS[1], KEYS[7])
+redis.call("DEL", KEYS[7])
 
 -- Queue jobs that are ready to be processed (their score is 0) and
 -- remove queued jobs from sorted set
-local
-if
-local work = redis.call("ZRANGEBYSCORE", KEYS[1], 0, 0)
+local work = redis.call("ZRANGEBYSCORE", KEYS[1], 0, 0)
+if #work > 0 then
   redis.call("LPUSH", KEYS[2], unpack(work))
   redis.call("ZREM", KEYS[1], unpack(work))
 end
 
--- Decrement ETA and remove it together with the context if all
--- been processed (ETA is 0)
-redis.call("DECR", KEYS[6])
-if
-redis.call("DEL", KEYS[5], KEYS[6])
+-- Decrement ETA and remove it together with the context and jobmasks if all
+-- tasks have been processed (ETA is 0) or if workflow has been given up (ETA is -1)
+local remaining = redis.call("DECR", KEYS[6])
+if remaining <= 0 then
+  redis.call("DEL", KEYS[5], KEYS[6], KEYS[8])
 end
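Taken together, run_workflow.lua and save.lua implement the jobmask scheduling introduced in #52: each workflow's jobs live in a sorted set scored by their number of unfinished dependencies, saving a job merges in a jobmask of -1 entries for its dependents (ZUNIONSTORE), and anything whose score reaches 0 is queued. A rough plain-Ruby model of that bookkeeping (illustrative only; the gem keeps this state in Redis):

    # Scores = unfinished dependency counts; jobmasks = -1 deltas per dependent.
    scores   = { 'Foo' => 0, 'Bar' => 1, 'Baz' => 1, 'Qux' => 2 }
    jobmasks = { 'Foo' => { 'Bar' => -1, 'Baz' => -1 },
                 'Bar' => { 'Qux' => -1 },
                 'Baz' => { 'Qux' => -1 } }

    # ZRANGEBYSCORE 0 0 followed by ZREM: pop the jobs that are ready to run.
    pop_ready = -> { scores.select { |_, s| s.zero? }.keys.each { |j| scores.delete(j) } }

    queue = pop_ready.call                                            # => ["Foo"]
    jobmasks.delete('Foo').each { |dep, delta| scores[dep] += delta } # "save" Foo
    queue += pop_ready.call                                           # => ["Foo", "Bar", "Baz"]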
data/lib/pallets/cli.rb
CHANGED
@@ -1,5 +1,7 @@
 require 'optparse'
 
+$stdout.sync = true
+
 module Pallets
   class CLI
     def initialize
@@ -65,6 +67,10 @@ module Pallets
           Pallets.configuration.failed_job_lifespan = failed_job_lifespan
         end
 
+        opts.on('-m', '--failed-job-max-count NUM', Integer, 'Maximum number of jobs in the given up set') do |failed_job_max_count|
+          Pallets.configuration.failed_job_max_count = failed_job_max_count
+        end
+
         opts.on('-p', '--pool-size NUM', Integer, 'Size of backend pool') do |pool_size|
           Pallets.configuration.pool_size = pool_size
         end
data/lib/pallets/configuration.rb
CHANGED
@@ -16,6 +16,10 @@ module Pallets
     # this period, jobs will be permanently deleted
     attr_accessor :failed_job_lifespan
 
+    # Maximum number of failed jobs that can be in the given up set. When this
+    # number is reached, the oldest jobs will be permanently deleted
+    attr_accessor :failed_job_max_count
+
     # Number of seconds allowed for a job to be processed. If a job exceeds this
     # period, it is considered failed, and scheduled to be processed again
     attr_accessor :job_timeout
@@ -45,6 +49,7 @@ module Pallets
       @blocking_timeout = 5
       @concurrency = 2
       @failed_job_lifespan = 7_776_000 # 3 months
+      @failed_job_max_count = 1_000
       @job_timeout = 1_800 # 30 minutes
       @max_failures = 3
       @serializer = :json
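The new cap from #56 can be set with the -m/--failed-job-max-count CLI flag above or directly on the configuration object, the same way the CLI does it; a small sketch with illustrative values:

    require 'pallets'

    # Defaults: failed_job_lifespan = 7_776_000 (3 months), failed_job_max_count = 1_000.
    Pallets.configuration.failed_job_lifespan = 86_400 * 30 # keep failed jobs for 30 days
    Pallets.configuration.failed_job_max_count = 500        # and cap the given up set at 500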
data/lib/pallets/dsl/workflow.rb
CHANGED
@@ -1,7 +1,13 @@
 module Pallets
   module DSL
     module Workflow
-      def task(arg, depends_on: nil, max_failures: nil,
+      def task(arg=nil, as: nil, depends_on: nil, max_failures: nil, **kwargs)
+        # Have to work more to keep Pallets' nice DSL valid in Ruby 2.7
+        arg = !kwargs.empty? ? kwargs : arg
+        raise ArgumentError, 'Task is incorrectly defined. It must receive '\
+                             'either a name, or a name => dependencies pair as '\
+                             'the first argument' unless arg
+
         klass, dependencies = case arg
                               when Hash
                                 # The `task Foo => Bar` notation
@@ -12,10 +18,13 @@ module Pallets
         end
 
         task_class = klass.to_s
+        as ||= task_class
+
         dependencies = Array(dependencies).compact.uniq.map(&:to_s)
-        graph.add(
+        graph.add(as, dependencies)
 
-        task_config[
+        task_config[as] = {
+          'workflow_class' => self.name,
           'task_class' => task_class,
           'max_failures' => max_failures || Pallets.configuration.max_failures
         }
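The **kwargs juggling above is needed because, from Ruby 2.7 on, a trailing hash argument with non-Symbol keys such as `task 'Bar' => 'Foo'` can be captured by **kwargs instead of the positional argument, leaving arg nil; folding kwargs back into arg keeps every call style working. Roughly (illustrative):

    task 'Foo'                                                       # arg == 'Foo', kwargs == {}
    task 'Bar' => 'Foo'                                              # arg == nil, kwargs == { 'Bar' => 'Foo' } on 2.7
    task 'SendEmail', as: 'SayHello', depends_on: 'StartSmtpServer'  # explicit keywords, alias via as: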
data/lib/pallets/graph.rb
CHANGED
@@ -9,40 +9,39 @@ module Pallets
     end
 
     def add(node, dependencies)
-
+      raise WorkflowError, "Task #{node} is already defined in this workflow. "\
+                           "Use `task '#{node}', as: 'FooBar'` to define an "\
+                           "alias and reuse task" if nodes.key?(node)
+
+      nodes[node] = dependencies
     end
 
     def parents(node)
-
+      nodes[node]
     end
 
     def empty?
-
+      nodes.empty?
     end
 
-
-
-
-
-
-
-      # Assign order to each node
-      i = 0
-      groups.flat_map do |group|
-        group_with_order = group.product([i])
-        i += group.size
-        group_with_order
+    def each
+      return enum_for(__method__) unless block_given?
+
+      tsort_each do |node|
+        yield(node, parents(node))
       end
     end
 
     private
 
+    attr_reader :nodes
+
     def tsort_each_node(&block)
-
+      nodes.each_key(&block)
     end
 
     def tsort_each_child(node, &block)
-
+      nodes.fetch(node).each(&block)
     rescue KeyError
       raise WorkflowError, "Task #{node} is marked as a dependency but not defined"
     end
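Graph#add now fails fast when a task name is reused without an alias, pointing at the as: option from the DSL; a hypothetical workflow definition that trips the new error:

    class Broken < Pallets::Workflow
      task 'SendEmail'
      task 'SendEmail' # raises Pallets::WorkflowError at class-definition time:
                       # Task SendEmail is already defined in this workflow.
                       # Use `task 'SendEmail', as: 'FooBar'` to define an alias and reuse task
    end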
data/lib/pallets/logger.rb
CHANGED
@@ -5,14 +5,22 @@ module Pallets
   class Logger < ::Logger
     # Overwrite severity methods to add metadata capabilities
     %i[debug info warn error fatal unknown].each do |severity|
-      define_method severity do |message
-
+      define_method severity do |message|
+        metadata = Thread.current[:pallets_log_metadata]
+        return super(message) if metadata.nil?
 
         formatted_metadata = ' ' + metadata.map { |k, v| "#{k}=#{v}" }.join(' ')
         super(formatted_metadata) { message }
       end
     end
 
+    def with_metadata(hash)
+      Thread.current[:pallets_log_metadata] = hash
+      yield
+    ensure
+      Thread.current[:pallets_log_metadata] = nil
+    end
+
     module Formatters
       class Pretty < ::Logger::Formatter
         def call(severity, time, metadata, message)
data/lib/pallets/middleware/appsignal_instrumenter.rb
CHANGED
@@ -25,10 +25,11 @@ module Pallets
         transaction.params = filtered_context(context)
         formatted_metadata(job).each { |kv| transaction.set_metadata(*kv) }
         transaction.set_http_or_background_queue_start
-        Appsignal::Transaction.complete_current!
         Appsignal.increment_counter('pallets_job_count', 1, status: job_status || :successful)
       end
     end
+  ensure
+    Appsignal::Transaction.complete_current!
   end
 
   def self.filtered_context(context)
data/lib/pallets/middleware/job_logger.rb
CHANGED
@@ -2,14 +2,21 @@ module Pallets
   module Middleware
     class JobLogger
       def self.call(worker, job, context)
-
-
-        Pallets.logger.
-
-
-
-
-
+        start_time = current_time
+
+        Pallets.logger.with_metadata(extract_metadata(worker.id, job)) do
+          begin
+            Pallets.logger.info 'Started'
+            result = yield
+            Pallets.logger.info "Done in #{(current_time - start_time).round(3)}s"
+            result
+          rescue => ex
+            Pallets.logger.warn "Failed after #{(current_time - start_time).round(3)}s"
+            Pallets.logger.warn "#{ex.class.name}: #{ex.message}"
+            Pallets.logger.warn ex.backtrace.join("\n") unless ex.backtrace.nil?
+            raise
+          end
+        end
       end
 
       def self.extract_metadata(wid, job)
@@ -21,6 +28,10 @@ module Pallets
           tsk: job['task_class'],
         }
       end
+
+      def self.current_time
+        Process.clock_gettime(Process::CLOCK_MONOTONIC)
+      end
     end
   end
 end
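JobLogger now brackets each job with Logger#with_metadata, so the Started/Done/Failed lines and their durations all carry per-job metadata on the current thread (this is the "job duration and metadata to all task logs" entry, #57). A rough sketch of the behaviour — the wid key is illustrative, since only tsk is visible in the hunk above:

    Pallets.logger.with_metadata(wid: 'W1', tsk: 'SendEmail') do
      Pallets.logger.info 'Started' # emitted with " wid=W1 tsk=SendEmail" attached
    end
    Pallets.logger.info 'Started'   # outside the block, no metadata is attached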
data/lib/pallets/version.rb
CHANGED
data/lib/pallets/worker.rb
CHANGED
@@ -53,8 +53,8 @@ module Pallets
     rescue Pallets::Shutdown
       @manager.remove_worker(self)
     rescue => ex
-      Pallets.logger.error "#{ex.class.name}: #{ex.message}"
-      Pallets.logger.error ex.backtrace.join("\n")
+      Pallets.logger.error "#{ex.class.name}: #{ex.message}"
+      Pallets.logger.error ex.backtrace.join("\n") unless ex.backtrace.nil?
       @manager.replace_worker(self)
     end
 
@@ -64,8 +64,8 @@ module Pallets
     rescue
       # We ensure only valid jobs are created. If something fishy reaches this
       # point, just give up on it
-      backend.
-      Pallets.logger.error "Could not deserialize #{job}. Gave up job"
+      backend.discard(job)
+      Pallets.logger.error "Could not deserialize #{job}. Gave up job"
       return
     end
 
@@ -103,7 +103,7 @@ module Pallets
       retry_at = Time.now.to_f + backoff_in_seconds(failures)
       backend.retry(new_job, job, retry_at)
     else
-      backend.give_up(new_job, job)
+      backend.give_up(job_hash['wfid'], new_job, job)
     end
   end
 
@@ -112,11 +112,11 @@ module Pallets
       'given_up_at' => Time.now.to_f,
       'reason' => 'returned_false'
     ))
-    backend.give_up(new_job, job)
+    backend.give_up(job_hash['wfid'], new_job, job)
   end
 
   def handle_job_success(context, job, job_hash)
-    backend.save(job_hash['wfid'], job, serializer.dump_context(context.buffer))
+    backend.save(job_hash['wfid'], job_hash['jid'], job, serializer.dump_context(context.buffer))
   end
 
   def backoff_in_seconds(count)
data/lib/pallets/workflow.rb
CHANGED
@@ -4,6 +4,12 @@ module Pallets
 
     attr_reader :context
 
+    def self.build(&block)
+      Class.new(self).tap do |workflow_class|
+        workflow_class.instance_eval(&block)
+      end
+    end
+
     def initialize(context_hash = {})
       @id = nil
       # Passed in context hash needs to be buffered
@@ -14,7 +20,7 @@ module Pallets
       raise WorkflowError, "#{self.class.name} has no tasks. Workflows "\
                            "must contain at least one task" if self.class.graph.empty?
 
-      backend.run_workflow(id,
+      backend.run_workflow(id, *prepare_jobs, serializer.dump_context(context.buffer))
       id
     end
 
@@ -24,20 +30,29 @@ module Pallets
 
     private
 
-    def
-
-
-
+    def prepare_jobs
+      jobs = []
+      jobmasks = Hash.new { |h, k| h[k] = [] }
+      acc = {}
+
+      self.class.graph.each do |task_alias, dependencies|
+        job_hash = construct_job(task_alias)
+        acc[task_alias] = job_hash['jid']
+        job = serializer.dump(job_hash)
+
+        jobs << [dependencies.size, job]
+        dependencies.each { |d| jobmasks[acc[d]] << [-1, job] }
       end
+
+      [jobs, jobmasks]
     end
 
-    def construct_job(
-
+    def construct_job(task_alias)
+      Hash[self.class.task_config[task_alias]].tap do |job|
         job['wfid'] = id
-        job['jid'] = "J#{Pallets::Util.generate_id(task_class)}".upcase
-        job['workflow_class'] = self.class.name
+        job['jid'] = "J#{Pallets::Util.generate_id(job['task_class'])}".upcase
         job['created_at'] = Time.now.to_f
-      end
+      end
     end
 
     def backend
@@ -48,6 +63,10 @@ module Pallets
       Pallets.serializer
     end
 
+    def self.name
+      @name ||= super || '<Anonymous>'
+    end
+
     def self.task_config
       @task_config ||= {}
     end
data/pallets.gemspec
CHANGED
@@ -9,8 +9,8 @@ Gem::Specification.new do |spec|
   spec.authors = ['Andrei Horak']
   spec.email = ['linkyndy@gmail.com']
 
-  spec.summary = '
-  spec.description = '
+  spec.summary = 'Simple and reliable workflow engine, written in Ruby'
+  spec.description = 'Simple and reliable workflow engine, written in Ruby'
   spec.homepage = 'https://github.com/linkyndy/pallets'
   spec.license = 'MIT'
 
@@ -20,6 +20,6 @@ Gem::Specification.new do |spec|
 
   spec.required_ruby_version = '>= 2.4'
 
-  spec.add_dependency 'redis'
+  spec.add_dependency 'redis', '~> 4.2'
   spec.add_dependency 'msgpack'
 end
metadata
CHANGED
@@ -1,29 +1,29 @@
 --- !ruby/object:Gem::Specification
 name: pallets
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.9.0
 platform: ruby
 authors:
 - Andrei Horak
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2020-07-05 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: redis
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '
+        version: '4.2'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '
+        version: '4.2'
 - !ruby/object:Gem::Dependency
   name: msgpack
   requirement: !ruby/object:Gem::Requirement
@@ -38,7 +38,7 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-description:
+description: Simple and reliable workflow engine, written in Ruby
 email:
 - linkyndy@gmail.com
 executables:
@@ -46,6 +46,7 @@ executables:
 extensions: []
 extra_rdoc_files: []
 files:
+- ".github/FUNDING.yml"
 - ".gitignore"
 - ".rspec"
 - ".travis.yml"
@@ -56,6 +57,8 @@ files:
 - README.md
 - Rakefile
 - bin/pallets
+- examples/aliases.rb
+- examples/anonymous.rb
 - examples/appsignal.rb
 - examples/config/appsignal.yml
 - examples/config_savvy.rb
@@ -64,6 +67,7 @@ files:
 - lib/pallets.rb
 - lib/pallets/backends/base.rb
 - lib/pallets/backends/redis.rb
+- lib/pallets/backends/scripts/discard.lua
 - lib/pallets/backends/scripts/give_up.lua
 - lib/pallets/backends/scripts/reschedule_all.lua
 - lib/pallets/backends/scripts/retry.lua
@@ -95,7 +99,7 @@ homepage: https://github.com/linkyndy/pallets
 licenses:
 - MIT
 metadata: {}
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -110,8 +114,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.
-signing_key:
+rubygems_version: 3.1.2
+signing_key:
 specification_version: 4
-summary:
+summary: Simple and reliable workflow engine, written in Ruby
 test_files: []