cloudtasker 0.8.1 → 0.10.rc1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +40 -0
- data/README.md +43 -2
- data/app/controllers/cloudtasker/worker_controller.rb +21 -3
- data/cloudtasker.gemspec +2 -2
- data/lib/cloudtasker.rb +1 -0
- data/lib/cloudtasker/backend/google_cloud_task.rb +25 -5
- data/lib/cloudtasker/backend/memory_task.rb +8 -23
- data/lib/cloudtasker/cloud_task.rb +2 -0
- data/lib/cloudtasker/config.rb +32 -2
- data/lib/cloudtasker/max_task_size_exceeded_error.rb +14 -0
- data/lib/cloudtasker/redis_client.rb +5 -1
- data/lib/cloudtasker/testing.rb +2 -2
- data/lib/cloudtasker/version.rb +1 -1
- data/lib/cloudtasker/worker_handler.rb +144 -7
- metadata +14 -13
checksums.yaml
CHANGED
```diff
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d4cba7de3e429d612adf6c9c2f4424b6ef73db39d4db93b70804800300011e1b
+  data.tar.gz: 3775cdf3f16430cf8decd49dfc28be9e26f0ef6a63d45224bdc5ed11b13a86fc
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5e2e15dc54fad72e3508763855a99804b591126968ccbcaccd8211d51b8b1e28bf6d2907c746f2b14c53c3c065ce1eb06871f30b39419df1f3d7b8a4e1b1fded
+  data.tar.gz: a2808491a7251b5212587351deb84a99f688a62f7a54c3f5b9c8ebe3a3b6a1ca6adda6ffe898424069223cef8260f1a49c8a086010c0e59dedc5e50ed371e830
```
data/CHANGELOG.md
CHANGED
```diff
@@ -1,5 +1,45 @@
 # Changelog
 
+## [v0.9.2](https://github.com/keypup-io/cloudtasker/tree/v0.9.2) (2020-03-04)
+
+[Full Changelog](https://github.com/keypup-io/cloudtasker/compare/v0.9.1...v0.9.2)
+
+**Fixed bugs:**
+- Cloud Task: ignore "not found" errors when trying to delete an already deleted task.
+
+## [v0.9.1](https://github.com/keypup-io/cloudtasker/tree/v0.9.1) (2020-02-11)
+
+[Full Changelog](https://github.com/keypup-io/cloudtasker/compare/v0.9.0...v0.9.1)
+
+**Fixed bugs:**
+- Cloud Task: raise `Cloudtasker::MaxTaskSizeExceededError` if job payload exceeds 100 KB. This is mainly to have production parity in development when running the local processing server.
+
+## [v0.9.0](https://github.com/keypup-io/cloudtasker/tree/v0.9.0) (2020-01-23)
+
+[Full Changelog](https://github.com/keypup-io/cloudtasker/compare/v0.8.2...v0.9.0)
+
+**Fixed bugs:**
+- Cloud Task: Base64 encode task body to support UTF-8 characters (e.g. emojis).
+- Redis: Restrict to one connection (class level) to avoid too many DNS lookups
+
+**Migration**
+For Sinatra applications please update your Cloudtasker controller according to [this diff](https://github.com/keypup-io/cloudtasker/commit/311fa8f9beec91fbae012164a25b2ee6e261a2e4#diff-c2a0ea6c6e6c31c749d2e1acdc574f0f).
+
+## [v0.8.2](https://github.com/keypup-io/cloudtasker/tree/v0.8.2) (2019-12-05)
+
+[Full Changelog](https://github.com/keypup-io/cloudtasker/compare/v0.8.1...v0.8.2)
+
+**Fixed bugs:**
+- Config: do not add processor host to `Rails.application.config.hosts` if originally empty.
+
+## [v0.8.1](https://github.com/keypup-io/cloudtasker/tree/v0.8.1) (2019-12-03)
+
+[Full Changelog](https://github.com/keypup-io/cloudtasker/compare/v0.8.0...v0.8.1)
+
+**Fixed bugs:**
+- Local dev server: ensure job queue name is kept when taks is retried
+- Rails/Controller: bypass Rails munge logic to preserve nil values inside job arguments.
+
 ## [v0.8.0](https://github.com/keypup-io/cloudtasker/tree/v0.8.0) (2019-11-27)
 
 [Full Changelog](https://github.com/keypup-io/cloudtasker/compare/v0.7.0...v0.8.0)
```
data/README.md
CHANGED
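
The README additions below document Google Cloud Tasks' 100 KB payload limit, the roughly 85 KB of that budget left for JSONified job arguments once Cloudtasker headers and meta information are counted, and the new `store_payloads_in_redis` option. As a rough reading aid (not part of the README itself), here is a minimal sketch of that size arithmetic, using a hypothetical argument array and the limits quoted in the docs:

```ruby
require 'json'

MAX_TASK_SIZE  = 100 * 1024 # hard limit enforced by Google Cloud Tasks
EFFECTIVE_ROOM = 85 * 1024  # approximate space left for JSONified job arguments

# Hypothetical job arguments; in a real app this is whatever gets passed to perform_async.
job_args = [{ 'user_ids' => (1..20_000).to_a }]

size = job_args.to_json.bytesize
puts format('job args: %.1f KB', size / 1024.0)
puts 'over the documented ~85 KB of free space'        if size > EFFECTIVE_ROOM
puts 'would raise Cloudtasker::MaxTaskSizeExceededError' if size > MAX_TASK_SIZE
```

When arguments routinely land above that threshold, the `store_payloads_in_redis` option shown in the diff below is the gem's built-in alternative to trimming payloads by hand.
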
````diff
@@ -224,7 +224,7 @@ Cloudtasker.configure do |config|
   #
   # config.max_retries = 10
 
-  #
+  #
   # Specify the redis connection hash.
   #
   # This is ONLY required in development for the Cloudtasker local server and in
@@ -235,6 +235,24 @@ Cloudtasker.configure do |config|
   # Default: redis-rb connects to redis://127.0.0.1:6379/0
   #
   # config.redis = { url: 'redis://localhost:6379/5' }
+
+  #
+  # Set to true to store job arguments in Redis instead of sending arguments as part
+  # of the job payload to Google Cloud Tasks.
+  #
+  # This is useful if you expect to process jobs with payloads exceeding 100KB, which
+  # is the limit enforced by Google Cloud Tasks.
+  #
+  # You can set this configuration parameter to a KB value if you want to store jobs
+  # args in redis only if the JSONified arguments payload exceeds that threshold.
+  #
+  # Default: false
+  #
+  # Store all job payloads in Redis:
+  # config.store_payloads_in_redis = true
+  #
+  # Store all job payloads in Redis exceeding 50 KB:
+  # config.store_payloads_in_redis = 50
 end
 ```
 
@@ -635,7 +653,30 @@ If you enqueue this worker by omitting the second argument `MyWorker.perform_asy
 - The `time_at` argument will be ignored by the `unique-job` extension, meaning that job uniqueness will be only based on the `user_id` argument.
 
 ### Handling big job payloads
-
+Google Cloud Tasks enforces a limit of 100 KB for job payloads. Taking into accounts Cloudtasker authentication headers and meta information this leave ~85 KB of free space for JSONified job arguments.
+
+Any excessive job payload (> 100 KB) will raise a `Cloudtasker::MaxTaskSizeExceededError`, both in production and development mode.
+
+#### Option 1: Use Cloudtasker optional support for payload storage in Redis
+Cloudtasker provides optional support for storing argument payloads in Redis instead of sending them to Google Cloud Tasks.
+
+To enable it simply put the following in your Cloudtasker initializer:
+```ruby
+# config/initializers/cloudtasker.rb
+
+Cloudtasker.configure do |config|
+  # Enable Redis support. Specify your redis connection
+  config.redis = { url: 'redis://localhost:6379/5' }
+
+  # Store all job payloads in Redis:
+  config.store_payloads_in_redis = true
+
+  # OR: store all job payloads in Redis exceeding 50 KB:
+  # config.store_payloads_in_redis = 50
+end
+```
+
+#### Option 2: Do it yourself solution
 
 If you feel that a job payload is going to get big, prefer to store the payload using a datastore (e.g. Redis) and pass a reference to the job to retrieve the payload inside your job `perform` method.
````

data/app/controllers/cloudtasker/worker_controller.rb
CHANGED

```diff
@@ -16,9 +16,6 @@ module Cloudtasker
     # Run a worker from a Cloud Task payload
     #
     def run
-      # Build payload
-      payload = JSON.parse(request.body.read).merge(job_retries: job_retries)
-
       # Process payload
       WorkerHandler.execute_from_payload!(payload)
       head :no_content
@@ -37,6 +34,27 @@ module Cloudtasker
 
     private
 
+    #
+    # Parse the request body and return the actual job
+    # payload.
+    #
+    # @return [Hash] The job payload
+    #
+    def payload
+      @payload ||= begin
+        # Get raw body
+        content = request.body.read
+
+        # Decode content if the body is Base64 encoded
+        if request.headers[Cloudtasker::Config::ENCODING_HEADER].to_s.downcase == 'base64'
+          content = Base64.decode64(content)
+        end
+
+        # Return content parsed as JSON and add job retries count
+        JSON.parse(content).merge(job_retries: job_retries)
+      end
+    end
+
     #
     # Extract the number of times this task failed at runtime.
     #
```
data/cloudtasker.gemspec
CHANGED
```diff
@@ -39,10 +39,10 @@ Gem::Specification.new do |spec|
   spec.add_development_dependency 'appraisal'
   spec.add_development_dependency 'bundler', '~> 2.0'
   spec.add_development_dependency 'github_changelog_generator'
-  spec.add_development_dependency 'rake', '
+  spec.add_development_dependency 'rake', '>= 12.3.3'
   spec.add_development_dependency 'rspec', '~> 3.0'
   spec.add_development_dependency 'rubocop', '0.76.0'
-  spec.add_development_dependency 'rubocop-rspec'
+  spec.add_development_dependency 'rubocop-rspec', '1.37.0'
   spec.add_development_dependency 'timecop'
   spec.add_development_dependency 'webmock'
 
```
data/lib/cloudtasker.rb
CHANGED
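
The library changes below (this file plus the backend files that follow it in this diff) register the new `max_task_size_exceeded_error` and switch Cloud Task bodies to Base64, since Google Cloud Tasks expects binary-safe (ASCII-8BIT compatible) content while job arguments may contain multi-byte UTF-8. A self-contained, hedged sketch of that encode/decode round trip using only the Ruby standard library (the payload and worker name are illustrative, not Cloudtasker's internals):

```ruby
require 'base64'
require 'json'

# Hypothetical worker payload containing multi-byte UTF-8 arguments (e.g. emojis).
payload = { 'worker' => 'MyWorker', 'job_id' => 'abc-123', 'job_args' => ['héllo 👋'] }

# Enqueue side: ship a Base64 body plus a Content-Transfer-Encoding header.
body    = Base64.encode64(payload.to_json)
headers = { 'Content-Type' => 'text/json', 'Content-Transfer-Encoding' => 'Base64' }

# Processing side: decode before parsing whenever the header says Base64,
# mirroring the controller change earlier in this diff.
content = headers['Content-Transfer-Encoding'].to_s.downcase == 'base64' ? Base64.decode64(body) : body
decoded = JSON.parse(content)

puts decoded['job_args'].first # => "héllo 👋" — arguments survive the round trip intact
```
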
```diff
@@ -8,6 +8,7 @@ require 'cloudtasker/config'
 require 'cloudtasker/authentication_error'
 require 'cloudtasker/dead_worker_error'
 require 'cloudtasker/invalid_worker_error'
+require 'cloudtasker/max_task_size_exceeded_error'
 
 require 'cloudtasker/middleware/chain'
 require 'cloudtasker/authenticator'
```

data/lib/cloudtasker/backend/google_cloud_task.rb
CHANGED

```diff
@@ -82,6 +82,29 @@ module Cloudtasker
         Google::Protobuf::Timestamp.new.tap { |e| e.seconds = schedule_time.to_i }
       end
 
+      #
+      # Format the job payload sent to Cloud Tasks.
+      #
+      # @param [Hash] hash The worker payload.
+      #
+      # @return [Hash] The Cloud Task payloadd.
+      #
+      def self.format_task_payload(payload)
+        payload = JSON.parse(payload.to_json, symbolize_names: true) # deep dup
+
+        # Format schedule time to Google Protobuf timestamp
+        payload[:schedule_time] = format_schedule_time(payload[:schedule_time])
+
+        # Encode job content to support UTF-8. Google Cloud Task
+        # expect content to be ASCII-8BIT compatible (binary)
+        payload[:http_request][:headers] ||= {}
+        payload[:http_request][:headers][Cloudtasker::Config::CONTENT_TYPE_HEADER] = 'text/json'
+        payload[:http_request][:headers][Cloudtasker::Config::ENCODING_HEADER] = 'Base64'
+        payload[:http_request][:body] = Base64.encode64(payload[:http_request][:body])
+
+        payload
+      end
+
       #
       # Find a task by id.
       #
@@ -104,10 +127,7 @@ module Cloudtasker
       # @return [Cloudtasker::Backend::GoogleCloudTask, nil] The created task.
       #
       def self.create(payload)
-
-        payload = payload.merge(
-          schedule_time: format_schedule_time(payload[:schedule_time])
-        ).compact
+        payload = format_task_payload(payload)
 
         # Extract relative queue name
         relative_queue = payload.delete(:queue)
@@ -126,7 +146,7 @@ module Cloudtasker
       #
       def self.delete(id)
         client.delete_task(id)
-      rescue Google::Gax::RetryError
+      rescue Google::Gax::RetryError, GRPC::NotFound, Google::Gax::PermissionDeniedError
         nil
       end
 
```

data/lib/cloudtasker/backend/memory_task.rb
CHANGED

```diff
@@ -7,6 +7,7 @@ module Cloudtasker
     # Manage local tasks pushed to memory.
     # Used for testing.
     class MemoryTask
+      attr_accessor :job_retries
       attr_reader :id, :http_request, :schedule_time, :queue
 
       #
@@ -18,17 +19,6 @@ module Cloudtasker
         @queue ||= []
       end
 
-      #
-      # Return the workers currently in the queue.
-      #
-      # @param [String] worker_class_name Filter jobs on worker class name.
-      #
-      # @return [Array<Cloudtasker::Worker] The list of workers
-      #
-      def self.jobs(worker_class_name = nil)
-        all(worker_class_name).map(&:worker)
-      end
-
       #
       # Run all Tasks in the queue. Optionally filter which tasks to run based
       # on the worker class name.
@@ -116,11 +106,12 @@ module Cloudtasker
       # @param [Hash] http_request The HTTP request content.
      # @param [Integer] schedule_time When to run the task (Unix timestamp)
       #
-      def initialize(id:, http_request:, schedule_time: nil, queue: nil)
+      def initialize(id:, http_request:, schedule_time: nil, queue: nil, job_retries: 0)
         @id = id
         @http_request = http_request
         @schedule_time = Time.at(schedule_time || 0)
         @queue = queue
+        @job_retries = job_retries || 0
       end
 
       #
@@ -155,26 +146,20 @@ module Cloudtasker
         }
       end
 
-      #
-      # Return the worker attached to this task.
-      #
-      # @return [Cloudtasker::Worker] The task worker.
-      #
-      def worker
-        @worker ||= Worker.from_hash(payload)
-      end
-
       #
       # Execute the task.
       #
       # @return [Any] The return value of the worker perform method.
       #
       def execute
-
+        # Execute worker
+        resp = WorkerHandler.with_worker_handling(payload, &:execute)
+
+        # Delete task
         self.class.delete(id)
         resp
       rescue StandardError
-
+        self.job_retries += 1
       end
 
       #
```

data/lib/cloudtasker/cloud_task.rb
CHANGED

```diff
@@ -48,6 +48,8 @@ module Cloudtasker
     # @return [Cloudtasker::CloudTask] The created task.
     #
     def self.create(payload)
+      raise MaxTaskSizeExceededError if payload.to_json.bytesize > Config::MAX_TASK_SIZE
+
       resp = backend.create(payload)&.to_h
       resp ? new(resp) : nil
     end
```
data/lib/cloudtasker/config.rb
CHANGED
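
The `config.rb` hunks below add the `store_payloads_in_redis` option and the `MAX_TASK_SIZE` constant; the new error class and the shared class-level Redis connection follow in the same group of files. As a quick illustration of the threshold semantics (`nil` when the option is unset, `0` when it is `true`, otherwise a KB value), here is a standalone sketch of the same decision logic — plain Ruby, not Cloudtasker's API:

```ruby
require 'json'

# Mirrors Config#redis_payload_storage_threshold: nil (disabled), 0 (always store), or a KB value.
def threshold_kb(store_payloads_in_redis)
  return nil unless store_payloads_in_redis

  store_payloads_in_redis.respond_to?(:to_i) ? store_payloads_in_redis.to_i : 0
end

# Mirrors the enqueue-side check: compare the JSONified args against the threshold.
def store_in_redis?(setting, job_args)
  kb = threshold_kb(setting)
  !kb.nil? && job_args.to_json.bytesize > kb * 1024
end

args = ['x' * 60_000] # ~60 KB of arguments
puts store_in_redis?(true, args)  # => true  (0 KB threshold: always store)
puts store_in_redis?(50, args)    # => true  (60 KB > 50 KB)
puts store_in_redis?(false, args) # => false (feature disabled)
```
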
```diff
@@ -5,13 +5,25 @@ require 'logger'
 module Cloudtasker
   # Holds cloudtasker configuration. See Cloudtasker#configure
   class Config
-    attr_accessor :redis
+    attr_accessor :redis, :store_payloads_in_redis
     attr_writer :secret, :gcp_location_id, :gcp_project_id,
                 :gcp_queue_prefix, :processor_path, :logger, :mode, :max_retries
 
+    # Max Cloud Task size in bytes
+    MAX_TASK_SIZE = 100 * 1024 # 100 KB
+
     # Retry header in Cloud Task responses
     RETRY_HEADER = 'X-CloudTasks-TaskExecutionCount'
 
+    # Content-Transfer-Encoding header in Cloud Task responses
+    ENCODING_HEADER = 'Content-Transfer-Encoding'
+
+    # Content Type
+    CONTENT_TYPE_HEADER = 'Content-Type'
+
+    # Authorization header
+    AUTHORIZATION_HEADER = 'Authorization'
+
     # Default values
     DEFAULT_LOCATION_ID = 'us-east1'
     DEFAULT_PROCESSOR_PATH = '/cloudtasker/run'
@@ -42,6 +54,21 @@ module Cloudtasker
       Please specify a secret in the cloudtasker initializer or add Rails secret_key_base in your credentials
     DOC
 
+    #
+    # Return the threshold above which job arguments must be stored
+    # in Redis instead of being sent to the backend as part of the job
+    # payload.
+    #
+    # Return nil if redis payload storage is disabled.
+    #
+    # @return [Integer, nil] The threshold above which payloads will be stored in Redis.
+    #
+    def redis_payload_storage_threshold
+      return nil unless store_payloads_in_redis
+
+      store_payloads_in_redis.respond_to?(:to_i) ? store_payloads_in_redis.to_i : 0
+    end
+
     #
     # The number of times jobs will be retried. This number of
     # retries does not include failures due to the application being unreachable.
@@ -102,7 +129,10 @@ module Cloudtasker
       @processor_host = val
 
       # Check if Rails supports host filtering
-      return unless val &&
+      return unless val &&
+                    defined?(Rails) &&
+                    Rails.application.config.respond_to?(:hosts) &&
+                    Rails.application.config.hosts&.any?
 
       # Add processor host to the list of authorized hosts
       Rails.application.config.hosts << val.gsub(%r{https?://}, '')
```

data/lib/cloudtasker/max_task_size_exceeded_error.rb
ADDED

```diff
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Cloudtasker
+  # Handle Cloud Task size quota
+  # See: https://cloud.google.com/appengine/quotas#Task_Queue
+  #
+  class MaxTaskSizeExceededError < StandardError
+    MSG = 'The size of Cloud Tasks must not exceed 100KB'
+
+    def initialize(msg = MSG)
+      super
+    end
+  end
+end
```

data/lib/cloudtasker/redis_client.rb
CHANGED

```diff
@@ -8,13 +8,17 @@ module Cloudtasker
     # Suffix added to cache keys when locking them
     LOCK_KEY_PREFIX = 'cloudtasker/lock'
 
+    def self.client
+      @client ||= Redis.new(Cloudtasker.config.redis || {})
+    end
+
     #
     # Return the underlying redis client.
     #
     # @return [Redis] The redis client.
     #
     def client
-      @client ||=
+      @client ||= self.class.client
     end
 
     #
```
data/lib/cloudtasker/testing.rb
CHANGED
```diff
@@ -114,10 +114,10 @@ module Cloudtasker
       #
       # Return all jobs related to this worker class.
       #
-      # @return [Array<Cloudtasker::
+      # @return [Array<Cloudtasker::Backend::MemoryTask>] The list of tasks
       #
       def jobs
-        Backend::MemoryTask.
+        Backend::MemoryTask.all(to_s)
       end
 
       #
```
data/lib/cloudtasker/version.rb
CHANGED
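
The `data/lib/cloudtasker/worker_handler.rb` changes below implement the optional Redis storage of job arguments: large argument payloads are written under a namespaced key at enqueue time and only a `job_args_payload_id` reference travels through Cloud Tasks, to be swapped back at processing time. Here is a self-contained sketch of that store-and-fetch round trip, with a plain Hash standing in for the Redis client and purely illustrative names (not Cloudtasker's actual classes or key scheme):

```ruby
require 'json'
require 'securerandom'

fake_redis = {} # stand-in for Cloudtasker's Redis-backed store
job_id     = SecureRandom.uuid
job_args   = [{ 'report_ids' => (1..10_000).to_a }] # large hypothetical arguments

# Enqueue side: store the args under a namespaced key and ship only a reference.
args_key = "worker_handler/payload/#{job_id}"
fake_redis[args_key] = job_args
task_payload = { 'worker' => 'MyWorker', 'job_id' => job_id, 'job_args_payload_id' => job_id }

# Dequeue side: swap the reference back for the stored arguments.
received   = JSON.parse(task_payload.to_json)
payload_id = received.delete('job_args_payload_id')
received['job_args'] = payload_id ? fake_redis["worker_handler/payload/#{payload_id}"] : received['job_args']

puts received['job_args'].first['report_ids'].size # => 10000
```

In the real implementation the key is expired (rather than deleted) after a successful run, so that a job re-delivered after a network hiccup can still find its arguments.
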
data/lib/cloudtasker/worker_handler.rb
CHANGED

```diff
@@ -10,16 +10,113 @@ module Cloudtasker
     # Alrogith used to sign the verification token
     JWT_ALG = 'HS256'
 
+    # Sub-namespace to use for redis keys when storing
+    # payloads in Redis
+    REDIS_PAYLOAD_NAMESPACE = 'payload'
+
+    # Arg payload cache keys get expired instead of deleted
+    # in case jobs are re-processed due to connection interruption
+    # (job is successful but Cloud Task considers it as failed due
+    # to network interruption)
+    ARGS_PAYLOAD_CLEANUP_TTL = 3600 # 1 hour
+
+    #
+    # Return a namespaced key
+    #
+    # @param [String, Symbol] val The key to namespace
+    #
+    # @return [String] The namespaced key.
+    #
+    def self.key(val)
+      return nil if val.nil?
+
+      [to_s.underscore, val.to_s].join('/')
+    end
+
+    #
+    # Return the cloudtasker redis client
+    #
+    # @return [Cloudtasker::RedisClient] The cloudtasker redis client.
+    #
+    def self.redis
+      @redis ||= begin
+        require 'cloudtasker/redis_client'
+        RedisClient.new
+      end
+    end
+
     #
     # Execute a task worker from a task payload
     #
-    # @param [Hash]
+    # @param [Hash] input_payload The Cloud Task payload.
     #
     # @return [Any] The return value of the worker perform method.
     #
-    def self.execute_from_payload!(
+    def self.execute_from_payload!(input_payload)
+      with_worker_handling(input_payload, &:execute)
+    end
+
+    # TODO: do not delete redis payload if job has been re-enqueued
+    # worker.job_reenqueued
+    #
+    # Idea: change with_worker_handling to with_worker_handling and build the worker
+    # inside the with_worker_handling block.
+    #
+    # Local middleware used to retrieve the job arg payload from cache
+    # if a arg payload reference is present.
+    #
+    # @param [Hash] payload The full job payload
+    #
+    # @yield [Hash] The actual payload to use to process the job.
+    #
+    # @return [Any] The block result
+    #
+    def self.with_worker_handling(input_payload)
+      # Extract payload information
+      extracted_payload = extract_payload(input_payload)
+      payload = extracted_payload[:payload]
+      args_payload_key = extracted_payload[:args_payload_key]
+
+      # Build worker
       worker = Cloudtasker::Worker.from_hash(payload) || raise(InvalidWorkerError)
-
+
+      # Yied worker
+      resp = yield(worker)
+
+      # Schedule args payload deletion after job has been successfully processed
+      # Note: we expire the key instead of deleting it immediately in case the job
+      # succeeds but is considered as failed by Cloud Task due to network interruption.
+      # In such case the job is likely to be re-processed soon after.
+      redis.expire(args_payload_key, ARGS_PAYLOAD_CLEANUP_TTL) if args_payload_key && !worker.job_reenqueued
+
+      resp
+    rescue DeadWorkerError => e
+      # Delete stored args payload if job is dead
+      redis.expire(args_payload_key, ARGS_PAYLOAD_CLEANUP_TTL) if args_payload_key
+      raise(e)
+    end
+
+    #
+    # Return the argument payload key (if present) along with the actual worker payload.
+    #
+    # If the payload was stored in Redis then retrieve it.
+    #
+    # @return [Hash] Hash
+    #
+    def self.extract_payload(input_payload)
+      # Get references
+      payload = JSON.parse(input_payload.to_json, symbolize_names: true)
+      args_payload_id = payload.delete(:job_args_payload_id)
+      args_payload_key = args_payload_id ? key([REDIS_PAYLOAD_NAMESPACE, args_payload_id].join('/')) : nil
+
+      # Retrieve the actual worker args payload
+      args_payload = args_payload_key ? redis.fetch(args_payload_key) : payload[:job_args]
+
+      # Return the payload
+      {
+        args_payload_key: args_payload_key,
+        payload: payload.merge(job_args: args_payload)
+      }
     end
 
     #
@@ -42,8 +139,8 @@ module Cloudtasker
           http_method: 'POST',
           url: Cloudtasker.config.processor_url,
           headers: {
-
-
+            Cloudtasker::Config::CONTENT_TYPE_HEADER => 'application/json',
+            Cloudtasker::Config::AUTHORIZATION_HEADER => "Bearer #{Authenticator.verification_token}"
           },
           body: worker_payload.to_json
         },
@@ -51,6 +148,47 @@ module Cloudtasker
       }
     end
 
+    #
+    # Return true if the worker args must be stored in Redis.
+    #
+    # @return [Boolean] True if the payload must be stored in redis.
+    #
+    def store_payload_in_redis?
+      Cloudtasker.config.redis_payload_storage_threshold &&
+        worker.job_args.to_json.bytesize > (Cloudtasker.config.redis_payload_storage_threshold * 1024)
+    end
+
+    #
+    # Return the payload to use for job arguments. This payload
+    # is merged inside the #worker_payload.
+    #
+    # If the argument payload must be stored in Redis then returns:
+    # `{ job_args_payload_id: <worker_id> }`
+    #
+    # If the argument payload must be natively handled by the backend
+    # then returns:
+    # `{ job_args: [...] }`
+    #
+    # @return [Hash] The worker args payload.
+    #
+    def worker_args_payload
+      @worker_args_payload ||= begin
+        if store_payload_in_redis?
+          # Store payload in Redis
+          self.class.redis.write(
+            self.class.key([REDIS_PAYLOAD_NAMESPACE, worker.job_id].join('/')),
+            worker.job_args
+          )
+
+          # Return reference to args payload
+          { job_args_payload_id: worker.job_id }
+        else
+          # Return regular job args payload
+          { job_args: worker.job_args }
+        end
+      end
+    end
+
     #
     # Return the task payload that Google Task will eventually
     # send to the job processor.
@@ -68,9 +206,8 @@ module Cloudtasker
         worker: worker.job_class_name,
         job_queue: worker.job_queue,
         job_id: worker.job_id,
-        job_args: worker.job_args,
         job_meta: worker.job_meta.to_h
-      }
+      }.merge(worker_args_payload)
     end
 
     #
```
metadata
CHANGED
```diff
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: cloudtasker
 version: !ruby/object:Gem::Version
-  version: 0.8.1
+  version: 0.10.rc1
 platform: ruby
 authors:
 - Arnaud Lachaume
 autorequire:
 bindir: exe
 cert_chain: []
-date:
+date: 2020-03-09 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activesupport
@@ -126,16 +126,16 @@ dependencies:
   name: rake
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - ">="
       - !ruby/object:Gem::Version
-        version:
+        version: 12.3.3
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - ">="
       - !ruby/object:Gem::Version
-        version:
+        version: 12.3.3
 - !ruby/object:Gem::Dependency
   name: rspec
   requirement: !ruby/object:Gem::Requirement
@@ -168,16 +168,16 @@ dependencies:
   name: rubocop-rspec
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '='
       - !ruby/object:Gem::Version
-        version:
+        version: 1.37.0
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '='
       - !ruby/object:Gem::Version
-        version:
+        version: 1.37.0
 - !ruby/object:Gem::Dependency
   name: timecop
   requirement: !ruby/object:Gem::Requirement
@@ -320,6 +320,7 @@ files:
 - lib/cloudtasker/engine.rb
 - lib/cloudtasker/invalid_worker_error.rb
 - lib/cloudtasker/local_server.rb
+- lib/cloudtasker/max_task_size_exceeded_error.rb
 - lib/cloudtasker/meta_store.rb
 - lib/cloudtasker/middleware/chain.rb
 - lib/cloudtasker/redis_client.rb
@@ -363,12 +364,12 @@ required_ruby_version: !ruby/object:Gem::Requirement
       version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - "
+  - - ">"
     - !ruby/object:Gem::Version
-      version:
+      version: 1.3.1
 requirements: []
 rubyforge_project:
-rubygems_version: 2.7.
+rubygems_version: 2.7.6.2
 signing_key:
 specification_version: 4
 summary: Background jobs for Ruby using Google Cloud Tasks (beta)
```