cloudtasker 0.3.0 → 0.8.0

@@ -7,7 +7,7 @@ module Cloudtasker
   class Config
     attr_accessor :redis
     attr_writer :secret, :gcp_location_id, :gcp_project_id,
-                :gcp_queue_id, :processor_path, :logger, :mode, :max_retries
+                :gcp_queue_prefix, :processor_path, :logger, :mode, :max_retries

     # Retry header in Cloud Task responses
     RETRY_HEADER = 'X-CloudTasks-TaskExecutionCount'
@@ -16,6 +16,11 @@ module Cloudtasker
     DEFAULT_LOCATION_ID = 'us-east1'
     DEFAULT_PROCESSOR_PATH = '/cloudtasker/run'

+    # Default queue values
+    DEFAULT_JOB_QUEUE = 'default'
+    DEFAULT_QUEUE_CONCURRENCY = 10
+    DEFAULT_QUEUE_RETRIES = -1 # unlimited
+
     # The number of times jobs will be attempted before declaring them dead
     DEFAULT_MAX_RETRY_ATTEMPTS = 25

@@ -23,9 +28,10 @@ module Cloudtasker
       Missing host for processing.
       Please specify a processor hostname in form of `https://some-public-dns.example.com`'
     DOC
-    QUEUE_ID_MISSING_ERROR = <<~DOC
-      Missing GCP queue ID.
-      Please specify a queue ID in the form of `my-queue-id`. You can create a queue using the Google SDK via `gcloud tasks queues create my-queue-id`
+    QUEUE_PREFIX_MISSING_ERROR = <<~DOC
+      Missing GCP queue prefix.
+      Please specify a queue prefix in the form of `my-app`.
+      You can create a default queue using the Google SDK via `gcloud tasks queues create my-app-default`
     DOC
     PROJECT_ID_MISSING_ERROR = <<~DOC
       Missing GCP project ID.
@@ -121,12 +127,12 @@ module Cloudtasker
     end

     #
-    # Return the ID of GCP queue where tasks will be added.
+    # Return the prefix used for queues.
     #
-    # @return [String] The ID of the processing queue.
+    # @return [String] The prefix of the processing queues.
     #
-    def gcp_queue_id
-      @gcp_queue_id || raise(StandardError, QUEUE_ID_MISSING_ERROR)
+    def gcp_queue_prefix
+      @gcp_queue_prefix || raise(StandardError, QUEUE_PREFIX_MISSING_ERROR)
     end

     #
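For context, 0.8.0 replaces the single `gcp_queue_id` setting with a `gcp_queue_prefix` from which full queue names are derived (e.g. `my-app-default`, as in the error message above). A minimal initializer sketch using the `Cloudtasker.configure` block; the host and project values are illustrative:

    # config/initializers/cloudtasker.rb
    Cloudtasker.configure do |config|
      config.gcp_queue_prefix = 'my-app'          # queues become 'my-app-<queue name>'
      config.gcp_project_id   = 'my-gcp-project'
      config.gcp_location_id  = 'us-east1'
      config.processor_host   = 'https://some-public-dns.example.com'
    end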
@@ -105,10 +105,10 @@ module Cloudtasker
     #
     # Return the cloudtasker redis client
     #
-    # @return [Class] The redis client.
+    # @return [Cloudtasker::RedisClient] The cloudtasker redis client..
     #
     def redis
-      RedisClient
+      @redis ||= RedisClient.new
     end

     #
@@ -1,12 +1,13 @@
 # frozen_string_literal: true

 require 'fugit'
+require 'cloudtasker/worker_wrapper'

 module Cloudtasker
   module Cron
     # Manage cron schedules
     class Schedule
-      attr_accessor :id, :cron, :worker, :task_id, :job_id
+      attr_accessor :id, :cron, :worker, :task_id, :job_id, :queue, :args

       # Key Namespace used for object saved under this class
       SUB_NAMESPACE = 'schedule'
@@ -14,10 +15,10 @@ module Cloudtasker
       #
       # Return the redis client.
       #
-      # @return [Class] The redis client
+      # @return [Cloudtasker::RedisClient] The cloudtasker redis client.
       #
       def self.redis
-        RedisClient
+        @redis ||= RedisClient.new
       end

       #
@@ -72,8 +73,10 @@ module Cloudtasker
       # @return [Cloudtasker::Cron::Schedule] The schedule instance.
       #
       def self.create(**opts)
-        config = find(opts[:id]).to_h.merge(opts)
-        new(config).tap(&:save)
+        redis.with_lock(key(opts[:id])) do
+          config = find(opts[:id]).to_h.merge(opts)
+          new(config).tap(&:save)
+        end
       end

       #
@@ -95,12 +98,14 @@ module Cloudtasker
       # @param [String] id The schedule id.
       #
       def self.delete(id)
-        schedule = find(id)
-        return false unless schedule
+        redis.with_lock(key(id)) do
+          schedule = find(id)
+          return false unless schedule

-        # Delete task and stored schedule
-        CloudTask.delete(schedule.task_id) if schedule.task_id
-        redis.del(schedule.gid)
+          # Delete task and stored schedule
+          CloudTask.delete(schedule.task_id) if schedule.task_id
+          redis.del(schedule.gid)
+        end
       end

       #
@@ -109,21 +114,25 @@ module Cloudtasker
       # @param [String] id The schedule id.
       # @param [String] cron The cron expression.
       # @param [Class] worker The worker class to run.
+      # @param [Array<any>] args The worker arguments.
+      # @param [String] queue The queue to use for the cron job.
       # @param [String] task_id The ID of the actual backend task.
       # @param [String] job_id The ID of the Cloudtasker worker.
       #
-      def initialize(id:, cron:, worker:, task_id: nil, job_id: nil)
+      def initialize(id:, cron:, worker:, **opts)
        @id = id
        @cron = cron
        @worker = worker
-        @task_id = task_id
-        @job_id = job_id
+        @args = opts[:args]
+        @queue = opts[:queue]
+        @task_id = opts[:task_id]
+        @job_id = opts[:job_id]
       end

       #
       # Return the redis client.
       #
-      # @return [Class] The redis client
+      # @return [Cloudtasker::RedisClient] The cloudtasker redis client.
       #
       def redis
         self.class.redis
@@ -187,7 +196,9 @@ module Cloudtasker
         {
           id: id,
           cron: cron,
-          worker: worker
+          worker: worker,
+          args: args,
+          queue: queue
         }
       end

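Since `Schedule#initialize` now accepts `args` and `queue` through `**opts`, a cron entry can target a specific queue and pass arguments to its worker. A hedged sketch (schedule id, worker name and queue are illustrative):

    # Runs SyncWorker.new.perform('full') every hour on the 'cron' queue
    Cloudtasker::Cron::Schedule.create(
      id: 'hourly_sync',
      cron: '0 * * * *',
      worker: 'SyncWorker',   # class name string, resolved via WorkerWrapper
      args: ['full'],
      queue: 'cron'
    )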
@@ -197,13 +208,10 @@ module Cloudtasker
       # @return [Hash] The attributes hash.
       #
       def to_h
-        {
-          id: id,
-          cron: cron,
-          worker: worker,
+        to_config.merge(
           task_id: task_id,
           job_id: job_id
-        }
+        )
       end

       #
@@ -215,6 +223,15 @@ module Cloudtasker
         @cron_schedule ||= Fugit::Cron.parse(cron)
       end

+      #
+      # Return an instance of the underlying worker.
+      #
+      # @return [Cloudtasker::WorkerWrapper] The worker instance
+      #
+      def worker_instance
+        WorkerWrapper.new(worker_name: worker, job_args: args, job_queue: queue)
+      end
+
       #
       # Return the next time a job should run.
       #
@@ -275,7 +292,6 @@ module Cloudtasker
         CloudTask.delete(task_id) if task_id

         # Schedule worker
-        worker_instance = Object.const_get(worker).new
         Job.new(worker_instance).set(schedule_id: id).schedule!
       end
     end
@@ -9,6 +9,9 @@ module Cloudtasker
     # Max number of task requests sent to the processing server
     CONCURRENCY = (ENV['CLOUDTASKER_CONCURRENCY'] || 5).to_i

+    # Default number of threads to allocate to process a specific queue
+    QUEUE_CONCURRENCY = 1
+
     #
     # Stop the local server.
     #
@@ -16,7 +19,7 @@ module Cloudtasker
       @done = true

       # Terminate threads and repush tasks
-      @threads&.each do |t|
+      @threads&.values&.flatten&.each do |t|
         t.terminate
         t['task']&.retry_later(0, is_error: false)
       end
@@ -28,11 +31,21 @@ module Cloudtasker
     #
     # Start the local server
     #
+    # @param [Hash] opts Server options.
+    #
     #
-    def start
+    def start(opts = {})
+      # Extract queues to process
+      queues = opts[:queues].to_a.any? ? opts[:queues] : [[nil, CONCURRENCY]]
+
+      # Display start banner
+      queue_labels = queues.map { |n, c| "#{n || 'all'}=#{c || QUEUE_CONCURRENCY}" }.join(' ')
+      Cloudtasker.logger.info("[Cloudtasker/Server] Processing queues: #{queue_labels}")
+
+      # Start processing queues
       @start ||= Thread.new do
         until @done
-          process_jobs
+          queues.each { |(n, c)| process_jobs(n, c) }
           sleep 1
         end
         Cloudtasker.logger.info('[Cloudtasker/Server] Local server exiting...')
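The development server's `start` method now takes a `queues` option made of `[name, concurrency]` pairs; with no option it falls back to `[[nil, CONCURRENCY]]`, i.e. all queues at the global concurrency. A hedged sketch of driving it directly (instantiating the server outside the CLI, and the queue names used, are for illustration only):

    # Poll the 'critical' queue with 5 threads and 'default' with 2
    server = Cloudtasker::LocalServer.new
    server.start(queues: [['critical', 5], ['default', 2]])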
@@ -43,31 +56,40 @@ module Cloudtasker
     # Process enqueued workers.
     #
     #
-    def process_jobs
-      @threads ||= []
+    def process_jobs(queue = nil, concurrency = nil)
+      @threads ||= {}
+      @threads[queue] ||= []
+      max_threads = (concurrency || QUEUE_CONCURRENCY).to_i

       # Remove any done thread
-      @threads.select!(&:alive?)
+      @threads[queue].select!(&:alive?)

       # Process tasks
-      while @threads.count < CONCURRENCY && (task = Cloudtasker::Backend::RedisTask.pop)
-        @threads << Thread.new do
-          Thread.current['task'] = task
-          Thread.current['attempts'] = 0
+      while @threads[queue].count < max_threads && (task = Cloudtasker::Backend::RedisTask.pop(queue))
+        @threads[queue] << Thread.new { process_task(task) }
+      end
+    end

-          # Deliver task
-          begin
-            Thread.current['task'].deliver
-          rescue Errno::ECONNREFUSED => e
-            raise(e) unless Thread.current['attempts'] < 3
+    #
+    # Process a given task
+    #
+    # @param [Cloudtasker::CloudTask] task The task to process
+    #
+    def process_task(task)
+      Thread.current['task'] = task
+      Thread.current['attempts'] = 0

-            # Retry on connection error, in case the web server is not
-            # started yet.
-            Thread.current['attempts'] += 1
-            sleep(3)
-            retry
-          end
-        end
+      # Deliver task
+      begin
+        Thread.current['task'].deliver
+      rescue Errno::ECONNREFUSED => e
+        raise(e) unless Thread.current['attempts'] < 3
+
+        # Retry on connection error, in case the web server is not
+        # started yet.
+        Thread.current['attempts'] += 1
+        sleep(3)
+        retry
       end
     end
   end
@@ -4,11 +4,9 @@ require 'redis'

 module Cloudtasker
   # A wrapper with helper methods for redis
-  module RedisClient
-    module_function
-
+  class RedisClient
     # Suffix added to cache keys when locking them
-    LOCK_KEY_SUFFIX = 'lock'
+    LOCK_KEY_PREFIX = 'cloudtasker/lock'

     #
     # Return the underlying redis client.
@@ -50,9 +48,10 @@ module Cloudtasker
     # Acquire a lock on a cache entry.
     #
     # @example
-    #   RedisClient.with_lock('foo')
-    #     content = RedisClient.fetch('foo')
-    #     RedisClient.set(content.merge(bar: 'bar).to_json)
+    #   redis = RedisClient.new
+    #   redis.with_lock('foo')
+    #     content = redis.fetch('foo')
+    #     redis.set(content.merge(bar: 'bar).to_json)
     #   end
     #
     # @param [String] cache_key The cache key to access.
@@ -61,7 +60,7 @@ module Cloudtasker
       return nil unless cache_key

       # Wait to acquire lock
-      lock_key = [cache_key, LOCK_KEY_SUFFIX].join('/')
+      lock_key = [LOCK_KEY_PREFIX, cache_key].join('/')
       true until client.setnx(lock_key, true)

       # yield content
@@ -100,10 +100,10 @@ module Cloudtasker
     #
     # Return the Cloudtasker redis client.
     #
-    # @return [Class] The Cloudtasker::RedisClient wrapper.
+    # @return [Cloudtasker::RedisClient] The cloudtasker redis client.
     #
     def redis
-      Cloudtasker::RedisClient
+      @redis ||= Cloudtasker::RedisClient.new
     end

     #
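Because `RedisClient` is now a class instead of a `module_function` module, callers work with an instance. A brief before/after sketch, limited to the methods shown in the example comment above (the 'foo' key is illustrative):

    # 0.3.0: module functions
    Cloudtasker::RedisClient.fetch('foo')

    # 0.8.0: instance methods
    redis = Cloudtasker::RedisClient.new
    redis.with_lock('foo') do
      redis.fetch('foo')
    end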
@@ -1,5 +1,5 @@
 # frozen_string_literal: true

 module Cloudtasker
-  VERSION = '0.3.0'
+  VERSION = '0.8.0'
 end
@@ -6,6 +6,7 @@ module Cloudtasker
     # Add class method to including class
     def self.included(base)
       base.extend(ClassMethods)
+      base.attr_writer :job_queue
       base.attr_accessor :job_args, :job_id, :job_meta, :job_reenqueued, :job_retries
     end

@@ -32,8 +33,9 @@ module Cloudtasker
     # @return [Cloudtasker::Worker, nil] The instantiated worker.
     #
     def self.from_hash(hash)
-      # Symbolize payload keys
+      # Symbolize metadata keys and stringify job arguments
       payload = JSON.parse(hash.to_json, symbolize_names: true)
+      payload[:job_args] = JSON.parse(hash[:job_args].to_json)

       # Extract worker parameters
       klass_name = payload&.dig(:worker)
@@ -44,7 +46,7 @@ module Cloudtasker
       return nil unless worker_klass.include?(self)

       # Return instantiated worker
-      worker_klass.new(payload.slice(:job_args, :job_id, :job_meta, :job_retries))
+      worker_klass.new(payload.slice(:job_queue, :job_args, :job_id, :job_meta, :job_retries))
     rescue NameError
       nil
     end
@@ -80,7 +82,7 @@ module Cloudtasker
       # @return [Cloudtasker::CloudTask] The Google Task response
       #
       def perform_async(*args)
-        perform_in(nil, *args)
+        schedule(args: args)
       end

       #
@@ -92,7 +94,7 @@ module Cloudtasker
       # @return [Cloudtasker::CloudTask] The Google Task response
       #
       def perform_in(interval, *args)
-        new(job_args: args).schedule(interval: interval)
+        schedule(args: args, time_in: interval)
       end

       #
@@ -104,7 +106,21 @@ module Cloudtasker
       # @return [Cloudtasker::CloudTask] The Google Task response
       #
       def perform_at(time_at, *args)
-        new(job_args: args).schedule(time_at: time_at)
+        schedule(args: args, time_at: time_at)
+      end
+
+      #
+      # Enqueue a worker with explicity options.
+      #
+      # @param [Array<any>] args The job arguments.
+      # @param [Time, Integer] time_in The delay in seconds.
+      # @param [Time, Integer] time_at The time at which the job should run.
+      # @param [String, Symbol] queue The queue on which the worker should run.
+      #
+      # @return [Cloudtasker::CloudTask] The Google Task response
+      #
+      def schedule(args: nil, time_in: nil, time_at: nil, queue: nil)
+        new(job_args: args, job_queue: queue).schedule({ interval: time_in, time_at: time_at }.compact)
       end

       #
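`perform_async`, `perform_in` and `perform_at` now all funnel into the new `schedule` class method, which is also the one that accepts a per-call queue. A usage sketch with an illustrative worker class:

    MyWorker.perform_async('foo')     # enqueue immediately on the worker's queue
    MyWorker.perform_in(60, 'foo')    # enqueue with a 60 second delay
    MyWorker.schedule(args: ['foo'], time_in: 60, queue: 'critical')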
@@ -123,11 +139,30 @@ module Cloudtasker
     # @param [Array<any>] job_args The list of perform args.
     # @param [String] job_id A unique ID identifying this job.
     #
-    def initialize(job_args: [], job_id: nil, job_meta: {}, job_retries: 0)
-      @job_args = job_args
+    def initialize(job_queue: nil, job_args: nil, job_id: nil, job_meta: {}, job_retries: 0)
+      @job_args = job_args || []
       @job_id = job_id || SecureRandom.uuid
       @job_meta = MetaStore.new(job_meta)
       @job_retries = job_retries || 0
+      @job_queue = job_queue
+    end
+
+    #
+    # Return the class name of the worker.
+    #
+    # @return [String] The class name.
+    #
+    def job_class_name
+      self.class.to_s
+    end
+
+    #
+    # Return the queue to use for this worker.
+    #
+    # @return [String] The name of queue.
+    #
+    def job_queue
+      (@job_queue ||= self.class.cloudtasker_options_hash[:queue] || Config::DEFAULT_JOB_QUEUE).to_s
     end

     #
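As `job_queue` shows, the queue is resolved from the per-job value, then the worker-level option, then `Config::DEFAULT_JOB_QUEUE`. A sketch of pinning a worker to a queue, assuming the `cloudtasker_options` class macro that backs `cloudtasker_options_hash`:

    class CriticalWorker
      include Cloudtasker::Worker

      # jobs are pushed to the 'critical' queue (full name '<gcp_queue_prefix>-critical')
      cloudtasker_options queue: 'critical'

      def perform(user_id)
        # ...
      end
    end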
@@ -198,10 +233,10 @@ module Cloudtasker
     # Return a new instance of the worker with the same args and metadata
     # but with a different id.
     #
-    # @return [<Type>] <description>
+    # @return [Cloudtasker::Worker] <description>
     #
     def new_instance
-      self.class.new(job_args: job_args, job_meta: job_meta)
+      self.class.new(job_queue: job_queue, job_args: job_args, job_meta: job_meta)
     end

     #
@@ -215,7 +250,8 @@ module Cloudtasker
         job_id: job_id,
         job_args: job_args,
         job_meta: job_meta.to_h,
-        job_retries: job_retries
+        job_retries: job_retries,
+        job_queue: job_queue
       }
     end
