workerholic 0.0.15 → 0.0.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 32b95e0d805dd75e55329784017505a16ec2f09a
-  data.tar.gz: 33186490232cb74d67502377948d138726a0471e
+  metadata.gz: dfae6532e0efa5d65df17f7809ecc5130e50d85b
+  data.tar.gz: b23e1f15e6ba5f1bc1494f98c01eefda76969bea
 SHA512:
-  metadata.gz: 3b5a55dd7464d82c4565630f715518421946146228c0bc0334e90d7964db3ec51eafd3f985535d1f8a24f2be94ff828603ace9a8102cc06a36217ec596b503e0
-  data.tar.gz: c1c000ea9636e25a563f2577e142d875f6c311837395f012f156060856d77a6173a3d2d7faacaeed2a01e2d4d6138788b8eaa66306eaf6f9a3a72f7d2b7d6b67
+  metadata.gz: 573cd2b6034209e578c3305ffdcec17a6a2da39d20f74b84e747a8a2a7e87b8f7ffc7c4158cdd5f3a39183069151af4bf5ddd7b5a96ec27ec782febdce0cedb1
+  data.tar.gz: fb8a13ed3494db31e9177ddbddcac240121b3bd24ec603aa4f521bc8b676b6df49af99e3af87791a49bdb3619a4eaa3171b75edc7203925f7ff256e8b8263b1a
data/Gemfile CHANGED
@@ -8,6 +8,7 @@ gem 'sinatra'
 
 group :development, :test do
   gem 'pry-byebug'
+  gem 'sinatra-reloader'
 end
 
 group :test do
data/app_test/bm.rb ADDED
@@ -0,0 +1,45 @@
+require 'benchmark'
+
+class FibCruncher
+  def self.perform(n)
+    a, b = 0, 1
+    while b < n
+      a, b = b, a + b
+    end
+    b
+  end
+end
+
+def run_benchmark(n)
+  Benchmark.bm do |r|
+    r.report do
+      n.times do
+        FibCruncher.perform(1_000)
+      end
+    end
+
+    r.report do
+      n.times do
+        FibCruncher.perform(1_000_00)
+      end
+    end
+
+    r.report do
+      n.times do
+        FibCruncher.perform(1_000_000)
+      end
+    end
+
+    r.report do
+      n.times do
+        FibCruncher.perform(1_000_000_0)
+      end
+    end
+
+    r.report do
+      n.times do
+        FibCruncher.perform(1_000_000_000)
+      end
+    end
+  end
+end
data/app_test/job_test.rb CHANGED
@@ -4,7 +4,7 @@ require 'prime'
 
 class JobTestFast
   include Workerholic::Job
-  job_options queue_name: 'workerholic:queue:job_fast'
+  job_options queue_name: 'job_fast'
 
   def perform(str, num)
     str
@@ -13,7 +13,7 @@ end
 
 class JobTestSlow
   include Workerholic::Job
-  job_options queue_name: 'workerholic:queue:job_slow'
+  job_options queue_name: 'job_slow'
 
   def perform(str, num)
     sleep(0.5)
@@ -46,8 +46,18 @@ end
 class HeavyCalculation
   include Workerholic::Job
 
-  def perform(n, arr)
-    arr = bubble_sort(arr)
+  def perform(arr_size)
+    #arr = bubble_sort(arr)
+    generate_large_array(arr_size)
+  end
+
+  def generate_large_array(arr_size)
+    result = []
+    arr_size.times do
+      result << rand(50 ** 10)
+    end
+
+    result
   end
 
   def bubble_sort(array)
@@ -87,6 +97,15 @@ class FutureJob
   end
 end
 
+class FutureJobWithQueue
+  include Workerholic::Job
+  job_options queue_name: 'specific_queue_for_delayed_jobs'
+
+  def perform(n)
+    n
+  end
+end
+
 class FailedJob
   include Workerholic::Job
 
@@ -94,3 +113,26 @@ class FailedJob
     raise Exception
   end
 end
+
+class FailedJobWithQueue
+  include Workerholic::Job
+  job_options queue_name: 'specific_queue_for_failed_jobs'
+
+  def perform(n)
+    raise Exception
+  end
+end
+
+class FibCruncher
+  include Workerholic::Job
+
+  def perform(n)
+    a, b = 0, 1
+
+    while b < n
+      a, b = b, a + b
+    end
+
+    puts b
+  end
+end
data/app_test/run.rb CHANGED
@@ -27,11 +27,9 @@ module TestRunner
     end
   end
 
-  def self.sort_array(num_of_cycles, array_size)
-    unsorted_array = (0..array_size).to_a.shuffle
-
-    num_of_cycles.times do |n|
-      HeavyCalculation.new.perform_async(n, unsorted_array)
+  def self.generate_array(num_of_cycles, array_size)
+    num_of_cycles.times do
+      HeavyCalculation.new.perform_async(array_size)
     end
   end
 
@@ -49,7 +47,13 @@ module TestRunner
 
   def self.enqueue_delayed(num_of_cycles)
     num_of_cycles.times do |n|
-      FutureJob.new.perform_delayed(100, n)
+      FutureJob.new.perform_delayed(10, n)
+    end
+  end
+
+  def self.enqueue_delayed_with_queue(num_of_cycles)
+    num_of_cycles.times do |n|
+      FutureJobWithQueue.new.perform_delayed(10, n)
     end
   end
 
@@ -58,7 +62,34 @@ module TestRunner
       FailedJob.new.perform_async(n)
     end
   end
+
+  def self.failed_jobs_with_queue(num_of_cycles)
+    num_of_cycles.times do |n|
+      FailedJobWithQueue.new.perform_async(n)
+    end
+  end
+
+  def self.multiple_processes(num_of_proc, jobs)
+    pids = (1..num_of_proc).to_a.map do
+      fork do
+        jobs.each do |job|
+          TestRunner.send(:job[0], job[1..-1])
+        end
+
+        exit
+      end
+    end
+
+    pids.each { |pid| Process.wait(pid) }
+  end
+
+  def self.fibonacci_cruncher(num_of_cycles)
+    num_of_cycles.times do |n|
+      FibCruncher.new.perform_async(1_000)
+    end
+  end
 end
 
-#TestRunner.non_blocking(10)
-TestRunner.failed_jobs(1)
+TestRunner.non_blocking(200)
+TestRunner.blocking(50000)
+TestRunner.fibonacci_cruncher(100)
@@ -10,7 +10,7 @@ module Workerholic
    define_method(:specified_job_options) do
      {
        execute_at: params[:execute_at],
-       queue_name: params[:queue_name] || 'workerholic:queue:main'
+       queue_name: 'workerholic:queue:' + (params[:queue_name] || 'main')
      }
    end
  end
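
With this change a job declares only a bare queue name via job_options and the 'workerholic:queue:' prefix is added internally (see the JobTestFast/JobTestSlow updates above). A minimal usage sketch, with a hypothetical job class:

  # Sketch only: ThumbnailJob and its queue name are illustrative, not part of the gem.
  class ThumbnailJob
    include Workerholic::Job
    job_options queue_name: 'thumbnails'   # stored under the Redis key 'workerholic:queue:thumbnails'

    def perform(image_id)
      image_id
    end
  end

  ThumbnailJob.new.perform_async(42)       # enqueues onto the namespaced queue
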
@@ -41,7 +41,12 @@ module Workerholic
    def prepare_job_for_enqueueing(args)
      raise ArgumentError if self.method(:perform).arity != args.size
 
-      job = JobWrapper.new(klass: @class || self.class, arguments: args, wrapper: self.class)
+      job = JobWrapper.new(
+        klass: @class || self.class,
+        arguments: args,
+        wrapper: self.class,
+        queue: specified_job_options[:queue_name]
+      )
 
      job.statistics.enqueued_at = Time.now.to_f
 
@@ -16,6 +16,7 @@ module Workerholic
      job.statistics.completed_at = Time.now.to_f
 
      StatsStorage.save_job('completed_jobs', job)
+      StatsStorage.update_historical_stats('completed_jobs', job.klass.name)
 
      # @logger.info("Completed: your job from class #{job.klass} was completed on #{job.statistics.completed_at}.")
    rescue Exception => e
@@ -34,6 +35,7 @@ module Workerholic
    else
      job.statistics.failed_on = Time.now.to_f
      StatsStorage.save_job('failed_jobs', job)
+      StatsStorage.update_historical_stats('failed_jobs', job.klass.name)
 
      # @logger.error("Failed: your job from class #{job.class} was unsuccessful.")
    end
@@ -1,11 +1,10 @@
 module Workerholic
   class JobScheduler
-    attr_reader :sorted_set, :queue, :scheduler_thread
+    attr_reader :sorted_set, :scheduler_thread
     attr_accessor :alive
 
     def initialize(opts={})
       @sorted_set = opts[:sorted_set] || SortedSet.new
-      @queue = Queue.new(opts[:queue_name] || 'workerholic:queue:main')
       @alive = true
     end
 
@@ -31,8 +30,12 @@ module Workerholic
      if job_due?
        while job_due?
          serialized_job, job_execution_time = sorted_set.peek
-          sorted_set.remove(job_execution_time)
+          job = JobSerializer.deserialize(serialized_job)
+          queue = job.queue ? Queue.new(job.queue) : Queue.new
+
          queue.enqueue(serialized_job)
+
+          sorted_set.remove(job_execution_time)
        end
      else
        sleep(2)
@@ -1,12 +1,13 @@
 module Workerholic
   class JobWrapper
     attr_accessor :retry_count, :execute_at
-    attr_reader :klass, :arguments, :statistics, :wrapper
+    attr_reader :klass, :arguments, :statistics, :wrapper, :queue
 
     def initialize(options={})
       @klass = options[:klass]
       @wrapper = options[:wrapper]
       @arguments = options[:arguments]
+      @queue = options[:queue]
       @execute_at = options[:execute_at]
       @retry_count = options[:retry_count] || 0
       @statistics = JobStatistics.new(options[:statistics] || {})
@@ -17,6 +18,7 @@ module Workerholic
        klass: klass,
        wrapper: wrapper,
        arguments: arguments,
+        queue: queue,
        retry_count: retry_count,
        execute_at: execute_at,
        statistics: statistics.to_hash
@@ -7,12 +7,13 @@ module Workerholic
    def self.start
      apply_options
      load_app
-      track_memory_usage
+      track_memory_usage_and_expire_job_stats
      launch
    end
 
    def self.kill_memory_tracker_thread
      @thread.kill
+      StatsStorage.delete_memory_stats
    end
 
    private
@@ -67,21 +68,16 @@ module Workerholic
      exit
    end
 
-    def self.track_memory_usage
-      cleanup_old_memory_stats
-
+    def self.track_memory_usage_and_expire_job_stats
      @thread = Thread.new do
        loop do
          sleep 5
          StatsStorage.save_processes_memory_usage
+          StatsStorage.delete_expired_job_stats
        end
      end
    end
 
-    def self.cleanup_old_memory_stats
-      StatsStorage.delete_memory_stats
-    end
-
    def self.launch
      if options[:processes] && options[:processes] > 1
        begin
@@ -1,39 +1,52 @@
 module Workerholic
   class StatsAPI
     CATEGORIES = %w(completed_jobs failed_jobs)
+    POLLING_INTERVAL = 10
 
     def self.job_statistics(options={})
-      if CATEGORIES.include? options[:category]
-        job_classes = storage.get_keys_for_namespace('workerholic:stats:' + options[:category] + ':*')
+      raise ArgumentError, "Please specify one of the following categories: 'completed_jobs', 'failed_jobs'" unless CATEGORIES.include? options[:category]
 
-        if options[:count_only]
-          self.parse_job_classes(job_classes)
-        else
-          self.parse_job_classes(job_classes, false)
-        end
+      job_classes = storage.get_keys_for_namespace("workerholic:stats:#{options[:category]}:*")
+
+      if options[:count_only]
+        parse_job_classes(job_classes)
       else
-        logger("Invalid arguments. Please specify one of the following categories:\n'completed_jobs', 'failed_jobs'.")
+        parse_job_classes(job_classes, false)
+      end
+    end
+
+    def self.job_statistics_history(category)
+      raise ArgumentError, "Please specify one of the following categories: 'completed_jobs', 'failed_jobs'" unless CATEGORIES.include? category
+
+      current_time = Time.now.to_i
+      all_job_stats(category).reduce([]) do |result, job|
+        completed_time = job.last.to_i
+        index = (current_time - completed_time) / POLLING_INTERVAL
+
+        result[index] = result[index] ? result[index] + 1 : 1
+
+        result
      end
    end
 
    def self.scheduled_jobs(options={})
      namespace = 'workerholic:scheduled_jobs'
      if options[:count_only]
-        storage.sorted_set_members_count(namespace)
+        storage.sorted_set_size(namespace)
      else
-        serialized_jobs = storage.sorted_set_members(namespace)
+        serialized_jobs = storage.sorted_set_all_members(namespace)
        parse_scheduled_jobs(serialized_jobs)
      end
    end
 
-    def self.jobs_classes
-      classes = storage.get_keys_for_namespace('workerholic:stats:*')
+    def self.jobs_classes(historical)
+      base_namespace = historical ? 'workerholic:stats:historical:' : 'workerholic:stats:'
 
-      parsed_classes = classes.map do |klass|
-        klass.split(':').last
-      end.uniq
+      completed_classes = storage.get_keys_for_namespace( base_namespace + 'completed_jobs:*')
+      failed_classes = storage.get_keys_for_namespace(base_namespace + 'failed_jobs:*')
+      combined_classes = completed_classes + failed_classes
 
-      parsed_classes.empty? ? 'No class data is available yet.' : parsed_classes
+      combined_classes.map { |klass| klass.split(':').last }.uniq
    end
 
    def self.queued_jobs
@@ -55,38 +68,62 @@ module Workerholic
      storage.hash_keys(namespace)
    end
 
+    def self.history_for_period(options={})
+      raise ArgumentError, 'Please provide a category namespace' unless options[:category]
+
+      if options[:klass]
+        namespace = "workerholic:stats:historical:#{options[:category]}:#{options[:klass]}"
+      else
+        namespace = "workerholic:stats:historical:#{options[:category]}"
+      end
+
+      period = options[:period] || 30
+      date_ranges = get_past_dates(period)
+
+      job_counts = storage.hash_get_multiple_elements(namespace, date_ranges)
+
+      combine_ranges(job_counts: job_counts, date_ranges: date_ranges)
+    end
+
    private
 
-    def self.storage
-      @storage ||= Storage::RedisWrapper.new
+    def self.combine_ranges(options={})
+      job_counts = options[:job_counts]
+      job_counts.map!(&:to_i)
+
+      {
+        date_ranges: options[:date_ranges],
+        job_counts: job_counts
+      }
    end
 
-    def self.logger(message)
-      @log ||= LogManager.new
+    def self.get_past_dates(days)
+      today = Time.now.utc.to_i - Time.now.utc.to_i % 86400
+
+      (0..days).map { |day| today - day * 86400 }
    end
 
    def self.parse_scheduled_jobs(jobs)
      jobs.map do |job|
        deserialized_job = JobSerializer.deserialize_stats(job)
-        self.convert_klass_to_string(deserialized_job)
+        convert_klass_to_string(deserialized_job)
      end
    end
 
    def self.parse_job_classes(job_classes, count_only = true)
      job_classes.map do |job_class|
        if count_only
-          self.jobs_per_class(job_class)
+          jobs_per_class(job_class)
        else
-          self.get_jobs_for_class(job_class)
+          get_jobs_for_class(job_class)
        end
      end
    end
 
    def self.get_jobs_for_class(job_class)
-      serialized_jobs = storage.get_all_elements_from_list(job_class)
+      serialized_jobs = storage.sorted_set_all_members(job_class)
      deserialized_stats = serialized_jobs.map do |serialized_job|
-        deserialized_job = JobSerializer.deserialize_stats(serialized_job)
-        self.convert_klass_to_string(deserialized_job)
+        JobSerializer.deserialize_stats(serialized_job)
      end
 
      deserialized_stats << deserialized_stats.size
@@ -94,13 +131,29 @@ module Workerholic
 
    def self.jobs_per_class(job_class)
      clean_class_name = job_class.split(':').last
-      [clean_class_name, storage.list_length(job_class)]
+      [clean_class_name, storage.sorted_set_size(job_class)]
    end
 
    def self.convert_klass_to_string(obj)
      obj[:klass] = obj[:klass].to_s
-      obj[:wrapper] = obj[:wrapper].to_s
+      obj[:wrapper] = nil
      obj
    end
+
+    def self.storage
+      @storage ||= Storage::RedisWrapper.new
+    end
+
+    def self.logger(message)
+      @log ||= LogManager.new
+    end
+
+    def self.all_job_stats(category)
+      current_time = Time.now.to_i
+
+      jobs_classes(false).map do |klass|
+        storage.sorted_set_range_members("workerholic:stats:#{category}:#{klass}", current_time - 100 * POLLING_INTERVAL, current_time)
+      end.flatten(1)
+    end
  end
end
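
job_statistics_history groups recent completions into POLLING_INTERVAL-second buckets: each job's completion timestamp becomes an array index, and all_job_stats only reads scores from the last 100 * POLLING_INTERVAL seconds. A small worked sketch of that arithmetic (the timestamps are illustrative):

  POLLING_INTERVAL = 10                      # seconds per bucket, as defined in StatsAPI
  current_time   = Time.now.to_i
  completed_time = current_time - 37         # a job that finished 37 seconds ago

  index = (current_time - completed_time) / POLLING_INTERVAL
  # => 3, so the job is counted in the fourth bucket; with a 100-bucket window
  # the history covers roughly the last 1,000 seconds.
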
@@ -1,11 +1,21 @@
 module Workerholic
   class StatsStorage
-
     def self.save_job(category, job)
-      serialized_job_stats = JobSerializer.serialize(job)
+      job_hash = job.to_hash
+      job_hash[:klass] = job.klass.to_s
+      serialized_job_stats = JobSerializer.serialize(job_hash)
 
       namespace = "workerholic:stats:#{category}:#{job.klass}"
-      storage.push(namespace, serialized_job_stats)
+      storage.add_to_set(namespace, job.statistics.completed_at, serialized_job_stats)
+    end
+
+    def self.update_historical_stats(category, klass)
+      current_day_secs = Time.now.utc.to_i - (Time.now.utc.to_i % 86400)
+      namespace = "workerholic:stats:historical:#{category}"
+      namespace_with_class = "workerholic:stats:historical:#{category}:#{klass}"
+
+      storage.hash_increment_field(namespace, current_day_secs, 1)
+      storage.hash_increment_field(namespace_with_class, current_day_secs, 1)
     end
 
     def self.save_processes_memory_usage
@@ -19,6 +29,14 @@ module Workerholic
      storage.delete('workerholic:stats:memory:processes')
    end
 
+    def self.delete_expired_job_stats
+      max_time = Time.now.to_i - 1001
+      StatsAPI.jobs_classes(false).each do |klass|
+        storage.remove_range_from_set("workerholic:stats:completed_jobs:#{klass}", 0, max_time)
+        storage.remove_range_from_set("workerholic:stats:failed_jobs:#{klass}", 0, max_time)
+      end
+    end
+
    class << self
      private
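
update_historical_stats keeps one counter per UTC day: the current time is truncated to midnight UTC and used as the hash field that gets incremented, both for the category as a whole and per job class. A sketch of the resulting keys (MyJob is a placeholder class name):

  now = Time.now.utc.to_i
  current_day_secs = now - (now % 86400)   # midnight UTC of the current day, in epoch seconds

  # Equivalent Redis commands, issued through hash_increment_field:
  #   HINCRBY workerholic:stats:historical:completed_jobs        <current_day_secs> 1
  #   HINCRBY workerholic:stats:historical:completed_jobs:MyJob  <current_day_secs> 1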
 
@@ -1,6 +1,5 @@
 module Workerholic
   class Storage
-    # Wraps redis-rb gem methods for enqueueing/dequeuing purposes
     class RedisWrapper
       attr_reader :redis, :retries
 
@@ -21,6 +20,22 @@ module Workerholic
        execute(retry_delay) { |conn| conn.hget(key, field) }
      end
 
+      def hash_get_all(key, retry_delay = 5)
+        execute(retry_delay) { |conn| conn.hgetall(key) }
+      end
+
+      def hash_keys(namespace, retry_delay = 5)
+        execute(retry_delay) { |conn| conn.hkeys(namespace) }
+      end
+
+      def hash_get_multiple_elements(key, fields, retry_delay = 5)
+        execute(retry_delay) { |conn| conn.hmget(key, *fields) }
+      end
+
+      def hash_increment_field(key, field, increment, retry_delay = 5)
+        execute(retry_delay) { |conn| conn.hincrby(key, field, increment) }
+      end
+
      def delete(key, retry_delay = 5)
        execute(retry_delay) { |conn| conn.del(key) }
      end
@@ -51,15 +66,19 @@ module Workerholic
      end
 
      def sorted_set_size(key, retry_delay = 5)
-        execute(retry_delay) { |conn| conn.zcount(key, 0, '+inf') }
+        execute(retry_delay) { |conn| conn.zcard(key) }
      end
 
-      def sorted_set_members(key, retry_delay = 5)
+      def sorted_set_all_members(key, retry_delay = 5)
        execute(retry_delay) { |conn| conn.zrange(key, 0, -1) }
      end
 
-      def sorted_set_members_count(key, retry_delay = 5)
-        execute(retry_delay) { |conn| conn.zcard(key) }
+      def sorted_set_range_members(key, minscore, maxscore, retry_delay = 5)
+        execute(retry_delay) { |conn| conn.zrangebyscore(key, minscore, maxscore, with_scores: true) }
+      end
+
+      def remove_range_from_set(key, minscore, maxscore, retry_delay = 5)
+        execute(retry_delay) { |conn| conn.zremrangebyscore(key, minscore, maxscore) }
      end
 
      def keys_count(namespace, retry_delay = 5)
@@ -67,9 +86,9 @@ module Workerholic
      end
 
      def fetch_queue_names(retry_delay = 5)
-        queue_name_pattern = $TESTING ? 'workerholic:testing:queue*' : 'workerholic:queue*'
+        queue_name_pattern = 'workerholic:queue:*'
 
-        execute(retry_delay) { |conn| conn.scan(0, match: queue_name_pattern).last }
+        execute(retry_delay) { |conn| conn.keys(queue_name_pattern) }
      end
 
      def get_keys_for_namespace(namespace, retry_delay = 5)
@@ -80,18 +99,6 @@ module Workerholic
        execute(retry_delay) { |conn| conn.lrange(key, 0, -1) }
      end
 
-      def hash_get(key, field, retry_delay = 5)
-        execute(retry_delay) { |conn| conn.hget(key, field) }
-      end
-
-      def hash_get_all(key, retry_delay = 5)
-        execute(retry_delay) { |conn| conn.hgetall(key) }
-      end
-
-      def hash_keys(namespace, retry_delay = 5)
-        execute(retry_delay) { |conn| conn.hkeys(namespace) }
-      end
-
      class RedisCannotRecover < Redis::CannotConnectError; end
 
      private
@@ -1,3 +1,3 @@
 module Workerholic
-  VERSION = '0.0.15'
+  VERSION = '0.0.16'
 end
data/lib/workerholic.rb CHANGED
@@ -32,6 +32,7 @@ require 'workerholic/adapters/active_job_adapter' if defined?(Rails)
 
 module Workerholic
   PIDS = [Process.pid]
+  REDIS_URL = ENV['REDIS_URL'] || 'redis://localhost:' + ($TESTING ? '1234' : '6379')
 
   def self.workers_count
     @workers_count || 25
@@ -43,6 +44,8 @@ module Workerholic
  end
 
  def self.redis_pool
-    @redis ||= ConnectionPool.new(size: workers_count + 5, timeout: 5) { Redis.new }
+    @redis ||= ConnectionPool.new(size: workers_count + 5, timeout: 5) do
+      Redis.new(url: REDIS_URL)
+    end
  end
end
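
The connection pool now reads REDIS_URL from the environment, falling back to a local Redis on port 6379 (or 1234 when $TESTING is set). Since the constant is evaluated when the gem is loaded, the variable has to be set before require 'workerholic'. A usage sketch with a placeholder URL:

  ENV['REDIS_URL'] = 'redis://redis.example.internal:6380/2'   # placeholder; set before loading the gem

  require 'workerholic'
  Workerholic.redis_pool.with { |conn| conn.ping }   # connections in the pool use REDIS_URL
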
@@ -1,7 +1,9 @@
-TEST_QUEUE = 'workerholic:testing:queue:test_queue'
-ANOTHER_TEST_QUEUE = 'workerholic:testing:queue:another_test_queue'
-BALANCER_TEST_QUEUE = 'workerholic:testing:queue:balancer_test_queue'
-ANOTHER_BALANCER_TEST_QUEUE = 'workerholic:testing:queue:another_balancer_test_queue'
+WORKERHOLIC_QUEUE_NAMESPACE = 'workerholic:queue:'
+
+TEST_QUEUE = 'test_queue'
+ANOTHER_TEST_QUEUE = 'another_test_queue'
+BALANCER_TEST_QUEUE = 'balancer_test_queue'
+ANOTHER_BALANCER_TEST_QUEUE = 'another_balancer_test_queue'
 TEST_SCHEDULED_SORTED_SET = 'workerholic:testing:scheduled_jobs'
 HASH_TEST = 'workerholic:testing:hash_test'