sidekiq-unique-jobs 6.0.23 → 7.1.12

Potentially problematic release: this version of sidekiq-unique-jobs might be problematic.

Files changed (127) (a v6 → v7 migration sketch follows the file list)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +922 -41
  3. data/README.md +821 -284
  4. data/lib/sidekiq_unique_jobs/batch_delete.rb +123 -0
  5. data/lib/sidekiq_unique_jobs/changelog.rb +78 -0
  6. data/lib/sidekiq_unique_jobs/cli.rb +34 -31
  7. data/lib/sidekiq_unique_jobs/config.rb +314 -0
  8. data/lib/sidekiq_unique_jobs/connection.rb +6 -5
  9. data/lib/sidekiq_unique_jobs/constants.rb +45 -24
  10. data/lib/sidekiq_unique_jobs/core_ext.rb +80 -0
  11. data/lib/sidekiq_unique_jobs/deprecation.rb +65 -0
  12. data/lib/sidekiq_unique_jobs/digests.rb +70 -102
  13. data/lib/sidekiq_unique_jobs/exceptions.rb +88 -12
  14. data/lib/sidekiq_unique_jobs/job.rb +41 -12
  15. data/lib/sidekiq_unique_jobs/json.rb +47 -0
  16. data/lib/sidekiq_unique_jobs/key.rb +93 -0
  17. data/lib/sidekiq_unique_jobs/lock/base_lock.rb +111 -82
  18. data/lib/sidekiq_unique_jobs/lock/client_validator.rb +28 -0
  19. data/lib/sidekiq_unique_jobs/lock/server_validator.rb +27 -0
  20. data/lib/sidekiq_unique_jobs/lock/until_and_while_executing.rb +40 -15
  21. data/lib/sidekiq_unique_jobs/lock/until_executed.rb +25 -7
  22. data/lib/sidekiq_unique_jobs/lock/until_executing.rb +22 -2
  23. data/lib/sidekiq_unique_jobs/lock/until_expired.rb +26 -16
  24. data/lib/sidekiq_unique_jobs/lock/validator.rb +96 -0
  25. data/lib/sidekiq_unique_jobs/lock/while_executing.rb +23 -12
  26. data/lib/sidekiq_unique_jobs/lock/while_executing_reject.rb +3 -3
  27. data/lib/sidekiq_unique_jobs/lock.rb +325 -0
  28. data/lib/sidekiq_unique_jobs/lock_args.rb +123 -0
  29. data/lib/sidekiq_unique_jobs/lock_config.rb +126 -0
  30. data/lib/sidekiq_unique_jobs/lock_digest.rb +79 -0
  31. data/lib/sidekiq_unique_jobs/lock_info.rb +68 -0
  32. data/lib/sidekiq_unique_jobs/lock_timeout.rb +62 -0
  33. data/lib/sidekiq_unique_jobs/lock_ttl.rb +77 -0
  34. data/lib/sidekiq_unique_jobs/locksmith.rb +275 -102
  35. data/lib/sidekiq_unique_jobs/logging/middleware_context.rb +44 -0
  36. data/lib/sidekiq_unique_jobs/logging.rb +188 -33
  37. data/lib/sidekiq_unique_jobs/lua/delete.lua +51 -0
  38. data/lib/sidekiq_unique_jobs/lua/delete_by_digest.lua +42 -0
  39. data/lib/sidekiq_unique_jobs/lua/delete_job_by_digest.lua +38 -0
  40. data/lib/sidekiq_unique_jobs/lua/find_digest_in_queues.lua +26 -0
  41. data/lib/sidekiq_unique_jobs/lua/lock.lua +93 -0
  42. data/lib/sidekiq_unique_jobs/lua/locked.lua +35 -0
  43. data/lib/sidekiq_unique_jobs/lua/queue.lua +87 -0
  44. data/lib/sidekiq_unique_jobs/lua/reap_orphans.lua +94 -0
  45. data/lib/sidekiq_unique_jobs/lua/shared/_common.lua +40 -0
  46. data/lib/sidekiq_unique_jobs/lua/shared/_current_time.lua +8 -0
  47. data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_queue.lua +22 -0
  48. data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_sorted_set.lua +18 -0
  49. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_process_set.lua +53 -0
  50. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_queues.lua +43 -0
  51. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_sorted_set.lua +24 -0
  52. data/lib/sidekiq_unique_jobs/lua/shared/_hgetall.lua +13 -0
  53. data/lib/sidekiq_unique_jobs/lua/shared/_upgrades.lua +3 -0
  54. data/lib/sidekiq_unique_jobs/lua/unlock.lua +102 -0
  55. data/lib/sidekiq_unique_jobs/lua/update_version.lua +40 -0
  56. data/lib/sidekiq_unique_jobs/lua/upgrade.lua +68 -0
  57. data/lib/sidekiq_unique_jobs/middleware/client.rb +40 -0
  58. data/lib/sidekiq_unique_jobs/middleware/server.rb +29 -0
  59. data/lib/sidekiq_unique_jobs/middleware.rb +29 -31
  60. data/lib/sidekiq_unique_jobs/normalizer.rb +4 -4
  61. data/lib/sidekiq_unique_jobs/on_conflict/log.rb +9 -5
  62. data/lib/sidekiq_unique_jobs/on_conflict/null_strategy.rb +1 -1
  63. data/lib/sidekiq_unique_jobs/on_conflict/raise.rb +1 -1
  64. data/lib/sidekiq_unique_jobs/on_conflict/reject.rb +61 -15
  65. data/lib/sidekiq_unique_jobs/on_conflict/replace.rb +54 -14
  66. data/lib/sidekiq_unique_jobs/on_conflict/reschedule.rb +16 -5
  67. data/lib/sidekiq_unique_jobs/on_conflict/strategy.rb +25 -6
  68. data/lib/sidekiq_unique_jobs/on_conflict.rb +23 -10
  69. data/lib/sidekiq_unique_jobs/options_with_fallback.rb +35 -32
  70. data/lib/sidekiq_unique_jobs/orphans/lua_reaper.rb +29 -0
  71. data/lib/sidekiq_unique_jobs/orphans/manager.rb +248 -0
  72. data/lib/sidekiq_unique_jobs/orphans/null_reaper.rb +24 -0
  73. data/lib/sidekiq_unique_jobs/orphans/observer.rb +42 -0
  74. data/lib/sidekiq_unique_jobs/orphans/reaper.rb +114 -0
  75. data/lib/sidekiq_unique_jobs/orphans/reaper_resurrector.rb +170 -0
  76. data/lib/sidekiq_unique_jobs/orphans/ruby_reaper.rb +231 -0
  77. data/lib/sidekiq_unique_jobs/redis/entity.rb +112 -0
  78. data/lib/sidekiq_unique_jobs/redis/hash.rb +56 -0
  79. data/lib/sidekiq_unique_jobs/redis/list.rb +32 -0
  80. data/lib/sidekiq_unique_jobs/redis/set.rb +32 -0
  81. data/lib/sidekiq_unique_jobs/redis/sorted_set.rb +86 -0
  82. data/lib/sidekiq_unique_jobs/redis/string.rb +49 -0
  83. data/lib/sidekiq_unique_jobs/redis.rb +11 -0
  84. data/lib/sidekiq_unique_jobs/reflectable.rb +26 -0
  85. data/lib/sidekiq_unique_jobs/reflections.rb +79 -0
  86. data/lib/sidekiq_unique_jobs/rspec/matchers/have_valid_sidekiq_options.rb +51 -0
  87. data/lib/sidekiq_unique_jobs/rspec/matchers.rb +26 -0
  88. data/lib/sidekiq_unique_jobs/script/caller.rb +127 -0
  89. data/lib/sidekiq_unique_jobs/script.rb +15 -0
  90. data/lib/sidekiq_unique_jobs/server.rb +61 -0
  91. data/lib/sidekiq_unique_jobs/sidekiq_unique_ext.rb +114 -65
  92. data/lib/sidekiq_unique_jobs/sidekiq_unique_jobs.rb +241 -35
  93. data/lib/sidekiq_unique_jobs/sidekiq_worker_methods.rb +18 -16
  94. data/lib/sidekiq_unique_jobs/testing.rb +62 -21
  95. data/lib/sidekiq_unique_jobs/timer_task.rb +78 -0
  96. data/lib/sidekiq_unique_jobs/timing.rb +58 -0
  97. data/lib/sidekiq_unique_jobs/unlockable.rb +20 -4
  98. data/lib/sidekiq_unique_jobs/update_version.rb +25 -0
  99. data/lib/sidekiq_unique_jobs/upgrade_locks.rb +155 -0
  100. data/lib/sidekiq_unique_jobs/version.rb +3 -1
  101. data/lib/sidekiq_unique_jobs/version_check.rb +23 -4
  102. data/lib/sidekiq_unique_jobs/web/helpers.rb +128 -13
  103. data/lib/sidekiq_unique_jobs/web/views/_paging.erb +4 -4
  104. data/lib/sidekiq_unique_jobs/web/views/changelogs.erb +54 -0
  105. data/lib/sidekiq_unique_jobs/web/views/lock.erb +108 -0
  106. data/lib/sidekiq_unique_jobs/web/views/locks.erb +54 -0
  107. data/lib/sidekiq_unique_jobs/web.rb +57 -27
  108. data/lib/sidekiq_unique_jobs.rb +52 -7
  109. data/lib/tasks/changelog.rake +15 -15
  110. metadata +124 -184
  111. data/lib/sidekiq_unique_jobs/client/middleware.rb +0 -56
  112. data/lib/sidekiq_unique_jobs/scripts.rb +0 -118
  113. data/lib/sidekiq_unique_jobs/server/middleware.rb +0 -46
  114. data/lib/sidekiq_unique_jobs/timeout/calculator.rb +0 -63
  115. data/lib/sidekiq_unique_jobs/timeout.rb +0 -8
  116. data/lib/sidekiq_unique_jobs/unique_args.rb +0 -150
  117. data/lib/sidekiq_unique_jobs/util.rb +0 -103
  118. data/lib/sidekiq_unique_jobs/web/views/unique_digest.erb +0 -28
  119. data/lib/sidekiq_unique_jobs/web/views/unique_digests.erb +0 -46
  120. data/redis/acquire_lock.lua +0 -21
  121. data/redis/convert_legacy_lock.lua +0 -13
  122. data/redis/delete.lua +0 -14
  123. data/redis/delete_by_digest.lua +0 -23
  124. data/redis/delete_job_by_digest.lua +0 -60
  125. data/redis/lock.lua +0 -62
  126. data/redis/release_stale_locks.lua +0 -90
  127. data/redis/unlock.lua +0 -35
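
The file list already sketches the shape of the upgrade: the v6 middleware classes (SidekiqUniqueJobs::Client::Middleware and SidekiqUniqueJobs::Server::Middleware) are deleted in favour of the new middleware/client.rb and middleware/server.rb, UniqueArgs gives way to lock_args.rb and lock_digest.rb, the Lua scripts move from data/redis/ into lib/sidekiq_unique_jobs/lua/, and an orphan reaper (orphans/*) plus a changelog key are added. The new lock_ttl.rb and lock_timeout.rb also suggest the timeout handling was reworked. Below is a minimal, hypothetical sketch of how an application might wire up the 7.x middleware, assuming the class names implied by the new files; the bundled README.md and CHANGELOG.md in this diff remain the authoritative upgrade guide.

# config/initializers/sidekiq_unique_jobs.rb (hypothetical app-side setup for 7.x;
# under 6.x the same hooks pointed at the now-deleted Client::Middleware and
# Server::Middleware classes shown further down)
Sidekiq.configure_server do |config|
  config.client_middleware do |chain|
    chain.add SidekiqUniqueJobs::Middleware::Client
  end
  config.server_middleware do |chain|
    chain.add SidekiqUniqueJobs::Middleware::Server
  end
  # server.rb is new in this release; configuring it is what starts the orphan reaper
  SidekiqUniqueJobs::Server.configure(config)
end

Sidekiq.configure_client do |config|
  config.client_middleware do |chain|
    chain.add SidekiqUniqueJobs::Middleware::Client
  end
end
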
data/lib/sidekiq_unique_jobs/client/middleware.rb DELETED
@@ -1,56 +0,0 @@
- # frozen_string_literal: true
-
- require "sidekiq_unique_jobs/server/middleware"
-
- module SidekiqUniqueJobs
-   module Client
-     # The unique sidekiq middleware for the client push
-     #
-     # @author Mikael Henriksson <mikael@zoolutions.se>
-     class Middleware
-       include SidekiqUniqueJobs::Logging
-       include OptionsWithFallback
-
-       # Calls this client middleware
-       # Used from Sidekiq.process_single
-       # @param [String] worker_class name of the sidekiq worker class
-       # @param [Hash] item a sidekiq job hash
-       # @param [String] queue name of the queue
-       # @param [Sidekiq::RedisConnection, ConnectionPool] redis_pool the redis connection
-       # @yield when uniqueness is disable or lock successful
-       def call(worker_class, item, queue, redis_pool = nil)
-         @worker_class = worker_class
-         @item = item
-         @queue = queue
-         @redis_pool = redis_pool
-
-         yield if success?
-       end
-
-       private
-
-       # The sidekiq job hash
-       # @return [Hash] the Sidekiq job hash
-       attr_reader :item
-
-       def success?
-         unique_disabled? || locked?
-       end
-
-       def locked?
-         SidekiqUniqueJobs::Job.add_uniqueness(item)
-         SidekiqUniqueJobs.with_context(logging_context(self.class, item)) do
-           locked = lock.lock
-           warn_about_duplicate unless locked
-           locked
-         end
-       end
-
-       def warn_about_duplicate
-         return unless log_duplicate_payload?
-
-         log_warn "payload is not unique #{item}"
-       end
-     end
-   end
- end
data/lib/sidekiq_unique_jobs/scripts.rb DELETED
@@ -1,118 +0,0 @@
- # frozen_string_literal: true
-
- require "pathname"
- require "digest/sha1"
- require "concurrent/map"
-
- module SidekiqUniqueJobs
-   # Interface to dealing with .lua files
-   #
-   # @author Mikael Henriksson <mikael@zoolutions.se>
-   module Scripts
-     LUA_PATHNAME ||= Pathname.new(__FILE__).dirname.join("../../redis").freeze
-     SCRIPT_SHAS ||= Concurrent::Map.new
-
-     include SidekiqUniqueJobs::Connection
-
-     module_function
-
-     #
-     # Call a lua script with the provided file_name
-     #
-     # @note this method is recursive if we need to load a lua script
-     #   that wasn't previously loaded.
-     #
-     # @param [Symbol] file_name the name of the lua script
-     # @param [Sidekiq::RedisConnection, ConnectionPool] redis_pool the redis connection
-     # @param [Hash] options arguments to pass to the script file
-     # @option options [Array] :keys the array of keys to pass to the script
-     # @option options [Array] :argv the array of arguments to pass to the script
-     #
-     # @return value from script
-     #
-     def call(file_name, redis_pool, options = {})
-       execute_script(file_name, redis_pool, options)
-     rescue Redis::CommandError => ex
-       handle_error(ex, file_name) do
-         call(file_name, redis_pool, options)
-       end
-     end
-
-     #
-     # Execute the script file
-     #
-     # @param [Symbol] file_name the name of the lua script
-     # @param [Sidekiq::RedisConnection, ConnectionPool] redis_pool the redis connection
-     # @param [Hash] options arguments to pass to the script file
-     # @option options [Array] :keys the array of keys to pass to the script
-     # @option options [Array] :argv the array of arguments to pass to the script
-     #
-     # @return value from script (evalsha)
-     #
-     def execute_script(file_name, redis_pool, options = {})
-       redis(redis_pool) do |conn|
-         sha = script_sha(conn, file_name)
-         conn.evalsha(sha, options)
-       end
-     end
-
-     #
-     # Return sha of already loaded lua script or load it and return the sha
-     #
-     # @param [Sidekiq::RedisConnection] conn the redis connection
-     # @param [Symbol] file_name the name of the lua script
-     # @return [String] sha of the script file
-     #
-     # @return [String] the sha of the script
-     #
-     def script_sha(conn, file_name)
-       if (sha = SCRIPT_SHAS.get(file_name))
-         return sha
-       end
-
-       sha = conn.script(:load, script_source(file_name))
-       SCRIPT_SHAS.put(file_name, sha)
-       sha
-     end
-
-     #
-     # Handle errors to allow retrying errors that need retrying
-     #
-     # @param [Redis::CommandError] ex exception to handle
-     # @param [Symbol] file_name the name of the lua script
-     #
-     # @return [void]
-     #
-     # @yieldreturn [void] yields back to the caller when NOSCRIPT is raised
-     def handle_error(ex, file_name)
-       if ex.message == "NOSCRIPT No matching script. Please use EVAL."
-         SCRIPT_SHAS.delete(file_name)
-         return yield if block_given?
-       end
-
-       raise ScriptError, file_name: file_name, source_exception: ex
-     end
-
-     #
-     # Reads the lua file from disk
-     #
-     # @param [Symbol] file_name the name of the lua script
-     #
-     # @return [String] the content of the lua file
-     #
-     def script_source(file_name)
-       script_path(file_name).read
-     end
-
-     #
-     # Construct a Pathname to a lua script
-     #
-     # @param [Symbol] file_name the name of the lua script
-     #
-     # @return [Pathname] the full path to the gems lua script
-     #
-     def script_path(file_name)
-       LUA_PATHNAME.join("#{file_name}.lua")
-     end
-   end
- end
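
The deleted Scripts module is the v6 EVALSHA cache: script SHAs live in a Concurrent::Map, and a NOSCRIPT error evicts the cached SHA and retries, loading the file from data/redis/ on the way. A hypothetical call site, reconstructed from the signatures above (the :acquire_lock name and the KEYS/ARGV layout come from data/redis/acquire_lock.lua further down; the digest and jid values are made up):

SidekiqUniqueJobs::Scripts.call(
  :acquire_lock,                                          # data/redis/acquire_lock.lua
  redis_pool,
  keys: ["uniquejobs:24c5b03e2d49c765e5dfb2d7c51c5929"],  # KEYS[1] - the unique digest
  argv: ["2647c4fe13acc692326bd4c2", 60]                  # ARGV - jid and TTL in seconds
)
# First call: EVALSHA fails with NOSCRIPT, handle_error drops the cached SHA and
# yields back to call, which SCRIPT LOADs the file and retries successfully.
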
data/lib/sidekiq_unique_jobs/server/middleware.rb DELETED
@@ -1,46 +0,0 @@
- # frozen_string_literal: true
-
- module SidekiqUniqueJobs
-   module Server
-     # The unique sidekiq middleware for the server processor
-     #
-     # @author Mikael Henriksson <mikael@zoolutions.se>
-     class Middleware
-       include Logging
-       include OptionsWithFallback
-
-       #
-       #
-       # Runs the server middleware (used from Sidekiq::Processor#process)
-       #
-       # @param [Sidekiq::Worker] worker_class
-       # @param [Hash] item a sidekiq job hash
-       # @param [String] queue name of the queue
-       #
-       # @see https://github.com/mperham/sidekiq/wiki/Job-Format
-       # @see https://github.com/mperham/sidekiq/wiki/Middleware
-       #
-       # @yield when uniqueness is disabled
-       # @yield when the lock is acquired
-       def call(worker_class, item, queue)
-         @worker_class = worker_class
-         @item = item
-         @queue = queue
-         return yield if unique_disabled?
-
-         SidekiqUniqueJobs::Job.add_uniqueness(item)
-         SidekiqUniqueJobs.with_context(logging_context(self.class, item)) do
-           lock.execute do
-             yield
-           end
-         end
-       end
-
-       private
-
-       # The sidekiq job hash
-       # @return [Hash] the Sidekiq job hash
-       attr_reader :item
-     end
-   end
- end
data/lib/sidekiq_unique_jobs/timeout/calculator.rb DELETED
@@ -1,63 +0,0 @@
- # frozen_string_literal: true
-
- module SidekiqUniqueJobs
-   module Timeout
-     # Calculates timeout and expiration
-     #
-     # @author Mikael Henriksson <mikael@zoolutions.se>
-     class Calculator
-       include SidekiqUniqueJobs::SidekiqWorkerMethods
-
-       # @attr [Hash] item the Sidekiq job hash
-       attr_reader :item
-
-       # @param [Hash] item the Sidekiq job hash
-       # @option item [Integer, nil] :lock_expiration the configured lock expiration
-       # @option item [Integer, nil] :lock_timeout the configured lock timeout
-       # @option item [String] :class the class of the sidekiq worker
-       # @option item [Float] :at the unix time the job is scheduled at
-       def initialize(item)
-         @item = item
-         @worker_class = item[CLASS_KEY]
-       end
-
-       # The time until a job is scheduled
-       # @return [Integer] the number of seconds until job is scheduled
-       def time_until_scheduled
-         return 0 unless scheduled_at
-
-         scheduled_at.to_i - Time.now.utc.to_i
-       end
-
-       # The time a job is scheduled
-       # @return [Float] the exact unix time the job is scheduled at
-       def scheduled_at
-         @scheduled_at ||= item[AT_KEY]
-       end
-
-       # The configured lock_expiration
-       def lock_expiration
-         @lock_expiration ||= begin
-           expiration = item[LOCK_EXPIRATION_KEY]
-           expiration ||= worker_options[LOCK_EXPIRATION_KEY]
-           expiration && expiration.to_i + time_until_scheduled
-         end
-       end
-
-       # The configured lock_timeout
-       def lock_timeout
-         @lock_timeout = begin
-           timeout = default_worker_options[LOCK_TIMEOUT_KEY]
-           timeout = default_lock_timeout if default_lock_timeout
-           timeout = worker_options[LOCK_TIMEOUT_KEY] if worker_options.key?(LOCK_TIMEOUT_KEY)
-           timeout
-         end
-       end
-
-       # The default lock_timeout of this gem
-       def default_lock_timeout
-         SidekiqUniqueJobs.config.default_lock_timeout
-       end
-     end
-   end
- end
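
For scheduled jobs, the deleted calculator pushed the configured lock_expiration forward by the time remaining until the job is due. A worked example with hypothetical numbers, using the string keys the class reads above ("class", "at", "lock_expiration"):

item = {
  "class"           => "MyWorker",                 # hypothetical worker
  "at"              => Time.now.utc.to_i + 3_600,  # scheduled one hour from now
  "lock_expiration" => 600                         # keep the lock 10 minutes past execution
}

calculator = SidekiqUniqueJobs::Timeout::Calculator.new(item)
calculator.time_until_scheduled # => ~3600 seconds
calculator.lock_expiration      # => ~4200 (600 + time until the job is due)
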
data/lib/sidekiq_unique_jobs/timeout.rb DELETED
@@ -1,8 +0,0 @@
- # frozen_string_literal: true
-
- module SidekiqUniqueJobs
-   module Timeout
-   end
- end
-
- require "sidekiq_unique_jobs/timeout/calculator"
data/lib/sidekiq_unique_jobs/unique_args.rb DELETED
@@ -1,150 +0,0 @@
- # frozen_string_literal: true
-
- require "digest"
- require "openssl"
- require "sidekiq_unique_jobs/normalizer"
-
- module SidekiqUniqueJobs
-   # Handles uniqueness of sidekiq arguments
-   #
-   # @author Mikael Henriksson <mikael@zoolutions.se>
-   class UniqueArgs
-     include SidekiqUniqueJobs::Logging
-     include SidekiqUniqueJobs::SidekiqWorkerMethods
-
-     # Convenience method for returning a digest
-     # @param [Hash] item a Sidekiq job hash
-     # @return [String] a unique digest
-     def self.digest(item)
-       new(item).unique_digest
-     end
-
-     # The sidekiq job hash
-     # @return [Hash] the Sidekiq job hash
-     attr_reader :item
-
-     # @param [Hash] item a Sidekiq job hash
-     def initialize(item)
-       @item = item
-       @worker_class = item[CLASS_KEY]
-
-       add_uniqueness_to_item
-     end
-
-     # Appends the keys unique_prefix, unique_args and {#unique_digest} to the sidekiq job hash {#item}
-     # @return [void]
-     def add_uniqueness_to_item
-       item[UNIQUE_PREFIX_KEY] ||= unique_prefix
-       item[UNIQUE_ARGS_KEY] = unique_args(item[ARGS_KEY])
-       item[UNIQUE_DIGEST_KEY] = unique_digest
-     end
-
-     # Memoized unique_digest
-     # @return [String] a unique digest
-     def unique_digest
-       @unique_digest ||= create_digest
-     end
-
-     # Creates a namespaced unique digest based on the {#digestable_hash} and the {#unique_prefix}
-     # @return [String] a unique digest
-     def create_digest
-       digest = OpenSSL::Digest::MD5.hexdigest(Sidekiq.dump_json(digestable_hash))
-       "#{unique_prefix}:#{digest}"
-     end
-
-     # A prefix to use as namespace for the {#unique_digest}
-     # @return [String] a unique digest
-     def unique_prefix
-       worker_options[UNIQUE_PREFIX_KEY] || SidekiqUniqueJobs.config.unique_prefix
-     end
-
-     # Filter a hash to use for digest
-     # @return [Hash] to use for digest
-     def digestable_hash
-       @item.slice(CLASS_KEY, QUEUE_KEY, UNIQUE_ARGS_KEY).tap do |hash|
-         hash.delete(QUEUE_KEY) if unique_across_queues?
-         hash.delete(CLASS_KEY) if unique_across_workers?
-       end
-     end
-
-     # The unique arguments to use for creating a lock
-     # @return [Array] the arguments filters by the {#filtered_args} method if {#unique_args_enabled?}
-     def unique_args(args)
-       return filtered_args(args) if unique_args_enabled?
-
-       args
-     end
-
-     # Checks if we should disregard the queue when creating the unique digest
-     # @return [true, false]
-     def unique_across_queues?
-       item[UNIQUE_ACROSS_QUEUES_KEY] || worker_options[UNIQUE_ACROSS_QUEUES_KEY] ||
-         item[UNIQUE_ON_ALL_QUEUES_KEY] || worker_options[UNIQUE_ON_ALL_QUEUES_KEY] # TODO: Remove in v 6.1
-     end
-
-     # Checks if we should disregard the worker when creating the unique digest
-     # @return [true, false]
-     def unique_across_workers?
-       item[UNIQUE_ACROSS_WORKERS_KEY] || worker_options[UNIQUE_ACROSS_WORKERS_KEY]
-     end
-
-     # Checks if the worker class has been enabled for unique_args?
-     # @return [true, false]
-     def unique_args_enabled?
-       unique_args_method # && !unique_args_method.is_a?(Boolean)
-     end
-
-     # Filters unique arguments by proc or symbol
-     # @param [Array] args the arguments passed to the sidekiq worker
-     # @return [Array] {#filter_by_proc} when {#unique_args_method} is a Proc
-     # @return [Array] {#filter_by_symbol} when {#unique_args_method} is a Symbol
-     # @return [Array] args unfiltered when neither of the above
-     def filtered_args(args)
-       return args if args.empty?
-
-       json_args = Normalizer.jsonify(args)
-
-       case unique_args_method
-       when Proc
-         filter_by_proc(json_args)
-       when Symbol
-         filter_by_symbol(json_args)
-       else
-         log_debug("#{__method__} arguments not filtered (using all arguments for uniqueness)")
-         json_args
-       end
-     end
-
-     # Filters unique arguments by proc configured in the sidekiq worker
-     # @param [Array] args the arguments passed to the sidekiq worker
-     # @return [Array] with the filtered arguments
-     def filter_by_proc(args)
-       unique_args_method.call(args)
-     end
-
-     # Filters unique arguments by method configured in the sidekiq worker
-     # @param [Array] args the arguments passed to the sidekiq worker
-     # @return [Array] unfiltered unless {#worker_method_defined?}
-     # @return [Array] with the filtered arguments
-     def filter_by_symbol(args)
-       return args unless worker_method_defined?(unique_args_method)
-
-       worker_class.send(unique_args_method, args)
-     rescue ArgumentError => ex
-       log_fatal(ex)
-       args
-     end
-
-     # The method to use for filtering unique arguments
-     def unique_args_method
-       @unique_args_method ||= worker_options[UNIQUE_ARGS_KEY]
-       @unique_args_method ||= :unique_args if worker_method_defined?(:unique_args)
-       @unique_args_method ||= default_unique_args_method
-     end
-
-     # The global worker options defined in Sidekiq directly
-     def default_unique_args_method
-       Sidekiq.default_worker_options.stringify_keys[UNIQUE_ARGS_KEY]
-     end
-   end
- end
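
UniqueArgs resolved its filter from the worker's unique_args option (a Proc, or a Symbol naming a class method) and otherwise fell back to a unique_args class method if one was defined; the digest is then an MD5 of class, queue and the filtered arguments. A hypothetical v6-style worker showing both forms (option names as read by the class above):

class OrderProcessor
  include Sidekiq::Worker
  # Only the order id feeds the digest, so re-enqueueing with a fresh timestamp
  # still counts as a duplicate of the same order.
  sidekiq_options lock: :until_executed,
                  unique_args: ->(args) { [args.first] }

  # Symbol form, equivalent to the proc above:
  #   sidekiq_options unique_args: :unique_args
  #   def self.unique_args(args)
  #     [args.first]
  #   end

  def perform(order_id, enqueued_at)
    # ...
  end
end
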
data/lib/sidekiq_unique_jobs/util.rb DELETED
@@ -1,103 +0,0 @@
- # frozen_string_literal: true
-
- module SidekiqUniqueJobs
-   # Utility module to help manage unique keys in redis.
-   # Useful for deleting keys that for whatever reason wasn't deleted
-   #
-   # @author Mikael Henriksson <mikael@zoolutions.se>
-   module Util
-     DEFAULT_COUNT = 1_000
-     SCAN_PATTERN = "*"
-
-     include SidekiqUniqueJobs::Logging
-     include SidekiqUniqueJobs::Connection
-     extend self
-
-     # Find unique keys in redis
-     #
-     # @param [String] pattern a pattern to scan for in redis
-     # @param [Integer] count the maximum number of keys to delete
-     # @return [Array<String>] an array with active unique keys
-     def keys(pattern = SCAN_PATTERN, count = DEFAULT_COUNT)
-       return redis(&:keys) if pattern.nil?
-
-       redis { |conn| conn.scan_each(match: prefix(pattern), count: count).to_a }
-     end
-
-     # Find unique keys with ttl
-     # @param [String] pattern a pattern to scan for in redis
-     # @param [Integer] count the maximum number of keys to delete
-     # @return [Hash<String, Integer>] a hash with active unique keys and corresponding ttl
-     def keys_with_ttl(pattern = SCAN_PATTERN, count = DEFAULT_COUNT)
-       hash = {}
-       redis do |conn|
-         conn.scan_each(match: prefix(pattern), count: count).each do |key|
-           hash[key] = conn.ttl(key)
-         end
-       end
-       hash
-     end
-
-     # Deletes unique keys from redis
-     #
-     # @param [String] pattern a pattern to scan for in redis
-     # @param [Integer] count the maximum number of keys to delete
-     # @return [Integer] the number of keys deleted
-     def del(pattern = SCAN_PATTERN, count = 0)
-       raise ArgumentError, "Please provide a number of keys to delete greater than zero" if count.zero?
-
-       pattern = suffix(pattern)
-
-       log_debug { "Deleting keys by: #{pattern}" }
-       keys, time = timed { keys(pattern, count) }
-       key_size = keys.size
-       log_debug { "#{key_size} keys found in #{time} sec." }
-       _, time = timed { batch_delete(keys) }
-       log_debug { "Deleted #{key_size} keys in #{time} sec." }
-
-       key_size
-     end
-
-     private
-
-     def batch_delete(keys)
-       redis do |conn|
-         keys.each_slice(500) do |chunk|
-           conn.pipelined do
-             chunk.each do |key|
-               conn.del key
-             end
-           end
-         end
-       end
-     end
-
-     def timed
-       start = current_time
-       result = yield
-       elapsed = (current_time - start).round(2)
-       [result, elapsed]
-     end
-
-     def current_time
-       Time.now
-     end
-
-     def prefix(key)
-       return key if unique_prefix.nil?
-       return key if key.start_with?("#{unique_prefix}:")
-
-       "#{unique_prefix}:#{key}"
-     end
-
-     def suffix(key)
-       return "#{key}*" unless key.end_with?(":*")
-
-       key
-     end
-
-     def unique_prefix
-       SidekiqUniqueJobs.config.unique_prefix
-     end
-   end
- end
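
Util was the v6 maintenance interface for inspecting and deleting unique keys by hand, a job largely taken over in 7.x by the reworked digests.rb and the orphan reaper added in this release. Typical console usage, following the signatures above (patterns are prefixed with SidekiqUniqueJobs.config.unique_prefix, "uniquejobs" by default; the sample digests are made up):

SidekiqUniqueJobs::Util.keys("*", 1_000)          # => ["uniquejobs:24c5b03e...", ...]
SidekiqUniqueJobs::Util.keys_with_ttl("*", 1_000) # => { "uniquejobs:24c5b03e..." => -1, ... }
SidekiqUniqueJobs::Util.del("*", 1_000)           # scans with the given count hint, deletes
                                                  # matching keys and returns how many
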
data/lib/sidekiq_unique_jobs/web/views/unique_digest.erb DELETED
@@ -1,28 +0,0 @@
- <header class="row">
-   <div class="col-sm-5">
-     <h3><%= t('Unique Digest') %> - <a class="btn btn-default btn-xs" href="<%= root_path %>unique_digests"><%= t('GoBack') %></a></h3>
-   </div>
- </header>
-
- <% if @unique_keys.size.positive? %>
-   <div class="table_container">
-     <table class="table table-striped table-bordered table-hover">
-       <thead>
-         <tr>
-           <th>
-             <%= t('Keys') %>
-             <span class="small text-muted">for (<%= @digest %>)</span>
-           </th>
-         </tr>
-       </thead>
-       <% @unique_keys.each do |key| %>
-         <tr><td colspan="2"><%= key %></td></tr>
-       <% end %>
-     </table>
-   </div>
-   <form action="<%= root_path %>unique_digests/<%= @digest %>/delete" method="get">
-     <%= csrf_tag %>
-     <a class="btn btn-default btn-xs" href="<%= root_path %>unique_digests"><%= t('GoBack') %></a>
-     <input class="btn btn-danger btn-xs flip" type="submit" name="delete" value="<%= t('Delete') %>" data-confirm="<%= t('AreYouSure') %>" />
-   </form>
- <% end %>
data/lib/sidekiq_unique_jobs/web/views/unique_digests.erb DELETED
@@ -1,46 +0,0 @@
- <header class="row">
-   <div class="col-sm-5">
-     <h3><%= t('Unique Digests') %></h3>
-   </div>
-   <form action="<%= root_path %>unique_digests" class="form form-inline" method="get">
-     <%= csrf_tag %>
-     <input name="filter" class="form-control" type="text" value="<%= @filter %>" />
-     <button class="btn btn-default" type="submit"><%= t('Filter') %></button>
-   </form>
-   <% if @unique_digests.size > 0 && @total_size > @count.to_i %>
-     <div class="col-sm-4">
-       <%= erb unique_template(:_paging), locals: { url: "#{root_path}unique_digests" } %>
-     </div>
-   <% end %>
- </header>
-
- <% if @unique_digests.size.positive? %>
-   <div class="table_container">
-     <table class="table table-striped table-bordered table-hover">
-       <thead>
-         <tr>
-           <th><%= t('Delete') %></th>
-           <th><%= t('Digest') %></th>
-         </tr>
-       </thead>
-       <% @unique_digests.each do |digest| %>
-         <tr>
-           <td>
-             <form action="<%= root_path %>unique_digests/<%= digest %>/delete" method="get">
-               <%= csrf_tag %>
-               <input name="digest" value="<%= h digest %>" type="hidden" />
-               <input class="btn btn-danger btn-xs" type="submit" name="delete" value="<%= t('Delete') %>" data-confirm="<%= t('AreYouSure') %>" />
-             </form>
-           </td>
-           <td>
-             <a href="<%= root_path %>unique_digests/<%= digest %>"><%= digest %></a>
-           </td>
-         </tr>
-       <% end %>
-     </table>
-
-     <form action="<%= root_path %>unique_digests/delete_all" method="get">
-       <input class="btn btn-danger btn-xs" type="submit" name="delete_all" value="<%= t('DeleteAll') %>" data-confirm="<%= t('AreYouSure') %>" />
-     </form>
-   </div>
- <% end %>
data/redis/acquire_lock.lua DELETED
@@ -1,21 +0,0 @@
- local unique_key = KEYS[1]
- local job_id = ARGV[1]
- local expires = tonumber(ARGV[2])
- local stored_jid = redis.pcall('get', unique_key)
-
- if stored_jid then
-   if stored_jid == job_id then
-     return 1
-   else
-     return 0
-   end
- end
-
- if redis.call('SET', unique_key, job_id, 'nx') then
-   if expires then
-     redis.call('EXPIRE', unique_key, expires)
-   end
-   return 1
- else
-   return 0
- end
data/redis/convert_legacy_lock.lua DELETED
@@ -1,13 +0,0 @@
- local grabbed_key = KEYS[1]
- local unique_digest = KEYS[2]
-
- local job_id = ARGV[1]
- local current_time = tonumber(ARGV[2])
-
- local old_token = redis.call('GET', unique_digest)
- if old_token then
-   if old_token == job_id or old_token == '2' then
-     redis.call('DEL', unique_digest)
-     redis.call('HSET', grabbed_key, job_id, current_time)
-   end
- end
data/redis/delete.lua DELETED
@@ -1,14 +0,0 @@
- local exists_key = KEYS[1]
- local grabbed_key = KEYS[2]
- local available_key = KEYS[3]
- local version_key = KEYS[4]
- local unique_keys = KEYS[5]
- local unique_digest = KEYS[6] -- TODO: Legacy support (Remove in v6.1)
-
- redis.call('DEL', exists_key)
- redis.call('SREM', unique_keys, unique_digest)
- redis.call('DEL', grabbed_key)
- redis.call('DEL', available_key)
- redis.call('DEL', version_key)
- redis.call('DEL', 'uniquejobs') -- TODO: Old job hash, just drop the darn thing
- redis.call('DEL', unique_digest) -- TODO: Legacy support (Remove in v6.1)