rocketjob 5.3.1 → 5.4.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 42be7df83c1d38b5ef3a41751e3cc6afd6fb885638a7d144bfd1f71ff5de441e
-  data.tar.gz: fd3942b7fe1aa2d76bda3395561bea56122b9eab58c0ae65d99fbea76e3d771c
+  metadata.gz: 0bf1e6e7eb0a44011830a99c689c56768857d8de4c4e2c95e6b536feb6f63aab
+  data.tar.gz: '0172629dc33a08bfbd976599b619a41495680124dcd7df9a08e61308f0d043b4'
 SHA512:
-  metadata.gz: 324641130fd1bb0724058d81cdf33dad02bbdeef2047413a479dcb0bb0782be37be3e0aad9b5c57d9ea200e6cc30889ce06ce8130716f91399472bf317ed4089
-  data.tar.gz: dfe9e7f121e7fee9c4713d5fc819e3889d1e0d1d208ac14cd4678f83ce0f802f51e2067cd10d687301a9f7470a609eab6259036e191ae8e6198f7a7830cbbd82
+  metadata.gz: 6ac52c54834a7370dfdb9e7460ca1818c19b91beb2afc875ba0edab63a9b0d9480bc704a35bd155825f3c0b280efea9a406783212e8621fcbf6b046b8af62e78
+  data.tar.gz: f044d85630a0b6b36b07f7092ec18ac452d608cb9c7dbb7d26235493c51df584d7479190b24dfd1805ba7a086c8188e48ab8b02590d0748d013c6fb462b01505
@@ -18,7 +18,7 @@ module RocketJob
         raise "Category #{category.inspect}, must be registered in input_categories: #{input_categories.inspect}"
       end
 
-      (@inputs ||= {})[category] ||= RocketJob::Sliced::Input.new(**rocket_job_io_slice_arguments("inputs", category))
+      (@inputs ||= {})[category] ||= RocketJob::Sliced.factory(:input, category, self)
     end
 
     # Returns [RocketJob::Sliced::Output] output collection for holding output slices
@@ -34,7 +34,7 @@ module RocketJob
         raise "Category #{category.inspect}, must be registered in output_categories: #{output_categories.inspect}"
       end
 
-      (@outputs ||= {})[category] ||= RocketJob::Sliced::Output.new(**rocket_job_io_slice_arguments("outputs", category))
+      (@outputs ||= {})[category] ||= RocketJob::Sliced.factory(:output, category, self)
     end
 
     # Upload the supplied file, io, IOStreams::Path, or IOStreams::Stream.
@@ -355,8 +355,18 @@ module RocketJob
 
       return output(category).download(header_line: header_line, &block) if block
 
-      IOStreams.new(stream).writer(:line, **args) do |io|
-        output(category).download(header_line: header_line) { |record| io << record }
+      output_collection = output(category)
+
+      if output_collection.binary?
+        IOStreams.new(stream).stream(:none).writer(**args) do |io|
+          raise(ArgumentError, "A `header_line` is not supported with binary output collections") if header_line
+
+          output_collection.download { |record| io << record[:binary] }
+        end
+      else
+        IOStreams.new(stream).writer(:line, **args) do |io|
+          output_collection.download(header_line: header_line) { |record| io << record }
+        end
       end
     end
 
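
For context, a sketch of what the new binary branch enables (the job and file names below are hypothetical, not part of this diff): when a job's output collection uses a binary slice class, `download` now streams each slice's raw bytes instead of re-encoding lines, and rejects `header_line`.

    # Hypothetical job whose output collection uses the new binary BZip2 slices.
    class MyBzip2Job < RocketJob::Job
      include RocketJob::Batch
      self.compress = {output: :bzip2}

      def perform(record)
        record
      end
    end

    # download detects output(:main).binary? and appends each slice's
    # record[:binary] payload verbatim, producing a single .bz2 file.
    job = MyBzip2Job.last
    job.download("results.bz2")
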
@@ -393,21 +403,6 @@ module RocketJob
           RocketJob::Sliced::Writer::Output.collect(self, input_slice) { |writer| writer << result }
         end
       end
-
-      private
-
-      def rocket_job_io_slice_arguments(collection_type, category)
-        collection_name = "rocket_job.#{collection_type}.#{id}"
-        collection_name << ".#{category}" unless category == :main
-
-        args = {collection_name: collection_name, slice_size: slice_size}
-        if encrypt
-          args[:slice_class] = Sliced::EncryptedSlice
-        elsif compress
-          args[:slice_class] = Sliced::CompressedSlice
-        end
-        args
-      end
     end
   end
 end
@@ -44,12 +44,12 @@ module RocketJob
       # Compress uploaded records.
       # The fields are not affected in any way, only the data stored in the
       # records and results collections will be compressed
-      field :compress, type: Boolean, default: false, class_attribute: true
+      field :compress, type: Object, default: false, class_attribute: true
 
       # Encrypt uploaded records.
       # The fields are not affected in any way, only the data stored in the
       # records and results collections will be encrypted
-      field :encrypt, type: Boolean, default: false, class_attribute: true
+      field :encrypt, type: Object, default: false, class_attribute: true
 
       #
       # Values that jobs can also update during processing
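
Changing `compress` and `encrypt` from `Boolean` to `Object` is what allows the richer values interpreted by the new `RocketJob::Sliced.factory` further down in this diff. A brief sketch of the accepted shapes (the job name is illustrative):

    class InventoryJob < RocketJob::Job
      include RocketJob::Batch

      # The old booleans still work:
      self.compress = true
      self.encrypt  = false

      # ...as do the Hash forms documented in RocketJob::Sliced below:
      # self.compress = {input: true, output: :bzip2}
      # self.encrypt  = {input: true}
    end
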
@@ -12,6 +12,7 @@ module RocketJob
       included do
         field :tabular_input_header, type: Array, class_attribute: true, user_editable: true
         field :tabular_input_format, type: Symbol, default: :csv, class_attribute: true, user_editable: true
+        field :tabular_input_options, type: Hash, class_attribute: true
 
         # tabular_input_mode: [:line | :array | :hash]
         #   :line
@@ -53,7 +54,9 @@ module RocketJob
         input_stream = stream.nil? ? nil : IOStreams.new(stream)
 
         if stream && (tabular_input_type == :text)
-          input_stream.option_or_stream(:encode, encoding: "UTF-8", cleaner: :printable, replace: "")
+          # Cannot change the length of fixed width lines
+          replace = tabular_input_format == :fixed ? " " : ""
+          input_stream.option_or_stream(:encode, encoding: "UTF-8", cleaner: :printable, replace: replace)
         end
 
         # If an input header is not required, then we don't extract it
@@ -96,14 +99,15 @@ module RocketJob
           allowed_columns: tabular_input_white_list,
           required_columns: tabular_input_required,
           skip_unknown: tabular_input_skip_unknown,
-          format: tabular_input_format
+          format: tabular_input_format,
+          format_options: tabular_input_options&.deep_symbolize_keys
         )
       end
 
       def tabular_input_render
-        unless tabular_input_header.blank? && tabular_input.header?
-          @rocket_job_input = tabular_input.record_parse(@rocket_job_input)
-        end
+        return if tabular_input_header.blank? && tabular_input.header?
+
+        @rocket_job_input = tabular_input.record_parse(@rocket_job_input)
       end
 
       # Cleanse custom input header if supplied.
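
The new `tabular_input_options` Hash is deep-symbolized and passed to `IOStreams::Tabular` as `format_options:`. A hedged sketch: the `:layout` key shown for the fixed-width parser is an assumption about IOStreams, and the job name is made up.

    class FixedWidthImportJob < RocketJob::Job
      include RocketJob::Batch
      include RocketJob::Batch::Tabular::Input

      self.tabular_input_format  = :fixed
      # Handed through as format_options: after deep_symbolize_keys.
      self.tabular_input_options = {layout: [{size: 10, key: :name}, {size: 8, key: :code}]}

      def perform(row)
        row
      end
    end
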
@@ -12,6 +12,7 @@ module RocketJob
       included do
         field :tabular_output_header, type: Array, class_attribute: true, user_editable: true, copy_on_restart: true
         field :tabular_output_format, type: Symbol, default: :csv, class_attribute: true, user_editable: true, copy_on_restart: true
+        field :tabular_output_options, type: Hash, class_attribute: true
 
         validates_inclusion_of :tabular_output_format, in: IOStreams::Tabular.registered_formats
 
@@ -31,8 +32,9 @@ module RocketJob
 
       # Overrides: `RocketJob::Batch::IO#download` to add the `tabular_output_header`.
       def download(file_name_or_io = nil, category: :main, **args, &block)
-        # No header required
-        return super(file_name_or_io, category: category, **args, &block) unless tabular_output.requires_header?(category)
+        unless tabular_output.requires_header?(category)
+          return super(file_name_or_io, category: category, **args, &block)
+        end
 
         header = tabular_output.render_header(category)
         super(file_name_or_io, header_line: header, category: category, **args, &block)
@@ -43,7 +45,11 @@ module RocketJob
       # Delimited instance used for this slice, by a single worker (thread)
       def tabular_output
         @tabular_output ||= Tabular.new(
-          main: IOStreams::Tabular.new(columns: tabular_output_header, format: tabular_output_format)
+          main: IOStreams::Tabular.new(
+            columns: tabular_output_header,
+            format: tabular_output_format,
+            format_options: tabular_output_options&.deep_symbolize_keys
+          )
         )
       end
 
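
Symmetrically, `tabular_output_options` flows into the `format_options:` of the main `IOStreams::Tabular` renderer. A minimal sketch under the same assumptions as the input example above:

    class FixedWidthExportJob < RocketJob::Job
      include RocketJob::Batch
      include RocketJob::Batch::Tabular::Output

      self.tabular_output_format  = :fixed
      self.tabular_output_options = {layout: [{size: 10, key: :name}, {size: 8, key: :code}]}

      def perform(record)
        record
      end
    end
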
@@ -46,7 +46,7 @@ module RocketJob
           next if slice.failed?
 
           slice.fail_on_exception!(re_raise_exceptions) { rocket_job_process_slice(slice) }
-        elsif record_count && rocket_job_batch_complete?(worker.name)
+        elsif record_count && fail_on_exception!(re_raise_exceptions) { rocket_job_batch_complete?(worker.name) }
           return false
         else
           logger.debug "No more work available for this job"
@@ -0,0 +1,12 @@
+require "mongoid/fields/validators/macro"
+require "semantic_logger"
+module RocketJob
+  module RemoveMongoidWarnings
+    # Remove annoying warnings about Symbols type being deprecated.
+    def validate_options(*params)
+      SemanticLogger.silence(:error) { super(*params) }
+    end
+  end
+end
+
+::Mongoid::Fields::Validators::Macro.extend(RocketJob::RemoveMongoidWarnings)
@@ -25,7 +25,7 @@ module RocketJob
 
       self.destroy_on_complete = false
       # Number of times to automatically retry the copy. Set to `0` for no retry attempts.
-      self.retry_limit = 5
+      self.retry_limit = 10
 
       # File names in IOStreams URL format.
       field :source_url, type: String, user_editable: true
@@ -1,8 +1,5 @@
-begin
-  require "active_record"
-rescue LoadError
-  raise 'RocketJob::Jobs::ReEncrypt::RelationalJob uses ActiveRecord to obtain the database connection, please install the gem "activerecord".'
-end
+require "active_record"
+require "sync_attr"
 
 # Batch Worker to Re-encrypt all encrypted fields in MySQL that start with `encrypted_`.
 #
@@ -40,7 +40,7 @@ module RocketJob
         job.id = job_id if job_id
         upload_file(job)
         job.save!
-      rescue StandardError => e
+      rescue Exception => e
         # Prevent partial uploads
         job&.cleanup! if job.respond_to?(:cleanup!)
         raise(e)
@@ -45,20 +45,10 @@ module RocketJob
        end
      end
 
-     # Returns [Time] the next time this job will be scheduled to run at.
-     #
-     # Parameters
-     #   time: [Time]
-     #     The next time as of this time.
-     #     Default: Time.now
-     def rocket_job_cron_next_time(time = Time.now)
-       Fugit::Cron.new(cron_schedule).next_time.to_utc_time
-     end
-
      def rocket_job_cron_set_run_at
-       return unless cron_schedule
+       return if cron_schedule.nil? || !(cron_schedule_changed? && !run_at_changed?)
 
-       self.run_at = rocket_job_cron_next_time if cron_schedule_changed? && !run_at_changed?
+       self.run_at = Fugit::Cron.new(cron_schedule).next_time.to_utc_time
      end
    end
  end
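
The removed `rocket_job_cron_next_time` helper was a thin wrapper around Fugit; the setter now performs the same calculation inline, and only when `cron_schedule` changed without `run_at` also being set explicitly. Roughly, the calculation it runs:

    require "fugit"

    # Next run for "2am every day", as a UTC Time (mirrors the inlined call above).
    Fugit::Cron.new("0 2 * * *").next_time.to_utc_time
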
@@ -29,6 +29,9 @@ module RocketJob
       class_attribute :throttle_running_jobs
       self.throttle_running_jobs = nil
 
+      # Allow jobs to be throttled by group name instead of the job class name.
+      field :throttle_group, type: String, class_attribute: true, user_editable: true, copy_on_restart: true
+
       define_throttle :throttle_running_jobs_exceeded?
     end
 
@@ -38,9 +41,10 @@ module RocketJob
     def throttle_running_jobs_exceeded?
       return unless throttle_running_jobs&.positive?
 
-      # Cannot use this class since it will include instances of parent job classes.
       RocketJob::Job.with(read: {mode: :primary}) do |conn|
-        conn.running.where("_type" => self.class.name, :id.ne => id).count >= throttle_running_jobs
+        query = {:id.ne => id}
+        throttle_group ? query["throttle_group"] = throttle_group : query["_type"] = self.class.name
+        conn.running.where(query).count >= throttle_running_jobs
       end
     end
   end
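
With the new `throttle_group` field, the running-jobs throttle counts any running job that shares the group name instead of only instances of the same class. A sketch (class and group names are hypothetical):

    class NightlyExtractJob < RocketJob::Job
      self.throttle_running_jobs = 2
      self.throttle_group        = "warehouse"
    end

    class NightlyLoadJob < RocketJob::Job
      self.throttle_running_jobs = 2
      self.throttle_group        = "warehouse"
    end

    # At most 2 jobs across both classes run at once, because the throttle
    # query now filters on "throttle_group" => "warehouse" rather than "_type".
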
@@ -73,12 +73,13 @@ module RocketJob
         if failed? || !may_fail?
           self.exception = JobException.from_exception(e)
           exception.worker_name = worker_name
-          save! unless new_record? || destroyed?
-        elsif new_record? || destroyed?
-          fail(worker_name, e)
         else
-          fail!(worker_name, e)
+          fail(worker_name, e)
         end
+
+        # Prevent validation failures from failing the job
+        save(validate: false) unless new_record? || destroyed?
+
         raise e if re_raise_exceptions
       end
     end
@@ -0,0 +1,91 @@
+module RocketJob
+  module Sliced
+    autoload :BZip2OutputSlice, "rocket_job/sliced/bzip2_output_slice"
+    autoload :CompressedSlice, "rocket_job/sliced/compressed_slice"
+    autoload :EncryptedSlice, "rocket_job/sliced/encrypted_slice"
+    autoload :Input, "rocket_job/sliced/input"
+    autoload :Output, "rocket_job/sliced/output"
+    autoload :Slice, "rocket_job/sliced/slice"
+    autoload :Slices, "rocket_job/sliced/slices"
+    autoload :Store, "rocket_job/sliced/store"
+
+    module Writer
+      autoload :Input, "rocket_job/sliced/writer/input"
+      autoload :Output, "rocket_job/sliced/writer/output"
+    end
+
+    # Returns [RocketJob::Sliced::Slices] for the relevant type and category.
+    #
+    # Supports compress and encrypt with [true|false|Hash] values.
+    # When [Hash] they must specify whether they apply to the input or output collection types.
+    #
+    # Example, compress both input and output collections:
+    #   class MyJob < RocketJob::Job
+    #     include RocketJob::Batch
+    #     self.compress = true
+    #   end
+    #
+    # Example, compress just the output collections:
+    #   class MyJob < RocketJob::Job
+    #     include RocketJob::Batch
+    #     self.compress = {output: true}
+    #   end
+    #
+    # To use the specialized BZip output compressor, and the regular compressor for the input collections:
+    #   class MyJob < RocketJob::Job
+    #     include RocketJob::Batch
+    #     self.compress = {output: :bzip2, input: true}
+    #   end
+    def self.factory(type, category, job)
+      raise(ArgumentError, "Unknown type: #{type.inspect}") unless %i[input output].include?(type)
+
+      collection_name = "rocket_job.#{type}s.#{job.id}"
+      collection_name << ".#{category}" unless category == :main
+
+      args = {collection_name: collection_name, slice_size: job.slice_size}
+      klass = slice_class(type, job)
+      args[:slice_class] = klass if klass
+
+      if type == :input
+        RocketJob::Sliced::Input.new(args)
+      else
+        RocketJob::Sliced::Output.new(args)
+      end
+    end
+
+    private
+
+    # Parses the encrypt and compress options to determine which slice serializer to use.
+    # `encrypt` takes priority over any `compress` option.
+    def self.slice_class(type, job)
+      encrypt = extract_value(type, job.encrypt)
+      compress = extract_value(type, job.compress)
+
+      if encrypt
+        case encrypt
+        when true
+          EncryptedSlice
+        else
+          raise(ArgumentError, "Unknown job `encrypt` value: #{encrypt}") unless encrypt.is_a?(Slices)
+          # Returns the supplied class to use for encryption.
+          encrypt
+        end
+      elsif compress
+        case compress
+        when true
+          CompressedSlice
+        when :bzip2
+          BZip2OutputSlice
+        else
+          raise(ArgumentError, "Unknown job `compress` value: #{compress}") unless compress.is_a?(Slices)
+          # Returns the supplied class to use for compression.
+          compress
+        end
+      end
+    end
+
+    def self.extract_value(type, value)
+      value.is_a?(Hash) ? value[type] : value
+    end
+  end
+end
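
To illustrate how `factory` and `slice_class` combine (the job class name is hypothetical): a job that encrypts its input and BZip2-compresses its output resolves to the following collections.

    class SecureExportJob < RocketJob::Job
      include RocketJob::Batch
      self.encrypt  = {input: true}
      self.compress = {output: :bzip2}
    end

    job = SecureExportJob.new
    RocketJob::Sliced.factory(:input, :main, job)
    # => Sliced::Input on "rocket_job.inputs.<id>" with slice_class: EncryptedSlice
    RocketJob::Sliced.factory(:output, :main, job)
    # => Sliced::Output on "rocket_job.outputs.<id>" with slice_class: BZip2OutputSlice
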
@@ -0,0 +1,43 @@
+module RocketJob
+  module Sliced
+    # This is a specialized output serializer that renders each output slice as a single BZip2 compressed stream.
+    # BZip2 allows multiple output streams to be written into a single BZip2 file.
+    #
+    # Notes:
+    # * The `bzip2` linux command line utility supports multiple embedded BZip2 streams,
+    #   but some other custom implementations may not. They may only read the first slice and stop.
+    # * It is only designed for use on output collections.
+    #
+    # To download the output when using this slice:
+    #
+    #   # Download the binary BZip2 streams into a single file
+    #   IOStreams.path(output_file_name).stream(:none).writer do |io|
+    #     job.download { |slice| io << slice[:binary] }
+    #   end
+    class BZip2OutputSlice < ::RocketJob::Sliced::Slice
+      # This is a specialized binary slice for creating binary data from each slice
+      # that must be downloaded as-is into output files.
+      def self.binary?
+        true
+      end
+
+      private
+
+      def parse_records
+        records = attributes.delete("records")
+
+        # Convert BSON::Binary to a string
+        @records = [{binary: records.data}]
+      end
+
+      def serialize_records
+        return [] if @records.nil? || @records.empty?
+
+        lines = records.to_a.join("\n") + "\n"
+        s = StringIO.new
+        IOStreams::Bzip2::Writer.stream(s) { |io| io.write(lines) }
+        BSON::Binary.new(s.string)
+      end
+    end
+  end
+end
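
To make the serialization concrete: `serialize_records` joins a slice's records with newlines, compresses them into an independent BZip2 stream, and stores the result as a single `BSON::Binary`; `parse_records` hands the compressed bytes back as one `{binary: ...}` record. A standalone sketch of the write side using IOStreams directly (`example.bz2` is a placeholder file name):

    require "iostreams"
    require "stringio"

    lines = ["line 1", "line 2"].join("\n") + "\n"

    # Compress one slice's worth of lines, as serialize_records does.
    compressed = StringIO.new
    IOStreams::Bzip2::Writer.stream(compressed) { |io| io.write(lines) }

    # Concatenating several such streams still yields a valid .bz2 file,
    # which is why download can simply append each slice's binary payload.
    IOStreams.path("example.bz2").stream(:none).writer do |io|
      io << compressed.string
    end
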
@@ -5,7 +5,7 @@ module RocketJob
       # Create indexes before uploading
       create_indexes
       Writer::Input.collect(self, on_first: on_first, &block)
-    rescue StandardError => e
+    rescue Exception => e
       drop
       raise(e)
     end
@@ -73,7 +73,7 @@ module RocketJob
        count += 1
      end
      count
-    rescue StandardError => e
+    rescue Exception => e
      drop
      raise(e)
    end
@@ -91,7 +91,7 @@ module RocketJob
        count += 1
      end
      count
-    rescue StandardError => e
+    rescue Exception => e
      drop
      raise(e)
    end
@@ -94,6 +94,12 @@ module RocketJob
       end
     end
 
+    # Returns whether this is a specialized binary slice for creating binary data from each slice
+    # that is then just downloaded as-is into output files.
+    def self.binary?
+      false
+    end
+
     # `records` array has special handling so that it can be modified in place instead of having
     # to replace the entire array every time. For example, when appending lines with `<<`.
     def records
@@ -42,6 +42,12 @@ module RocketJob
       slice
     end
 
+    # Returns whether this collection contains specialized binary slices for creating binary data from each slice
+    # that is then just downloaded as-is into output files.
+    def binary?
+      slice_class.binary?
+    end
+
     # Returns output slices in the order of their id
     # which is usually the order in which they were written.
     def each
@@ -9,16 +9,22 @@ module RocketJob
         @supervisor = supervisor
       end
 
-      def kill(server_id: nil, name: nil, wait_timeout: 3)
+      def kill(server_id: nil, name: nil, wait_timeout: 5)
         return unless my_server?(server_id, name)
 
         supervisor.synchronize do
+          Supervisor.shutdown!
+
+          supervisor.logger.info("Stopping Pool")
           supervisor.worker_pool.stop
-          supervisor.worker_pool.join(wait_timeout)
+          unless supervisor.worker_pool.living_count == 0
+            supervisor.logger.info("Giving pool #{wait_timeout} seconds to terminate")
+            sleep(wait_timeout)
+          end
+          supervisor.logger.info("Kill Pool")
          supervisor.worker_pool.kill
        end
 
-        Supervisor.shutdown!
        logger.info "Killed"
      end
 
@@ -55,7 +55,9 @@ module RocketJob
 
    def stop!
      server.stop! if server.may_stop?
-      worker_pool.stop
+      synchronize do
+        worker_pool.stop
+      end
      until worker_pool.join
        logger.info "Waiting for workers to finish processing ..."
        # One or more workers still running so update heartbeat so that server reports "alive".
@@ -1,3 +1,3 @@
 module RocketJob
-  VERSION = "5.3.1".freeze
+  VERSION = "5.4.0".freeze
 end
@@ -61,6 +61,7 @@ module RocketJob
     # Kill Worker threads
     def kill
       workers.each(&:kill)
+      workers.clear
     end
 
     # Wait for all workers to stop.
@@ -13,6 +13,9 @@ require "rocket_job/extensions/mongoid/clients/options"
 require "rocket_job/extensions/mongoid/contextual/mongo"
 require "rocket_job/extensions/mongoid/factory"
 
+# Apply patches for deprecated Symbol type
+require "rocket_job/extensions/mongoid/remove_warnings"
+
 # @formatter:off
 module RocketJob
   autoload :ActiveWorker, "rocket_job/active_worker"
@@ -26,6 +29,7 @@ module RocketJob
   autoload :Worker, "rocket_job/worker"
   autoload :Performance, "rocket_job/performance"
   autoload :Server, "rocket_job/server"
+  autoload :Sliced, "rocket_job/sliced"
   autoload :Subscriber, "rocket_job/subscriber"
   autoload :Supervisor, "rocket_job/supervisor"
   autoload :ThrottleDefinition, "rocket_job/throttle_definition"
@@ -45,10 +49,6 @@ module RocketJob
     autoload :Transaction, "rocket_job/plugins/job/transaction"
     autoload :Worker, "rocket_job/plugins/job/worker"
   end
-  module Rufus
-    autoload :CronLine, "rocket_job/plugins/rufus/cron_line"
-    autoload :ZoTime, "rocket_job/plugins/rufus/zo_time"
-  end
   autoload :Cron, "rocket_job/plugins/cron"
   autoload :Document, "rocket_job/plugins/document"
   autoload :ProcessingWindow, "rocket_job/plugins/processing_window"
@@ -71,22 +71,9 @@ module RocketJob
     autoload :SimpleJob, "rocket_job/jobs/simple_job"
     autoload :UploadFileJob, "rocket_job/jobs/upload_file_job"
     module ReEncrypt
-      autoload :RelationalJob, "rocket_job/jobs/re_encrypt/relational_job"
-    end
-  end
-
-  module Sliced
-    autoload :CompressedSlice, "rocket_job/sliced/compressed_slice"
-    autoload :EncryptedSlice, "rocket_job/sliced/encrypted_slice"
-    autoload :Input, "rocket_job/sliced/input"
-    autoload :Output, "rocket_job/sliced/output"
-    autoload :Slice, "rocket_job/sliced/slice"
-    autoload :Slices, "rocket_job/sliced/slices"
-    autoload :Store, "rocket_job/sliced/store"
-
-    module Writer
-      autoload :Input, "rocket_job/sliced/writer/input"
-      autoload :Output, "rocket_job/sliced/writer/output"
+      if defined?(ActiveRecord) && defined?(SyncAttr)
+        autoload :RelationalJob, "rocket_job/jobs/re_encrypt/relational_job"
+      end
     end
   end
 
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rocketjob
 version: !ruby/object:Gem::Version
-  version: 5.3.1
+  version: 5.4.0
 platform: ruby
 authors:
 - Reid Morrison
-autorequire: 
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-06-15 00:00:00.000000000 Z
+date: 2020-12-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: aasm
@@ -108,8 +108,8 @@ dependencies:
   - - "~>"
     - !ruby/object:Gem::Version
       version: '1.3'
-description: 
-email: 
+description:
+email:
 executables:
 - rocketjob
 - rocketjob_perf
@@ -148,6 +148,7 @@ files:
 - lib/rocket_job/extensions/mongoid/clients/options.rb
 - lib/rocket_job/extensions/mongoid/contextual/mongo.rb
 - lib/rocket_job/extensions/mongoid/factory.rb
+- lib/rocket_job/extensions/mongoid/remove_warnings.rb
 - lib/rocket_job/extensions/rocket_job_adapter.rb
 - lib/rocket_job/heartbeat.rb
 - lib/rocket_job/job.rb
@@ -185,6 +186,8 @@ files:
 - lib/rocket_job/server.rb
 - lib/rocket_job/server/model.rb
 - lib/rocket_job/server/state_machine.rb
+- lib/rocket_job/sliced.rb
+- lib/rocket_job/sliced/bzip2_output_slice.rb
 - lib/rocket_job/sliced/compressed_slice.rb
 - lib/rocket_job/sliced/encrypted_slice.rb
 - lib/rocket_job/sliced/input.rb
@@ -209,7 +212,7 @@ homepage: http://rocketjob.io
 licenses:
 - Apache-2.0
 metadata: {}
-post_install_message: 
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -217,7 +220,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: '2.3'
+      version: '2.5'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
@@ -225,7 +228,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
 requirements: []
 rubygems_version: 3.0.8
-signing_key: 
+signing_key:
 specification_version: 4
 summary: Ruby's missing batch processing system.
 test_files: []