inst_data_shipper 0.1.0.beta1 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,7 +4,7 @@ module InstDataShipper
     include Concerns::Chunking

     def chunk_data(generator, table:, extra: nil)
-      warehouse_name = table_def[:warehouse_name]
+      warehouse_name = table[:warehouse_name]

       super(generator) do |batch, idx|
         bits = [warehouse_name, extra, idx].compact
@@ -5,7 +5,7 @@ module InstDataShipper
     define_hook :initialize_dump_batch
     define_hook :finalize_dump_batch

-    def self.perform_dump(destinations:)
+    def self.perform_dump(destinations)
       raise "Must subclass Dumper to use perform_dump" if self == Dumper

       dumper = new(destinations)
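Callers must now pass destinations positionally rather than as a keyword. A before/after sketch (`MyDumper` and the destination string are hypothetical):

    MyDumper.perform_dump(destinations: ["hosted-data://token@example.com"])  # 0.1.0.beta1
    MyDumper.perform_dump(["hosted-data://token@example.com"])                # 0.2.0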
@@ -14,48 +14,166 @@ module InstDataShipper
       dumper.tracker
     end

-    protected
-
-    attr_reader :executor
+    def self.define(include: [], schema:, &blk)
+      Class.new(self) do
+        include(*include)

-    def initialize(destinations = nil, executor: nil)
-      @raw_destinations = destinations
-      @executor = executor
+        define_method(:enqueue_tasks, &blk)
+        define_method(:schema) { schema }
+      end
     end

-    def enqueue_tasks
-      raise NotImplementedError
+    def self.current(executor: nil)
+      cur_batch = Thread.current[CanvasSync::JobBatches::CURRENT_BATCH_THREAD_KEY]
+      ctx = cur_batch&.context || {}
+      return nil unless ctx[:origin_class].present? && ctx[:tracker_id].present?
+
+      clazz = ctx[:origin_class]
+      clazz = clazz.constantize if clazz.is_a?(String)
+      clazz.new(executor: executor)
     end

+    public
+
     def begin_dump
       raise "Dump already begun" unless @raw_destinations.present?

-      @tracker = tracker = DumpBatch.create(job_class: self.class.to_s, status: 'in_progress')
+      @tracker = tracker = DumpBatch.create(job_class: self.class.to_s, genre: export_genre, status: 'in_progress')
+
+      @batch_context = context = {
+        # TODO Consider behavior if last is still running
+        incremental_since: last_successful_tracker&.created_at,
+      }

       destinations.each do |dest|
-        dest.initialize_dump()
+        dest.preinitialize_dump(context)
+      end
+
+      begin
+        begin
+          destinations.each do |dest|
+            dest.initialize_dump(context)
+          end
+
+          run_hook(:initialize_dump_batch, context)
+        ensure
+          @batch_context = nil
+          context[:tracker_id] = tracker.id
+          context[:origin_class] = batch_context[:origin_class] || self.class.to_s
+          context[:destinations] = @raw_destinations
+        end
+
+        Sidekiq::Batch.new.tap do |batch|
+          context[:root_bid] = batch.bid
+          tracker.update(batch_id: batch.bid)
+
+          batch.description = "HD #{export_genre} Export #{tracker.id} Root"
+          batch.context = context
+          batch.on(:success, "#{self.class}#finalize_dump")
+          batch.on(:death, "#{self.class}#cleanup_fatal_error!")
+          batch.jobs do
+            enqueue_tasks
+          rescue => ex
+            delayed :cleanup_fatal_error!
+            InstDataShipper.handle_suppressed_error(ex)
+            tracker.update(status: 'failed', exception: ex.message, backtrace: ex.backtrace.join("\n"))
+          end
+        end
+      rescue => ex
+        if context
+          batch ||= Sidekiq::Batch.new.tap do |batch|
+            batch.description = "HD #{export_genre} Export #{tracker.id} Early Failure Cleanup"
+            batch.context = context
+            batch.jobs do
+              delayed :cleanup_fatal_error!
+            end
+          end
+        end
+        tracker.update(status: 'failed', exception: ex.message, backtrace: ex.backtrace.join("\n"))
+        raise ex
+      end
+    end
+
+    def tracker
+      @tracker ||= batch_context[:tracker_id].present? ? DumpBatch.find(batch_context[:tracker_id]) : nil
+    end
+
+    def last_successful_tracker
+      @last_successful_tracker ||= DumpBatch.where(job_class: self.class.to_s, genre: export_genre, status: 'completed').order(created_at: :desc).first
+    end
+
+    def export_genre
+      self.class.to_s
+    end
+
+    def origin_class
+      batch_context[:origin_class]&.constantize || self.class
+    end
+
+    def schema
+      return origin_class::SCHEMA if defined?(origin_class::SCHEMA)
+      raise NotImplementedError
+    end
+
+    def schema_digest
+      Digest::MD5.hexdigest(schema.to_json)[0...8]
+    end
+
+    def table_is_incremental?(table_def)
+      return false unless incremental_since.present?
+
+      # TODO Return false if table's schema changes
+      if (inc = table_def[:incremental]).present?
+        differ = inc[:if]
+        return !!incremental_since if differ.nil?
+
+        differ = :"#{differ}".to_proc if differ.is_a?(Symbol)
+        differ = instance_exec(&differ) if differ.is_a?(Proc)
+        return !!differ
       end

-      context = {}
-      run_hook(:initialize_dump_batch, context)
-
-      Sidekiq::Batch.new.tap do |batch|
-        batch.description = "HD #{export_genre} Export #{tracker.id} Root"
-        batch.context = {
-          **context,
-          root_bid: batch.bid,
-          tracker_id: tracker.id,
-          origin_class: batch_context[:origin_class] || self.class.to_s,
-          destinations: @raw_destinations,
-        }
-        batch.on(:success, "#{self.class}#finalize_dump")
-        batch.on(:death, "#{self.class}#cleanup_fatal_error!")
-        batch.jobs do
-          enqueue_tasks
+      false
+    end
+
+    def incremental_since
+      batch_context[:incremental_since]
+    end
+
+    def lookup_table_schema(*identifiers)
+      identifiers.compact.each do |ident|
+        if ident.is_a?(Hash)
+          key = ident.keys.first
+          value = ident.values.first
+        else
+          key = :warehouse_name
+          value = ident
+        end
+
+        value = Array(value).compact
+
+        schema[:tables].each do |ts|
+          return ts if value.include?(ts[key])
         end
       end

-      # TODO Catch errors in here and cleanup as needed
+      nil
+    end
+
+    def lookup_table_schema!(*identifiers)
+      lookup_table_schema(*identifiers) || raise("No table schema found for #{identifiers.inspect}")
+    end
+
+    protected
+
+    attr_reader :executor
+
+    def initialize(destinations = nil, executor: nil)
+      @raw_destinations = Array(destinations)
+      @executor = executor
+    end
+
+    def enqueue_tasks
+      raise NotImplementedError
     end

     def upload_data(table_def, extra: nil, &datagen)
@@ -96,7 +214,7 @@ module InstDataShipper
     def finalize_dump(_status, _opts)
       run_hook(:finalize_dump_batch)

-      destination.each do |dest|
+      destinations.each do |dest|
         dest.finalize_dump
       end

@@ -108,41 +226,31 @@ module InstDataShipper

       run_hook(:finalize_dump_batch)

-      destination.each do |dest|
+      destinations.each do |dest|
         dest.cleanup_fatal_error
-      rescue StandardError # rubocop:disable Lint/SuppressedException
+      rescue => ex
+        InstDataShipper.handle_suppressed_error(ex)
       end

       DumpBatch.find(batch_context[:tracker_id]).update(status: 'failed')

-      CanvasSync::JobBatches::Batch.delete_prematurely!(batch_context[:root_bid])
+      CanvasSync::JobBatches::Batch.delete_prematurely!(batch_context[:root_bid]) if batch_context[:root_bid].present?
     end

     # Helper Methods

-    def table_schemas
-      return origin_class::TABLE_SCHEMAS if defined?(origin_class::TABLE_SCHEMAS)
-      raise NotImplementedError
-    end
-
     def delayed(mthd, *args, **kwargs)
-      AsyncCaller.perform_later(self.class.to_s, mthd.to_s, *args, **kwargs)
+      Jobs::AsyncCaller.perform_later(self.class.to_s, mthd.to_s, *args, **kwargs)
     end

-    def tracker
-      @tracker ||= batch_context[:tracker_id].present? ? DumpBatch.find(batch_context[:tracker_id]) : nil
-    end
+    delegate :working_dir, to: :executor

-    def export_genre
-      self.class.to_s.gsub(/HD|ExportJob/, '')
-    end
-
-    def origin_class
-      batch_context[:origin_class]&.constantize || self.class
+    def batch
+      Thread.current[CanvasSync::JobBatches::CURRENT_BATCH_THREAD_KEY]
     end

-    def working_dir
-      executor.working_dir
+    def batch_context
+      @batch_context || batch&.context || {}
     end

     def destinations_for_table(table_def)
@@ -150,7 +258,7 @@ module InstDataShipper
     end

     def destinations
-      @destinations ||= (@raw_destinations || batch_context[:destinations]).map.with_index do |dest, i|
+      @destinations ||= (@raw_destinations.presence || batch_context[:destinations]).map.with_index do |dest, i|
         dcls = InstDataShipper.resolve_destination(dest)
         dcls.new("#{InstDataShipper.redis_prefix}:dump#{tracker.id}:dest#{i}", dest, self)
       end
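Taken together, these Dumper changes replace the subclass-only workflow with a declarative entry point. A minimal sketch of the new class-level API; `MyDumper`, `MY_SCHEMA`, `:import_users`, and the destination string are invented for illustration, while `define`, `perform_dump`, `current`, and `incremental_since` come from the diff above:

    MyDumper = InstDataShipper::Dumper.define(schema: MY_SCHEMA) do
      # This block becomes the dumper's enqueue_tasks implementation
      delayed :import_users
    end

    # Destinations are now passed positionally (see the perform_dump change)
    MyDumper.perform_dump(["hosted-data://token@example.instructure.com"])

    # From inside a batch job, the originating dumper can be rehydrated
    # out of the Sidekiq batch context:
    dumper = InstDataShipper::Dumper.current
    dumper&.incremental_since  # => created_at of the last successful dump, if any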
@@ -4,5 +4,11 @@ module InstDataShipper
   class Engine < ::Rails::Engine
     isolate_namespace InstDataShipper

+    initializer :append_migrations do |app|
+      config.paths["db/migrate"].expanded.each do |expanded_path|
+        app.config.paths["db/migrate"] << expanded_path
+      end
+      ActiveRecord::Migrator.migrations_paths = Rails.application.paths['db/migrate'].to_a
+    end
   end
 end
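This initializer is the common Rails-engine pattern of appending the engine's migration paths to the host application's, so the `create_inst_data_shipper_dump_batches` migration (listed in the gem's files below) should run with a plain `bin/rails db:migrate`, without a separate install-migrations step.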
@@ -1,7 +1,14 @@
+
+require "sidekiq"
+
 module InstDataShipper
   module Jobs
     class AsyncCaller < InstDataShipper::Jobs::Base
-      sidekiq_options retry: 6 if defined?(sidekiq_options)
+      sidekiq_options(retry: 0) if defined?(sidekiq_options)
+
+      def self.get_sidekiq_options
+        { retry: 0 }
+      end

       def self.call_from_pool(pool, clazz, method, *args, **kwargs)
         pool.add_job(
@@ -12,7 +19,8 @@ module InstDataShipper
       end

       def perform(clazz, method, *args, **kwargs)
-        clazz.constantize.new(executor: self).send(method.to_sym, *args, **kwargs)
+        clazz = clazz.constantize if clazz.is_a?(String)
+        clazz.new(executor: self).send(method.to_sym, *args, **kwargs)
       end
     end
   end
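The Dumper's `delayed` helper and `AsyncCaller#perform` form a round trip: the dumper serializes itself as a class-name string, and the job constantizes it back (now also tolerating an already-constantized class). A rough sketch, with `MyDumper` and `do_work` standing in for any dumper and method:

    # The class and method travel through Sidekiq as strings...
    InstDataShipper::Jobs::AsyncCaller.perform_later("MyDumper", "do_work")

    # ...and perform reverses the trip on the worker:
    #   clazz = "MyDumper".constantize  # => MyDumper
    #   clazz.new(executor: self).send(:do_work)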
@@ -1,35 +1,59 @@
 module InstDataShipper
+  # This class ends up fill two roles - Schema and Mapping.
+  # It makes for a clean API, but it's a little less canonical since, (eg) the S3 destination doesn't need column type annotations.
   class SchemaBuilder
-    attr_reader :tables
+    attr_reader :schema

     def initialize
-      @tables = []
+      @schema = {
+        tables: [],
+      }
     end

     def self.build(&block)
       builder = new
       builder.instance_exec(&block)
-      builder.tables
+      builder.schema
+    end
+
+    def version(version)
+      @schema[:version] = version
     end

-    def table(model_or_name, description = nil, as: nil, includes: nil, incremental: false, &block)
-      as ||= model_or_name
-      as = as.table_name if as.respond_to?(:table_name)
+    def extend_table_builder(&block)
+      @table_builder_class ||= Class.new(TableSchemaBuilder)
+      @table_builder_class.class_eval(&block)
+    end

+    def table(model_or_name, description = nil, model: nil, query: nil, **extra, &block)
       tdef = {
+        warehouse_name: nil,
         description: description,
-        model: model_or_name.is_a?(String) ? nil : model_or_name,
-        warehouse_name: as.to_s,
-        incremental: incremental,
         columns: [],
-        includes: includes,
+
+        model: model,
+        query: query,
+        **extra,
       }

-      TableSchemaBuilder.build(tdef, &block)
+      if model_or_name.is_a?(ActiveRecord::Relation)
+        raise "model specified twice" if model.present?
+        raise "query specified twice" if query.present?
+
+        tdef[:query] = model_or_name
+        tdef[:model] = model_or_name.model
+      elsif model_or_name.is_a?(Class) && model_or_name < ActiveRecord::Base
+        tdef[:warehouse_name] = model_or_name.table_name
+        tdef[:model] = model_or_name
+      else
+        tdef[:warehouse_name] = model_or_name
+      end
+
+      @table_builder_class.build(tdef, &block)

-      @tables << tdef
+      @schema[:tables] << tdef

-      self
+      tdef
     end

     class TableSchemaBuilder
@@ -46,48 +70,86 @@ module InstDataShipper
       builder.columns
     end

-    # rubocop:disable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
-    def column(name, *args, **kwargs, &block)
-      name = { name => name } unless name.is_a?(Hash)
-      raise ArgumentError, 'Exactly one name must be provided' unless name.count == 1
+    # def annotate(key, value)
+    #   options[key] = value
+    # end
+
+    def version(version)
+      options[:version] = version
+    end
+
+    def incremental(scope=nil, **kwargs)
+      if (extras = kwargs.keys - %i[on if]).present?
+        raise ArgumentError, "Unsuppored options: #{extras.inspect}"
+      end
+
+      options[:incremental] = {
+        on: Array(kwargs[:on]),
+        scope: scope,
+        if: kwargs[:if],
+      }
+    end
+
+    def column(name, *args, refs: [], from: nil, **extra, &block)
+      from ||= name.to_s

       cdef = {
-        local_name: name.keys[0].to_s,
-        warehouse_name: name.values[0].to_s,
-        transformer: block,
+        warehouse_name: name.to_s,
+        from: from,
+        **extra,
       }

-      [:description, :type, :refs => :references].each do |k|
-        if k.is_a? Hash
-          k.each do |hk, hv|
-            cdef[hv] = kwargs.delete(hk) if kwargs.key?(hk)
-          end
-        elsif kwargs.key?(k)
-          cdef[k] = kwargs.delete(k)
-        end
+      if args[0].is_a?(Symbol)
+        cdef[:type] = args.shift()
       end

-      cdef[:references] = Array(cdef[:references])
-
-      args[0..1].each do |a|
-        k = (a.is_a?(String) && :description) || (a.is_a?(Symbol) && :type) || nil
-        raise ArgumentError, 'Unsupported Argument' if k.nil?
-        raise ArgumentError, "Duplicate Argument for #{k}" if cdef.key?(k)
+      if args[0].is_a?(String)
+        cdef[:description] = args.shift()
+      end

-        cdef[k] = a
+      if args.present?
+        raise ArgumentError, "Received unexpected arguments: #{args.inspect}"
       end

+      cdef[:references] = Array(refs)
+
       if options[:model].is_a?(Class) && cdef[:local_name].to_s.ends_with?('_id')
         rel_name = cdef[:local_name].to_s[0...-3]
         refl = options[:model].reflections[rel_name]
         cdef[:references] << "#{refl.klass}##{refl.options[:primary_key] || 'id'}" if refl.present? && !refl.polymorphic?
       end

+      compiled_from = compile_transformer(from)
+
+      cdef[:block] = ->(row) {
+        value = instance_exec(row, &compiled_from)
+        value = instance_exec(value, row, &block) if block.present?
+        value
+      }
+
       @columns << cdef

-      self
+      cdef
+    end
+
+    protected
+
+    def compile_transformer(from)
+      if from.present?
+        if from.is_a?(Symbol)
+          ->(row) { row.send(from) }
+        elsif from.is_a?(Proc)
+          from
+        elsif from.is_a?(String)
+          ->(row) { row[from] }
+        else
+          raise ArgumentError, "Invalid transformer: #{from.inspect}"
+        end
+      else
+        ->(row) { row }
+      end
     end
-    # rubocop:enable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
+
   end
 end
 end
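For orientation, the reworked schema DSL reads roughly like this. The `User` table, columns, and incremental scope are invented; the option names (`version`, `incremental`, `refs:`, `from:`) follow the builder code above:

    SCHEMA = InstDataShipper::SchemaBuilder.build do
      version "1.0.0"

      # As written above, table() delegates to @table_builder_class, which
      # extend_table_builder lazily creates - so call it at least once.
      extend_table_builder do
        # custom column helpers could be defined here
      end

      table(User, "All users in the account") do
        incremental "updated_at", on: [:id]

        column :id, :bigint
        column :full_name, :varchar, "The user's display name", from: :name
        column :root_account_id, :bigint, refs: ["Account#id"]
        column :sis_id, :varchar, from: ->(row) { row.pseudonym&.sis_user_id }
      end
    end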
@@ -1,3 +1,3 @@
 module InstDataShipper
-  VERSION = "0.1.0.beta1".freeze
+  VERSION = "0.2.0".freeze
 end
@@ -23,15 +23,23 @@ module InstDataShipper
       destination = @destination_aliases[type]
     end

-    safe_constantize(destination)
+    destination.constantize
   end

   def start_basic_dump(*args, **kwargs, &block)
     BasicDumper.perform_dump(*args, **kwargs, &block)
   end

+  def handle_suppressed_error(ex)
+    logger.error "Suppressed Error: #{ex.message}"
+    logger.error ex.backtrace.join("\n")
+    Raven.capture_exception(ex) if defined?(Raven)
+    Sentry.capture_exception(ex) if defined?(Sentry)
+  end
+
   def logger
     return @logger if defined? @logger
+    # TODO Annotate logs with DumpBatch ID
     @logger = Logger.new(STDOUT)
     @logger.level = Logger::DEBUG
     @logger
@@ -42,7 +50,7 @@ module InstDataShipper
   end

   def redis_prefix
-    pfx = "hdd"
+    pfx = "ids"
     pfx = "#{Apartment::Tenant.current}:#{pfx}" if defined?(Apartment)
     pfx
   end
@@ -66,6 +74,8 @@ Dir[File.dirname(__FILE__) + "/inst_data_shipper/destinations/*.rb"].each do |file|
   basename = File.basename(file, ".rb")
   next if basename == "base"

-  InstDataShipper.alias_destination(basename.dasherize, "InstDataShipper::Destinations::#{basename.classify}")
+  InstDataShipper.alias_destination(basename.dasherize, "InstDataShipper::Destinations::#{basename.camelize}")
 end

+require "inst_data_shipper/dumper"
+require "inst_data_shipper/basic_dumper"
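One subtle fix in the loader above: `classify` routes the file name through ActiveSupport's singularizing inflector, which mangles names ending in plural-looking words, while `camelize` only adjusts case. For a file named hosted_data.rb:

    "hosted_data".classify  # => "HostedDatum" -- not a real constant
    "hosted_data".camelize  # => "HostedData"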
data/spec/spec_helper.rb CHANGED
@@ -7,7 +7,7 @@ require File.expand_path("../dummy/config/environment.rb", __FILE__)
 require "bundler/setup"
 require 'rspec/rails'
 require 'spec_helper'
-require 'factory_girl_rails'
+require 'factory_bot_rails'
 require 'timecop'
 require 'webmock/rspec'
 require 'support/fake_canvas'
@@ -29,7 +29,7 @@ ActiveRecord::Migration.maintain_test_schema!
 RSpec.configure do |config|
   config.extend WithModel

-  config.include FactoryGirl::Syntax::Methods
+  config.include FactoryBot::Syntax::Methods
   config.use_transactional_fixtures = true
   config.infer_spec_type_from_file_location!
   config.filter_rails_from_backtrace!
metadata CHANGED
@@ -1,27 +1,27 @@
 --- !ruby/object:Gem::Specification
 name: inst_data_shipper
 version: !ruby/object:Gem::Version
-  version: 0.1.0.beta1
+  version: 0.2.0
 platform: ruby
 authors:
 - Instructure CustomDev
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-03-06 00:00:00.000000000 Z
+date: 2024-03-11 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rails
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "<"
+    - - "~>"
       - !ruby/object:Gem::Version
         version: '6.0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "<"
+    - - "~>"
       - !ruby/object:Gem::Version
         version: '6.0'
 - !ruby/object:Gem::Dependency
@@ -360,6 +360,20 @@ dependencies:
   - - ">="
     - !ruby/object:Gem::Version
       version: '0'
+- !ruby/object:Gem::Dependency
+  name: faraday_middleware
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 description:
 email:
 - pseng@instructure.com
@@ -369,8 +383,8 @@ extra_rdoc_files: []
 files:
 - README.md
 - Rakefile
-- app/models/hosted_data_dumper/dump_batch.rb
-- db/migrate/20240301090836_create_canvas_sync_sync_batches.rb
+- app/models/inst_data_shipper/dump_batch.rb
+- db/migrate/20240301090836_create_inst_data_shipper_dump_batches.rb
 - lib/inst_data_shipper.rb
 - lib/inst_data_shipper/basic_dumper.rb
 - lib/inst_data_shipper/concerns/hooks.rb
@@ -385,7 +399,6 @@ files:
 - lib/inst_data_shipper/engine.rb
 - lib/inst_data_shipper/jobs/async_caller.rb
 - lib/inst_data_shipper/jobs/base.rb
-- lib/inst_data_shipper/jobs/basic_dump_job.rb
 - lib/inst_data_shipper/record.rb
 - lib/inst_data_shipper/schema_builder.rb
 - lib/inst_data_shipper/version.rb
@@ -422,9 +435,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
     version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">"
+  - - ">="
   - !ruby/object:Gem::Version
-    version: 1.3.1
+    version: '0'
 requirements: []
 rubygems_version: 3.1.6
 signing_key:
@@ -1,11 +0,0 @@
-module InstDataShipper
-  module Jobs
-    class BasicDumpJob < InstDataShipper::Jobs::Base
-      sidekiq_options retry: 3 if defined?(sidekiq_options)
-
-      def perform(endpoints, dump_class = nil)
-        dumper.perform_dump(endpoints: endpoints)
-      end
-    end
-  end
-end