inst_data_shipper 0.2.2 → 0.2.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 522ed20f5f574d452bf2af49cf8e7ca28377d86d16c9c89cce0aa02a6d89e025
-   data.tar.gz: 5d0cedf102b079833cbf7f670023ab700beb557e1902f454c5c709b6e7b4629e
+   metadata.gz: fc59b5931e2a448fa9a687147b848800fd4478da037762eb3075f3e9955706d3
+   data.tar.gz: 13382736ae30290d0997af560ce0c6b0e5445dbb6ddbbf832368fd495e467dca
  SHA512:
-   metadata.gz: 7880a8aa492c216d767af6a7bf87619ea9ce9f2dd6b2eccdc938acae32b6dd69bf756508f0bf365772e909b93b72b9288ebcdf54afec18348b92da60e5867524
-   data.tar.gz: 3d3762b4bbf53380f519b4b77ec23dce500d403b793c3da7eee6fe9290364048fc579e2c3f4fe39b86f46fc7c7f5ebfa716a30225a38f4beb05b40c3fd25b1a5
+   metadata.gz: 3a118fc9c812be9b61c4ddfd7506a8f4c6b0c1d1ab0ef7deef3d45e13ee35631f110283eca1d95c2ce2c817e2c8144735d716380d9bbaf07e00516913d73ec31
+   data.tar.gz: c103956f29f5a27c6039c4eb8381d9a42a0fe54bd1c9df8e0b86daff32f7f768c61f255ab8819b1b0e197b895becc0fbf9ce55a3be8072f69e6597aef4416e6b
data/README.md CHANGED
@@ -22,6 +22,10 @@ bundle exec rake db:migrate
  
  The main tool provided by this Gem is the `InstDataDumper::Dumper` class. It is used to define a "Dump" which is a combination of tasks and schema.
  
+ It is assumed that a `Dumper` class definition is the source of truth for all tables that it manages, and that no other processes affect the tables' data or schema. You can break this assumption, but you should understand how the `incremental` logic works and what will and will not trigger a full table upload. Dumpers have an `export_genre` method that determines what Dumps to look at when calculating incrementals.
+ - At a high level, the HD backend will look for a past dump of the same genre. If none is found, a full upload of all tables is triggered. If one is found, each table's schema is compared; any tables with mismatched schemas (determined by hashing) will do a full upload.
+ - Note that `Proc`s in the schema are not included in the hash calculation. If you change a `Proc` implementation and need to trigger a full upload of the table, you'll need to change something else too (like the `version`).
+
  Here is an example `Dumper` implementation, wrapped in an ActiveJob job:
  ```ruby
  class HostedDataPushJob < ApplicationJob
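In sketch form, the decision those bullets describe looks roughly like this (illustrative only; `find_last_dump`, `schema_for`, and the `upload_*` helpers are hypothetical names, and the real logic lives in the HostedData destination further down this diff):

```ruby
# Illustrative sketch of the full-vs-incremental decision described above.
last_dump = find_last_dump(genre: dumper.export_genre) # hypothetical lookup by genre tag

if last_dump.nil?
  upload_all_tables_in_full # no prior dump of this genre, so everything goes full
else
  schema[:tables].each do |table|
    prior = last_dump.schema_for(table[:warehouse_name]) # hypothetical accessor
    if prior.nil? || prior[:table_schema_hash] != table_schema_hash(table)
      upload_table_in_full(table) # schema mismatch; Procs are excluded from the hash
    else
      upload_table_incrementally(table)
    end
  end
end
```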
@@ -55,27 +59,31 @@ class HostedDataPushJob < ApplicationJob
    # `if:` may be a Proc or a Symbol (of a method on the Dumper)
    incremental "updated_at", on: [:id], if: ->() {}
  
-   # Schema's may declaratively define the data source.
+   # Schemas may declaratively define the data source.
    # This can be used for basic schemas where there's a 1:1 mapping between source table and destination table, and there is no conditional logic that needs to be performed.
    # In order to apply these statements, your Dumper must call `auto_enqueue_from_schema`.
    source :local_table
    # A Proc can also be passed. The below is equivalent to the above
    source ->(table_def) { import_local_table(table_def[:model] || table_def[:warehouse_name]) }
  
+   # You may manually note a version on the table.
+   # Note that if a version is present, the version value replaces the hash comparison when calculating incrementals, so you must change the version whenever the schema changes enough to require a full upload.
+   version "1.0.0"
+
    column :name_in_destinations, :maybe_optional_sql_type, "Optional description of column"
  
    # The type may usually be omitted if the `table()` is passed a Model class, but strings are an exception to this
-   custom_column :name, :"varchar(128)"
+   column :name, :"varchar(128)"
  
    # `from:` may be...
    # A Symbol of a method to be called on the record
-   custom_column :sis_type, :"varchar(32)", from: :some_model_method
+   column :sis_type, :"varchar(32)", from: :some_model_method
    # A String of a column to read from the record
-   custom_column :sis_type, :"varchar(32)", from: "sis_source_type"
+   column :sis_type, :"varchar(32)", from: "sis_source_type"
    # A Proc to be called with each record
-   custom_column :sis_type, :"varchar(32)", from: ->(rec) { ... }
+   column :sis_type, :"varchar(32)", from: ->(rec) { ... }
    # Not specified. Will default to using the Schema Column Name as a String ("sis_type" in this case)
-   custom_column :sis_type, :"varchar(32)"
+   column :sis_type, :"varchar(32)"
  end
  
  table("my_table", model: ALocalModel) do
@@ -1,12 +1,12 @@
  module InstDataShipper
    class BasicDumper < Dumper
-     def self.perform_dump(destinations:, schema:, &block)
+     def self.perform_dump(destinations:, schema:, force_full_tables: nil, &block)
        raise "Schema must be a constantizable string" unless schema.is_a?(String)
  
        dumper = new(destinations)
        dumper.instance_variable_set(:@schema_pointer, schema)
        dumper.instance_variable_set(:@body_block, block)
-       dumper.begin_dump
+       dumper.begin_dump(force_full_tables: force_full_tables)
  
        dumper.tracker
      end
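A sketch of how the new keyword might be invoked (the destination URI, schema constant, and warehouse name are illustrative):

```ruby
# Force a full re-upload of one table while the rest remain eligible for
# incremental shipping. "accounts" is a hypothetical warehouse name.
InstDataShipper::BasicDumper.perform_dump(
  destinations: ["hosted-data://key@hosted-data.example.com"],
  schema: "MyApp::HostedDataSchema", # must be a constantizable string, per the raise above
  force_full_tables: ["accounts"],
) do
  auto_enqueue_from_schema
end
```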
@@ -63,6 +63,7 @@ module InstDataShipper
  
    protected
  
+   # TODO Make a class-method?
    def parse_configuration(uri)
      if block_given?
        parsed = URI.parse(uri)
@@ -10,20 +10,34 @@ module InstDataShipper
    begin
      last_dump = hosted_data_client.get("api/v1/custom_dumps/last", {
        status: 'imported',
-       # schema_version: convert_schema[:version],
        tags: [
-         "ids-schema=#{dumper.schema_digest}",
+         # We could also include app in the filter, but each app should already have a distinct key in HD
          "ids-genre=#{dumper.export_genre}",
        ],
+       include: ["schema"],
      }).body.with_indifferent_access
  
      if last_dump[:created_at] < context[:incremental_since]
        InstDataShipper.logger.info("Last successful HostedData dump is older than incremental_since - bumping back incremental_since")
        context[:incremental_since] = last_dump[:created_at]
      end
+
+     if (hd_tables = last_dump[:schema]).present?
+       metadatas = hd_tables.values.map { |t| t[:ids_meta] }.compact.map { |t| [t[:table_warehouse_name], t] }.to_h
+       schema[:tables].each do |ts|
+         hd_meta = metadatas[ts[:warehouse_name]]
+
+         if !dumper.table_schema_compatible?(ts, hd_meta)
+           InstDataShipper.logger.info("Last successful HostedData dump of #{ts[:warehouse_name]} has a different schema - forcing full table")
+           context[:force_full_tables] << ts[:warehouse_name]
+         end
+       end
+     elsif !last_dump[:tags].include?("ids-schema=#{dumper.schema_digest}")
+       InstDataShipper.logger.info("Last successful HostedData dump of the same genre has a different schema - not using incremental_since")
+       context[:incremental_since] = nil
+     end
    rescue Faraday::ResourceNotFound
-     # TODO It'd be nice to make this per-table
-     InstDataShipper.logger.info("No Last successful HostedData dump of the same schema - not using incremental_since")
+     InstDataShipper.logger.info("No Last successful HostedData dump of the same genre - not using incremental_since")
      context[:incremental_since] = nil
    end
  end
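For reference, the `custom_dumps/last` response shape this hunk consumes looks roughly like the following (field names are inferred from this hunk and the spec fixtures later in this diff; values are illustrative):

```ruby
{
  created_at: "2024-03-26T12:00:00Z",
  tags: ["ids-schema=abc123", "ids-genre=MyDumper"],
  schema: {
    "accounts" => {
      ids_meta: {
        table_warehouse_name: "accounts",
        table_schema_hash: "1a2b3c4d",
        table_schema_version: "1.0.0", # only present when the table declares a version
      },
    },
  },
}
```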
@@ -115,6 +129,7 @@ module InstDataShipper
            type: coltype,
          }
        end,
+       ids_meta: dumper.table_schema_metadata(ts),
      }
    end
  
@@ -5,18 +5,18 @@ module InstDataShipper
    define_hook :initialize_dump_batch
    define_hook :finalize_dump_batch
  
-   def self.perform_dump(destinations)
+   def self.perform_dump(destinations, force_full_tables: nil)
      raise "Must subclass Dumper to use perform_dump" if self == Dumper
  
      dumper = new(destinations)
-     dumper.begin_dump
+     dumper.begin_dump(force_full_tables: force_full_tables)
  
      dumper.tracker
    end
  
    def self.define(include: [], schema: , &blk)
      Class.new(self) do
-       include(*include)
+       include(*include) if include.present?
  
        if blk.nil? && schema[:tables].any? { |t| t[:sourcer].present? }
          blk = -> { auto_enqueue_from_schema }
@@ -51,7 +51,7 @@ module InstDataShipper
  
    public
  
-   def begin_dump
+   def begin_dump(force_full_tables: nil)
      raise "Dump already begun" unless @raw_destinations.present?
  
      @tracker = tracker = DumpBatch.create(job_class: self.class.to_s, genre: export_genre, status: 'in_progress')
@@ -59,6 +59,7 @@ module InstDataShipper
    @batch_context = context = {
      # TODO Consider behavior if last is still running
      incremental_since: last_successful_tracker&.created_at,
+     force_full_tables: force_full_tables || [],
    }
  
    destinations.each do |dest|
@@ -79,6 +80,8 @@ module InstDataShipper
      context[:destinations] = @raw_destinations
    end
  
+   context.delete(:force_full_tables) if context[:force_full_tables].empty?
+
    Sidekiq::Batch.new.tap do |batch|
      context[:root_bid] = batch.bid
      tracker.update(batch_id: batch.bid)
@@ -135,9 +138,42 @@ module InstDataShipper
      Digest::MD5.hexdigest(schema.to_json)[0...8]
    end
  
+   def table_schema_metadata(table_def)
+     meta = {
+       table_warehouse_name: table_def[:warehouse_name],
+       table_schema_hash: table_schema_hash(table_def),
+     }
+
+     meta[:table_schema_version] = table_def[:version] if table_def[:version].present?
+
+     meta
+   end
+
+   def table_schema_compatible?(table_def, meta_hash)
+     # Force full-table-upload if:
+     # - The table is not present in the last dump
+     return false unless meta_hash
+
+     # - The table's explicitly-set versions do not match
+     return false if meta_hash[:table_schema_version] != table_def[:version]
+
+     # - The table does not have an explicitly-set version and the schema hash does not match
+     return false if !table_def[:version].present? && meta_hash[:table_schema_hash] != table_schema_hash(table_def)
+
+     true
+   end
+
+   def table_schema_hash(table_def)
+     Digest::MD5.hexdigest(table_def.to_json)[0...8]
+   end
+
    def table_is_incremental?(table_def)
      return false unless incremental_since.present?
  
+     table_def = lookup_table_schema!(table_def) if table_def.is_a?(String)
+
+     return false if batch_context[:force_full_tables]&.include?(table_def[:warehouse_name])
+
      # TODO Return false if table's schema changes
      if (inc = table_def[:incremental]).present?
        differ = inc[:if]
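A quick round trip through the two new helpers, as the specs later in this diff exercise them (values are illustrative):

```ruby
meta = dumper.table_schema_metadata(table_def)
# => { table_warehouse_name: "accounts", table_schema_hash: "1a2b3c4d",
#      table_schema_version: "1.0.0" }   # version key only when one is declared

dumper.table_schema_compatible?(table_def, meta) # => true  (hash/version match)
dumper.table_schema_compatible?(table_def, nil)  # => false (table absent from last dump)
```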
@@ -192,11 +228,16 @@ module InstDataShipper
      raise NotImplementedError
    end
  
+   def enqueue_table_from_schema(table_def)
+     table_def = lookup_table_schema!(table_def) if table_def.is_a?(String)
+     instance_exec(table_def, &table_def[:sourcer])
+   end
+
    def auto_enqueue_from_schema
      schema[:tables].each do |table_def|
        src = table_def[:sourcer]
        next unless src.present?
-       instance_exec(table_def, &src)
+       enqueue_table_from_schema(table_def)
      end
    end
  
@@ -1,5 +1,5 @@
  module InstDataShipper
-   # This class ends up fill two roles - Schema and Mapping.
+   # This class ends up filling two roles - Schema and Mapping.
    # It makes for a clean API, but it's a little less canonical since, (eg) the S3 destination doesn't need column type annotations.
    class SchemaBuilder
      attr_reader :schema
@@ -92,15 +92,17 @@ module InstDataShipper
      }
    end
  
-   def source(source, override_model=nil, **kwargs)
+   def source(source=nil, override_model=nil, **kwargs, &block)
      raise "Source already set" if options[:sourcer].present?
+     raise ArgumentError, "Cannot provide source and block" if source.present? && block.present?
  
      if source.is_a?(Symbol)
        mthd = :"import_#{source}"
        options = self.options
        source = ->(table_def) { send(mthd, override_model || options[:model] || options[:warehouse_name], schema_name: options[:warehouse_name], **kwargs) }
      end
-     options[:sourcer] = source
+
+     self.options[:sourcer] = source
    end
  
    def column(name, *args, refs: [], from: nil, **extra, &block)
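The hunk above only adds the guard for the new `&block` parameter; assuming the block is wired up as the table's sourcer elsewhere (that wiring is not visible in this hunk), usage might look like:

```ruby
# Hypothetical block form of `source`, equivalent to passing a Proc positionally.
table("my_table", model: ALocalModel) do
  source do |table_def|
    import_local_table(table_def[:model] || table_def[:warehouse_name])
  end
end
```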
@@ -1,3 +1,3 @@
  module InstDataShipper
-   VERSION = "0.2.2".freeze
+   VERSION = "0.2.3".freeze
  end
@@ -0,0 +1,3 @@
+ class ApplicationRecord < ActiveRecord::Base
+   self.abstract_class = true
+ end
@@ -10,7 +10,7 @@
  #
  # It's strongly recommended that you check this file into your version control system.
  
- ActiveRecord::Schema.define(version: 2020_10_30_210836) do
+ ActiveRecord::Schema.define(version: 2024_03_01_090836) do
  
    # These are extensions that must be enabled in order to support this database
    enable_extension "plpgsql"
@@ -42,4 +42,17 @@ ActiveRecord::Schema.define(version: 2020_10_30_210836) do
      t.string "batch_bid"
    end
  
+   create_table "inst_data_shipper_dump_batches", id: :serial, force: :cascade do |t|
+     t.datetime "started_at"
+     t.datetime "completed_at"
+     t.string "status"
+     t.string "job_class"
+     t.string "genre"
+     t.string "batch_id"
+     t.string "exception"
+     t.text "backtrace"
+     t.datetime "created_at"
+     t.datetime "updated_at"
+   end
+
  end
@@ -0,0 +1,89 @@
+ require 'spec_helper'
+
+ RSpec.describe InstDataShipper::Destinations::HostedData do
+   let(:dumper) { TestDumper.new }
+   let(:dest) { described_class.new("", "hosted-data://key@hosted-data.local", dumper) }
+
+   describe "#preinitialize_dump" do
+     let(:context) { {
+       incremental_since: 1.day.ago,
+       force_full_tables: [],
+     } }
+
+     let(:response_body) { {
+       created_at: 1.hour.ago.iso8601,
+       tags: ["ids-schema=abc123"],
+     } }
+
+     before :each do
+       allow(dumper).to receive(:schema_digest).and_return("abc123")
+       stub_request(:get, "https://hosted-data.local/api/v1/custom_dumps/last").with(query: hash_including()).to_return do |req|
+         {
+           body: response_body.to_json,
+           headers: {'Content-Type' => 'application/json'},
+         }
+       end
+     end
+
+     it "leaves incremental_since alone if last dump is newer" do
+       x = context[:incremental_since]
+       dest.preinitialize_dump(context)
+       expect(context[:incremental_since]).to eq x
+     end
+
+     it "bumps back incremental_since if last dump is older" do
+       response_body[:created_at] = 2.days.ago.iso8601
+       dest.preinitialize_dump(context)
+       expect(context[:incremental_since]).to eq(response_body[:created_at])
+     end
+
+     it "nils incremental_since if last dump is of a different schema" do
+       response_body[:tags] = ["ids-schema=def456"]
+       dest.preinitialize_dump(context)
+       expect(context[:incremental_since]).to be nil
+     end
+
+     context "per-table forcing" do
+       before :each do
+         response_body[:schema] = {
+           "test_table" => {
+             ids_meta: {
+               table_warehouse_name: "test_table",
+               table_schema_hash: "def456",
+             },
+           }
+         }
+       end
+
+       it "does not trigger force-full if the schema hash matches" do
+         allow(dumper).to receive(:table_schema_hash).and_return("def456")
+         dest.preinitialize_dump(context)
+         expect(context[:force_full_tables] || []).to_not include "test_table"
+       end
+
+       it "triggers force-full if the schema hash does not match" do
+         allow(dumper).to receive(:table_schema_hash).and_return("def123")
+         dest.preinitialize_dump(context)
+         expect(context[:force_full_tables]).to include "test_table"
+       end
+
+       it "triggers force-full if versions do not match" do
+         response_body[:schema]["table_with_version"] = { ids_meta: { table_warehouse_name: "table_with_version", table_schema_version: "1.1" } }
+         dest.preinitialize_dump(context)
+         expect(context[:force_full_tables]).to include "table_with_version"
+       end
+
+       it "does not trigger force-full if the versions match" do
+         response_body[:schema]["table_with_version"] = { ids_meta: { table_warehouse_name: "table_with_version", table_schema_version: "1.0" } }
+         dest.preinitialize_dump(context)
+         expect(context[:force_full_tables] || []).to_not include "table_with_version"
+       end
+
+       it "does not consider the schema hash if a version is present" do
+         response_body[:schema]["table_with_version"] = { ids_meta: { table_warehouse_name: "table_with_version", table_schema_version: "1.0", table_schema_hash: "not_matching" } }
+         dest.preinitialize_dump(context)
+         expect(context[:force_full_tables]).to_not include "table_with_version"
+       end
+     end
+   end
+ end
@@ -0,0 +1,56 @@
+ require 'spec_helper'
+
+ RSpec.describe InstDataShipper::Dumper do
+   let(:dumper) { TestDumper.new }
+   let(:context) { {
+     incremental_since: 1.day.ago.iso8601,
+   } }
+
+   before :each do
+     allow(dumper).to receive(:batch_context).and_return(context)
+   end
+
+   describe "#auto_enqueue_from_schema" do
+     it "makes the expected calls" do
+       expect(dumper).to receive(:import_local_table).with(InstDataShipper::DumpBatch, schema_name: "local_table_test")
+       dumper.send(:enqueue_table_from_schema, "local_table_test")
+     end
+
+     it "enqueues all tables with a sourcer" do
+       expect(dumper).to receive(:enqueue_table_from_schema).with(hash_including(warehouse_name: "test_table"))
+       expect(dumper).to receive(:enqueue_table_from_schema).with(hash_including(warehouse_name: "local_table_test"))
+       allow(dumper).to receive(:enqueue_table_from_schema)
+       dumper.send(:auto_enqueue_from_schema)
+     end
+
+     it "doesn't try to enqueue a table with no sourcer" do
+       expect(dumper).to_not receive(:enqueue_table_from_schema).with(hash_including(warehouse_name: "incremental_table"))
+       allow(dumper).to receive(:enqueue_table_from_schema)
+       dumper.send(:auto_enqueue_from_schema)
+     end
+   end
+
+   describe "#table_is_incremental?" do
+     it "usually returns as the table defines" do
+       expect(dumper.table_is_incremental?("test_table")).to be false
+       expect(dumper.table_is_incremental?("incremental_table")).to be true
+     end
+
+     it "returns false if incremental_since is nil" do
+       context[:incremental_since] = nil
+       expect(dumper.table_is_incremental?("incremental_table")).to be false
+     end
+
+     it "returns false if table is forced to full" do
+       context[:force_full_tables] = ["incremental_table"]
+       expect(dumper.table_is_incremental?("incremental_table")).to be false
+     end
+
+     it "supports a proc for the if: option" do
+       dumper.instance_variable_set(:@proc_incremental, true)
+       expect(dumper.table_is_incremental?("proc_incremental_table")).to be true
+       dumper.instance_variable_set(:@proc_incremental, false)
+       expect(dumper.table_is_incremental?("proc_incremental_table")).to be false
+     end
+   end
+ end
data/spec/spec_helper.rb CHANGED
@@ -10,7 +10,7 @@ require 'spec_helper'
  require 'factory_bot_rails'
  require 'timecop'
  require 'webmock/rspec'
- require 'support/fake_canvas'
+ require 'support/test_dumper'
  require 'shoulda/matchers'
  require 'pry'
  require 'pry-nav'
@@ -18,11 +18,6 @@ require 'with_model'
  
  require 'sidekiq/testing'
  Sidekiq::Testing.fake!
- Sidekiq::Testing.server_middleware do |chain|
-   chain.add CanvasSync::JobBatches::Sidekiq::ServerMiddleware
- end
-
- Dir[File.dirname(__FILE__) + "/job_batching/support/**/*.rb"].each {|f| require f }
  
  ActiveRecord::Migration.maintain_test_schema!
  
@@ -43,10 +38,6 @@ RSpec.configure do |config|
    config.expect_with :rspec do |c|
      c.syntax = :expect
    end
-
-   config.before(:each) do
-     stub_request(:any, /test.instructure.com/).to_rack(FakeCanvas)
-   end
  end
  
  Shoulda::Matchers.configure do |config|
@@ -0,0 +1,31 @@
+
+ TestSchema = InstDataShipper::SchemaBuilder.build do
+   table("test_table", model: "Account") do
+     source ->(tdef) {}
+
+     column :id, :"varchar(32)", from: "Id"
+   end
+
+   table("table_with_version", model: "Account") do
+     version "1.0"
+     column :id
+   end
+
+   table("local_table_test", model: InstDataShipper::DumpBatch) do
+     source :local_table
+
+     column :id
+   end
+
+   table("incremental_table") do
+     incremental
+     column :id, :"varchar(32)", from: "Id"
+   end
+
+   table("proc_incremental_table") do
+     incremental if: -> { @proc_incremental }
+     column :id, :"varchar(32)", from: "Id"
+   end
+ end
+
+ TestDumper = InstDataShipper::Dumper.define(schema: TestSchema)
metadata CHANGED
@@ -1,43 +1,29 @@
  --- !ruby/object:Gem::Specification
  name: inst_data_shipper
  version: !ruby/object:Gem::Version
-   version: 0.2.2
+   version: 0.2.3
  platform: ruby
  authors:
  - Instructure CustomDev
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-03-20 00:00:00.000000000 Z
+ date: 2024-03-27 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: rails
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
          version: '6.0'
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
          version: '6.0'
- - !ruby/object:Gem::Dependency
-   name: rake
-   requirement: !ruby/object:Gem::Requirement
-     requirements:
-     - - "~>"
-       - !ruby/object:Gem::Version
-         version: '10.0'
-   type: :development
-   prerelease: false
-   version_requirements: !ruby/object:Gem::Requirement
-     requirements:
-     - - "~>"
-       - !ruby/object:Gem::Version
-         version: '10.0'
  - !ruby/object:Gem::Dependency
    name: rspec
    requirement: !ruby/object:Gem::Requirement
@@ -220,20 +206,6 @@ dependencies:
      - - ">="
        - !ruby/object:Gem::Version
          version: '0'
- - !ruby/object:Gem::Dependency
-   name: apartment
-   requirement: !ruby/object:Gem::Requirement
-     requirements:
-     - - ">="
-       - !ruby/object:Gem::Version
-         version: '0'
-   type: :development
-   prerelease: false
-   version_requirements: !ruby/object:Gem::Requirement
-     requirements:
-     - - ">="
-       - !ruby/object:Gem::Version
-         version: '0'
  - !ruby/object:Gem::Dependency
    name: with_model
    requirement: !ruby/object:Gem::Requirement
@@ -346,20 +318,6 @@ dependencies:
      - - ">="
        - !ruby/object:Gem::Version
          version: 0.18.0
- - !ruby/object:Gem::Dependency
-   name: rubyzip
-   requirement: !ruby/object:Gem::Requirement
-     requirements:
-     - - ">="
-       - !ruby/object:Gem::Version
-         version: '0'
-   type: :runtime
-   prerelease: false
-   version_requirements: !ruby/object:Gem::Requirement
-     requirements:
-     - - ">="
-       - !ruby/object:Gem::Version
-         version: '0'
  - !ruby/object:Gem::Dependency
    name: faraday_middleware
    requirement: !ruby/object:Gem::Requirement
@@ -405,6 +363,7 @@ files:
  - lib/inst_data_shipper/version.rb
  - spec/dummy/README.rdoc
  - spec/dummy/Rakefile
+ - spec/dummy/app/models/application_record.rb
  - spec/dummy/bin/rails
  - spec/dummy/config.ru
  - spec/dummy/config/application.rb
@@ -419,9 +378,12 @@ files:
  - spec/dummy/config/routes.rb
  - spec/dummy/config/secrets.yml
  - spec/dummy/db/schema.rb
+ - spec/inst_data_shipper/destinations/hosted_data_spec.rb
+ - spec/inst_data_shipper/dumper_spec.rb
  - spec/spec_helper.rb
  - spec/support/fixtures/reports/provisioning_csv_unzipped/courses.csv
  - spec/support/fixtures/reports/provisioning_csv_unzipped/users.csv
+ - spec/support/test_dumper.rb
  homepage: https://instructure.com
  licenses: []
  metadata: {}
@@ -440,27 +402,31 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
    version: '0'
  requirements: []
- rubygems_version: 3.1.6
+ rubygems_version: 3.5.3
  signing_key:
  specification_version: 4
  summary: Gem for uploading app data to Instructure CustomDev Hosted Data tooling
  test_files:
- - spec/spec_helper.rb
- - spec/support/fixtures/reports/provisioning_csv_unzipped/users.csv
- - spec/support/fixtures/reports/provisioning_csv_unzipped/courses.csv
- - spec/dummy/db/schema.rb
+ - spec/dummy/README.rdoc
  - spec/dummy/Rakefile
- - spec/dummy/config/secrets.yml
+ - spec/dummy/app/models/application_record.rb
+ - spec/dummy/bin/rails
  - spec/dummy/config/application.rb
- - spec/dummy/config/initializers/session_store.rb
- - spec/dummy/config/initializers/wrap_parameters.rb
- - spec/dummy/config/initializers/assets.rb
+ - spec/dummy/config/boot.rb
+ - spec/dummy/config/database.yml
+ - spec/dummy/config/environment.rb
  - spec/dummy/config/environments/development.rb
  - spec/dummy/config/environments/test.rb
- - spec/dummy/config/database.yml
- - spec/dummy/config/boot.rb
+ - spec/dummy/config/initializers/assets.rb
+ - spec/dummy/config/initializers/session_store.rb
+ - spec/dummy/config/initializers/wrap_parameters.rb
  - spec/dummy/config/routes.rb
- - spec/dummy/config/environment.rb
- - spec/dummy/README.rdoc
+ - spec/dummy/config/secrets.yml
  - spec/dummy/config.ru
- - spec/dummy/bin/rails
+ - spec/dummy/db/schema.rb
+ - spec/inst_data_shipper/destinations/hosted_data_spec.rb
+ - spec/inst_data_shipper/dumper_spec.rb
+ - spec/spec_helper.rb
+ - spec/support/fixtures/reports/provisioning_csv_unzipped/courses.csv
+ - spec/support/fixtures/reports/provisioning_csv_unzipped/users.csv
+ - spec/support/test_dumper.rb