master_data_tool 0.16.0 → 0.19.0

This diff shows the content of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: '09f03f9ca56ad09dd1f511a53248da3ffe917a0e8be0dba68e3bfbb8f61f74bb'
- data.tar.gz: 56d9f18f92c96ca4cf2f5bac42edc369d861cbf1cb3ba3e39cb6c8b19eccbdc0
+ metadata.gz: 5d77c4691ca7880f5b1ab73c4d0e5d424e743cb2973e88cb2fc45e7d15cf7acf
+ data.tar.gz: a18d5f0ec46051a0c45db4e369270a5387a827cafa6b17760ead426152a1fd3e
  SHA512:
- metadata.gz: 229dc35b454ca6adafddc65125042649172d203a87554fbe161cade5ad6f17c9e645c40108cb37a937cbf1bfa496b06f0ad6406e904e706a7945b4a9ffe9bb2f
- data.tar.gz: 285b4f485f23c1215b8029af39bac2808a85050506e02cb92e1662ddaf0ccf634f9a8de4b821663f297d78f6e8a74d61a0920af4e2570ebe19200a0d1ec2f121
+ metadata.gz: 644b4a2322932c327a75d1cd955458689e4e08151301cbb1f5782a7465a62bfc80ba823774b0d77b2a525ce9cbd68dd28e6a14b354473608c4a7b61ef4cbf9b1
+ data.tar.gz: 91eb8811ca518eb91311c2d4a50dea7bb778edfa7f106e0d6d09e6bc5b866d4e12c6375936aa31f02f918c73979e9b81fa1038531f1ca9281e6a80d246516aa7
data/README.md CHANGED
@@ -35,18 +35,18 @@ Or install it yourself as:
 
  ### マスタデータの投入
 
- | option | default | 内容 |
- |----------------------| --- |-----------------------------------------------------------------|
- | --dry-run | true | dry-runモードで実行する(データ変更は行わない) |
- | --verify | true | データ投入後に全テーブル・全レコードのバリデーションチェックを行う |
- | --only-import-tables | [] | 指定したテーブルのみデータ投入を行う |
- | --except-import-tables | [] | 指定したテーブルのデータ投入を行わない |
- | --only-verify-tables | [] | 指定したテーブルのみ投入後のバリデーションチェックを行う |
- | --except-verify-tables | [] | 指定したテーブルのバリデーションチェックを行わない |
- | --skip-no-change | true | CSVファイルに更新がないテーブルをスキップする |
- | --silent | false | 結果の出力をやめる |
+ | option | default | 内容 |
+ |---------------------------------| --- |-----------------------------------------------------------------|
+ | --dry-run | true | dry-runモードで実行する(データ変更は行わない) |
+ | --verify | true | データ投入後に全テーブル・全レコードのバリデーションチェックを行う |
+ | --only-import-tables | [] | 指定したテーブルのみデータ投入を行う |
+ | --except-import-tables | [] | 指定したテーブルのデータ投入を行わない |
+ | --only-verify-tables | [] | 指定したテーブルのみ投入後のバリデーションチェックを行う |
+ | --except-verify-tables | [] | 指定したテーブルのバリデーションチェックを行わない |
+ | --skip-no-change | true | CSVファイルに更新がないテーブルをスキップする |
+ | --silent | false | 結果の出力をやめる |
  | --delete-all-ignore-foreign-key | false | 外部キー制約を無視してレコードを消すかどうか |
- | --override_identifier | nil | fixtures/#{override_identifier} のディレクトリにある内容でfixturesを上書きして投入する |
+ | --override-identifier | nil | fixtures/#{override_identifier} のディレクトリにある内容でfixturesを上書きして投入する |
 
  ```bash
  bundle exec master_data_tool import
@@ -9,12 +9,16 @@ module MasterDataTool
  config_accessor :dump_ignore_columns
  config_accessor :default_import_options
  config_accessor :logger
+ config_accessor :preload_associations
+ config_accessor :eager_load_associations
 
  def initialize
  self.master_data_dir = nil
  self.dump_ignore_tables = %w[]
  self.dump_ignore_columns = %w[]
  self.default_import_options = {}
+ self.preload_associations = {} # key: Class, value: associations
+ self.eager_load_associations = {} # key: Class, value: associations
  self.logger = Logger.new(nil)
  end
  end
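
The two new config accessors let an application hand association lists to `verify!`, which will `preload`/`eager_load` them before validating records. A minimal initializer sketch, not part of the package: the model names and associations (`Item`, `Shop`, `:item_category`, …) are hypothetical, and the class-name-symbol keys follow the `dig(@model_klass.to_s.to_sym)` lookup added in master_data.rb further down.

```ruby
# config/initializers/master_data_tool.rb (hypothetical path)
require 'master_data_tool'

# Keys are model class names as symbols; values are whatever ActiveRecord's
# preload/eager_load accept (symbols, arrays, nested hashes).
MasterDataTool.config.preload_associations = {
  Item: [:item_category],          # hypothetical model and association
  Shop: [{ shop_group: :region }]  # nested associations are passed through as-is
}
MasterDataTool.config.eager_load_associations = {
  Item: [:item_sales_setting]      # hypothetical; useful when validations read joined data
}
```
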
@@ -27,23 +27,25 @@ module MasterDataTool
  @override_identifier = override_identifier
  @report_printer = report_printer
  @report_printer.silent = silent
+ @master_data_statuses = []
  end
 
  def execute
  ApplicationRecord.transaction do
  print_execute_options
+ load_master_data_statuses
 
- master_data_list = build_master_data_list
+ master_data_collection = build_master_data_collection
 
- import_all!(master_data_list)
- verify_all!(master_data_list) if @verify
- save_master_data_statuses!(master_data_list)
+ import_all!(master_data_collection)
+ verify_all!(master_data_collection) if @verify
+ save_master_data_statuses!(master_data_collection)
 
- print_affected_tables(master_data_list)
+ print_affected_tables(master_data_collection)
 
  raise DryRunError if @dry_run
 
- master_data_list
+ master_data_collection
  end
  rescue DryRunError
  puts "[DryRun] end"
@@ -51,6 +53,8 @@ module MasterDataTool
 
  private
 
+ attr_reader :master_data_statuses
+
  def print_execute_options
  return if @silent
 
@@ -61,22 +65,18 @@ module MasterDataTool
  puts "================="
  end
 
- def build_master_data_list
- [].tap do |master_data_list|
- MasterDataTool::Import::MasterDataFileList.new(override_identifier: @override_identifier).build.each do |master_data_file|
+ def build_master_data_collection
+ MasterDataCollection.new.tap do |collection|
+ MasterDataTool::MasterDataFileCollection.new(override_identifier: @override_identifier).each do |master_data_file|
  load_skip = load_skip_table?(master_data_file)
-
- model_klass = Object.const_get(master_data_file.table_name.classify)
- master_data = MasterData.new(master_data_file, model_klass)
- master_data.load unless load_skip
-
- master_data_list << master_data
+ master_data = MasterData.build(master_data_file, load: !load_skip)
+ collection.append(master_data)
  end
- end.sort_by { |m| m.basename } # 外部キー制約などがある場合には先に入れておかないといけないデータなどがある。なので、プレフィックスを付けて順序を指定して貰う
+ end
  end
 
- def import_all!(master_data_list)
- master_data_list.each do |master_data|
+ def import_all!(master_data_collection)
+ master_data_collection.each do |master_data|
  next unless master_data.loaded?
  next if import_skip_table?(master_data.table_name)
 
@@ -85,8 +85,8 @@ module MasterDataTool
  end
  end
 
- def verify_all!(master_data_list)
- master_data_list.each do |master_data|
+ def verify_all!(master_data_collection)
+ master_data_collection.each do |master_data|
  next if verify_skip_table?(master_data.table_name)
 
  report = master_data.verify!(ignore_fail: @dry_run)
@@ -94,9 +94,9 @@ module MasterDataTool
  end
  end
 
- def save_master_data_statuses!(master_data_list)
+ def save_master_data_statuses!(master_data_collection)
  records = []
- master_data_list.each do |master_data|
+ master_data_collection.each do |master_data|
  next unless master_data.loaded?
 
  records << MasterDataTool::MasterDataStatus.build(master_data.master_data_file)
@@ -105,8 +105,8 @@ module MasterDataTool
  MasterDataTool::MasterDataStatus.import_records!(records, dry_run: @dry_run)
  end
 
- def print_affected_tables(master_data_list)
- master_data_list.each do |master_data|
+ def print_affected_tables(master_data_collection)
+ master_data_collection.each do |master_data|
  next unless master_data.loaded?
  next unless master_data.affected?
 
@@ -119,7 +119,10 @@ module MasterDataTool
  return true if import_skip_table?(master_data_file.table_name)
  return false unless @skip_no_change
 
- !MasterDataTool::MasterDataStatus.master_data_will_change?(master_data_file)
+ master_data_status = master_data_statuses.dig(master_data_file.table_name)
+ return false unless master_data_status
+
+ !master_data_status.will_change?(master_data_file)
  end
 
  def import_skip_table?(table_name)
@@ -156,6 +159,10 @@ module MasterDataTool
  pattern = Pathname.new(MasterDataTool.config.master_data_dir).join(@override_identifier).join('*.csv').to_s
  Pathname.glob(pattern).select(&:file?)
  end
+
+ def load_master_data_statuses
+ @master_data_statuses = MasterDataTool::MasterDataStatus.fetch_all
+ end
  end
  end
  end
@@ -1,4 +1,3 @@
  # frozen_string_literal: true
 
- require_relative 'import/master_data_file_list'
  require_relative "import/executor"
@@ -19,6 +19,15 @@ module MasterDataTool
  @deleted_records = []
  end
 
+ class << self
+ def build(master_data_file, load: false)
+ model_klass = Object.const_get(master_data_file.table_name.classify)
+ new(master_data_file, model_klass).tap do |record|
+ record.load if load
+ end
+ end
+ end
+
  def basename
  @master_data_file.basename
  end
@@ -126,12 +135,20 @@ module MasterDataTool
 
  def verify!(ignore_fail: false)
  MasterDataTool::Report::VerifyReport.new(self).tap do |report|
- @model_klass.all.find_each do |record|
+ scoped = @model_klass.all
+ scoped = scoped.preload(preload_associations) if preload_associations
+ scoped = scoped.eager_load(eager_load_associations) if eager_load_associations
+
+ scoped.find_each do |record|
  valid = record.valid?
  report.append(MasterDataTool::Report::VerifyReport.build_verify_record_report(self, record, valid))
+ next if valid
  next if ignore_fail
 
- raise MasterDataTool::VerifyFailed.new("[#{table_name}] id = #{record.id} is invalid") unless valid
+ e = MasterDataTool::VerifyFailed.new("[#{table_name}] id = #{record.id} is invalid")
+ e.errors = record.errors
+
+ raise e
  end
  end
  end
@@ -145,6 +162,14 @@ module MasterDataTool
 
  private
 
+ def preload_associations
+ @preload_associations ||= MasterDataTool.config.preload_associations.dig(@model_klass.to_s.to_sym)
+ end
+
+ def eager_load_associations
+ @eager_load_associations ||= MasterDataTool.config.eager_load_associations.dig(@model_klass.to_s.to_sym)
+ end
+
  def build_records_from_csv(csv, old_records_by_id)
  {}.tap do |records|
  csv.each do |row|
@@ -0,0 +1,25 @@
+ # frozen_string_literal: true
+
+ module MasterDataTool
+ class MasterDataCollection
+ def initialize
+ @collection = []
+ end
+
+ def append(master_data)
+ @collection << master_data
+ end
+
+ def each
+ return enum_for(:each) unless block_given?
+
+ @collection.sort_by(&:basename).each do |master_data|
+ yield master_data
+ end
+ end
+
+ def to_a
+ each.to_a
+ end
+ end
+ end
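
The new `MasterDataCollection` takes over the ordering concern from the removed `sort_by { |m| m.basename }` call (and its comment about foreign-key ordering): `each` always yields in basename order, so a numeric filename prefix still controls import order. A small sketch, not part of the package; the file names and the `master_data_for` helper are hypothetical stand-ins for `MasterData.build` on a `MasterDataFile`.

```ruby
collection = MasterDataTool::MasterDataCollection.new
collection.append(master_data_for('20_items.csv'))  # master_data_for is a hypothetical helper
collection.append(master_data_for('10_shops.csv'))

# Parent tables such as shops are yielded before items that reference them.
collection.each.map(&:basename)
#=> [#<Pathname:10_shops.csv>, #<Pathname:20_items.csv>]
```
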
@@ -1,13 +1,35 @@
  # frozen_string_literal: true
  module MasterDataTool
- class MasterDataFile < Struct.new(:table_name, :path, :override_identifier)
+ class MasterDataFile
+ attr_reader :table_name, :path, :override_identifier
+
  def initialize(table_name, path, override_identifier)
- super(table_name, path, override_identifier)
+ @table_name = table_name
+ @path = path
+ @override_identifier = override_identifier
  freeze
  end
 
+ class << self
+ def build(path, override_identifier)
+ table_name = MasterDataTool.resolve_table_name(path, override_identifier)
+ new(table_name, path, override_identifier)
+ end
+ end
+
  def basename
- self.path.basename
+ @path.basename
+ end
+
+ def ==(other)
+ other.class === self &&
+ other.hash == hash
+ end
+
+ alias eql? ==
+
+ def hash
+ [@table_name, @path, @override_identifier].join.hash
  end
  end
  end
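
Dropping the `Struct` superclass removes the struct-provided comparison, so the class now defines value equality itself: two instances with the same table name, path, and override identifier share a hash and compare equal, which keeps them usable as Hash keys. A quick sketch with hypothetical paths, assuming the gem is installed and loadable on its own:

```ruby
require 'pathname'
require 'master_data_tool'

a = MasterDataTool::MasterDataFile.new('items', Pathname.new('db/fixtures/items.csv'), nil)
b = MasterDataTool::MasterDataFile.new('items', Pathname.new('db/fixtures/items.csv'), nil)

a == b          #=> true (same [table_name, path, override_identifier] hash)
a.eql?(b)       #=> true
({ a => 1 })[b] #=> 1, so instances still work as Hash keys
```
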
@@ -0,0 +1,51 @@
+ # frozen_string_literal: true
+
+ module MasterDataTool
+ class MasterDataFileCollection
+ def initialize(override_identifier: nil)
+ @override_identifier = override_identifier
+ @collection = build
+ freeze
+ end
+
+ def each
+ return enum_for(:each) unless block_given?
+
+ @collection.each do |file|
+ yield file
+ end
+ end
+
+ def to_a
+ each.to_a
+ end
+
+ private
+
+ def build
+ files = extract_master_data_csv_paths.presence&.index_by(&:table_name)
+ overridden_files = overridden_master_data_csv_paths.presence&.index_by(&:table_name) || {}
+
+ table_names = (files.keys + overridden_files.keys).uniq
+ table_names.map do |table_name|
+ overridden_files[table_name] || files[table_name]
+ end
+ end
+
+ def extract_master_data_csv_paths
+ pattern = Pathname.new(MasterDataTool.config.master_data_dir).join('*.csv').to_s
+ Pathname.glob(pattern).select(&:file?).map do |path|
+ MasterDataFile.build(path, nil)
+ end
+ end
+
+ def overridden_master_data_csv_paths
+ return [] if @override_identifier.blank?
+
+ pattern = Pathname.new(MasterDataTool.config.master_data_dir).join(@override_identifier).join('*.csv').to_s
+ Pathname.glob(pattern).select(&:file?).map do |path|
+ MasterDataFile.build(path, @override_identifier)
+ end
+ end
+ end
+ end
@@ -14,7 +14,17 @@ module MasterDataTool
  validates :version,
  presence: true
 
+ def will_change?(master_data_file)
+ raise unless name == master_data_file.table_name
+
+ version != self.class.decide_version(master_data_file.path)
+ end
+
  class << self
+ def fetch_all
+ all.index_by(&:name)
+ end
+
  def build(master_data_file)
  version = decide_version(master_data_file.path)
  new(name: MasterDataTool.resolve_table_name(master_data_file.path, master_data_file.override_identifier), version: version)
@@ -23,15 +33,10 @@ module MasterDataTool
  def import_records!(records, dry_run: true)
  if dry_run
  pp records
- else
- import!(records, validate: true, on_duplicate_key_update: %w[name version], timestamps: true)
+ return
  end
- end
 
- # @param [MasterDataTool::MasterDataFile] master_data_file
- def master_data_will_change?(master_data_file)
- new_version = decide_version(master_data_file.path)
- !where(name: master_data_file.table_name, version: new_version).exists?
+ import!(records, validate: true, on_duplicate_key_update: %w[name version], timestamps: true)
  end
  end
  def decide_version(csv_path)
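
The per-table `master_data_will_change?` query is replaced by a single `fetch_all` (a Hash keyed by table name) plus an instance-level `will_change?` that recomputes a CSV's version and compares it with the stored one. Roughly, the executor's skip check now amounts to the sketch below; it is not package code, the `items` table and path are hypothetical, and it assumes `master_data_dir` is configured and a database connection is available.

```ruby
statuses = MasterDataTool::MasterDataStatus.fetch_all
#=> { "items" => #<MasterDataTool::MasterDataStatus name: "items", version: "...">, ... }

file   = MasterDataTool::MasterDataFile.build(Pathname.new('db/fixtures/items.csv'), nil)
status = statuses[file.table_name]

# No stored status means the table gets imported; otherwise compare versions.
changed = status.nil? || status.will_change?(file)
```
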
@@ -7,6 +7,7 @@ module MasterDataTool
 
  def print(message)
  return if @silent
+ return if message.blank?
 
  MasterDataTool.config.logger.info message
  puts message
@@ -1,5 +1,5 @@
  # frozen_string_literal: true
 
  module MasterDataTool
- VERSION = "0.16.0"
+ VERSION = "0.19.0"
  end
@@ -1,11 +1,14 @@
  # frozen_string_literal: true
 
  require 'csv'
+ require 'socket'
  require_relative "master_data_tool/version"
  require_relative "master_data_tool/config"
  require_relative "master_data_tool/master_data_status"
  require_relative "master_data_tool/master_data_file"
+ require_relative "master_data_tool/master_data_file_collection"
  require_relative "master_data_tool/master_data"
+ require_relative "master_data_tool/master_data_collection"
  require_relative "master_data_tool/report"
  require_relative "master_data_tool/dump/executor"
  require_relative "master_data_tool/import"
@@ -13,9 +16,13 @@ require_relative "master_data_tool/import"
  module MasterDataTool
  class Error < StandardError; end
  class DryRunError < StandardError; end
- class VerifyFailed < StandardError; end
  class NotLoadedError < StandardError; end
 
+ class VerifyFailed < StandardError
+ attr_accessor :errors
+ delegate :full_messages, to: :errors
+ end
+
  class << self
  def config
  @config ||= Config.new
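
`VerifyFailed` now carries the failing record's `errors` object (set in `verify!` above) and delegates `full_messages` to it, so callers can report why a row is invalid rather than only which id failed. A sketch of a caller, assuming a Rails/ActiveRecord environment; the executor construction is elided and hypothetical.

```ruby
begin
  executor.execute  # a MasterDataTool::Import::Executor built elsewhere (hypothetical)
rescue MasterDataTool::VerifyFailed => e
  warn e.message                   # e.g. "[items] id = 1 is invalid"
  warn e.full_messages.join(', ')  # delegated to the invalid record's ActiveModel errors
end
```
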
@@ -0,0 +1,42 @@
+ # frozen_string_literal: true
+
+ require_relative "lib/master_data_tool/version"
+
+ Gem::Specification.new do |spec|
+ spec.name = "master_data_tool"
+ spec.version = MasterDataTool::VERSION
+ spec.authors = ["Takahiro Ooishi"]
+ spec.email = ["taka0125@gmail.com"]
+
+ spec.summary = "マスタデータの管理ツール"
+ spec.description = "システムが稼働する上で最初から必要なデータ(マスタデータ)を管理するツールです。"
+ spec.homepage = "https://github.com/taka0125/master_data_tool"
+ spec.required_ruby_version = ">= 2.6.0"
+
+ spec.metadata["homepage_uri"] = spec.homepage
+ spec.metadata["source_code_uri"] = spec.homepage
+
+ # Specify which files should be added to the gem when it is released.
+ # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
+ spec.files = Dir.chdir(File.expand_path(__dir__)) do
+ `git ls-files -z`.split("\x0").reject do |f|
+ (f == __FILE__) || f.match(%r{\A(?:(?:test|spec|features)/|\.(?:git|travis|circleci)|appveyor)})
+ end
+ end
+ spec.bindir = "exe"
+ spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
+ spec.require_paths = ["lib"]
+
+ spec.add_development_dependency 'rspec'
+ spec.add_development_dependency 'mysql2'
+ spec.add_development_dependency 'psych', '~> 3.1'
+ spec.add_development_dependency 'appraisal'
+ spec.add_development_dependency 'ridgepole'
+ spec.add_development_dependency 'database_cleaner-active_record'
+ spec.add_development_dependency 'standalone_activerecord_boot_loader'
+
+ spec.add_dependency 'activerecord', '>= 5.1.7'
+ spec.add_dependency 'activesupport'
+ spec.add_dependency 'thor'
+ spec.add_dependency 'activerecord-import'
+ end
@@ -0,0 +1,11 @@
+ #!/bin/bash
+ set -ex
+
+ CURRENT=$(cd $(dirname $0);pwd)
+
+ mysql \
+ -u ${DB_USERNAME} \
+ -h ${DB_HOST} \
+ -p${DB_PASSWORD} \
+ --port ${DB_PORT} \
+ -e "DROP DATABASE ${DB_NAME}"
@@ -1,4 +1,230 @@
+ # TypeProf 0.21.3
+
+ # Classes
  module MasterDataTool
  VERSION: String
- # See the writing guide of rbs: https://github.com/ruby/rbs#guides
+ self.@config: Config
+
+ def self.config: -> Config
+ def self.configure: -> untyped
+ def self.resolve_table_name: (Pathname csv_path, String? override_identifier) -> String
+
+ class Config
+ def initialize: -> void
+ end
+
+ class MasterDataStatus
+ def will_change?: (MasterDataFile master_data_file) -> bool
+ def self.fetch_all: -> Hash[String,MasterDataStatus]
+ def self.build: (MasterDataFile master_data_file) -> MasterDataStatus
+ def self.import_records!: (Array[MasterDataStatus] records, dry_run: bool) -> Array[MasterDataStatus]
+ def self.master_data_will_change?: (MasterDataFile master_data_file) -> bool
+ def self.decide_version: (Pathname csv_path) -> String
+ end
+
+ class MasterDataFile
+ attr_reader table_name: String
+ attr_reader path: Pathname
+ attr_reader override_identifier: String?
+ def initialize: (String table_name, Pathname path, String? override_identifier) -> void
+ def self.build: (Pathname path, String? override_identifier) -> MasterDataFile
+ def basename: -> Pathname
+ def ==: (untyped other) -> bool
+ alias eql? ==
+ def hash: -> Integer
+ end
+
+ class MasterDataFileCollection
+ @override_identifier: String?
+ @collection: Array[MasterDataFile]
+
+ def initialize: (override_identifier: String?) -> void
+ def each: ?{ -> Array[MasterDataFile] } -> Enumerator[bot, untyped]
+ def to_a: -> Array[MasterDataFile]
+
+ private
+ def build: -> Array[MasterDataFile]
+ def extract_master_data_csv_paths: -> Array[MasterDataFile]
+ def overridden_master_data_csv_paths: -> Array[MasterDataFile]
+ end
+
+ class MasterData
+ @loaded: bool
+ @affected: bool
+ @preload_associations: Array[untyped]
+ @eager_load_associations: Array[untyped]
+
+ attr_reader master_data_file: MasterDataFile
+ attr_reader model_klass: untyped
+ attr_reader columns: Array[String]
+ attr_reader new_records: Array[untyped]
+ def new_records: -> Array[untyped]
+ attr_reader updated_records: Array[untyped]
+ def updated_records: -> Array[untyped]
+ attr_reader no_change_records: Array[untyped]
+ def no_change_records: -> Array[untyped]
+ attr_reader deleted_records: Array[untyped]
+ def deleted_records: -> Array[untyped]
+ attr_reader before_count: Integer
+ def before_count: -> Integer
+ attr_reader after_count: Integer
+ def after_count: -> Integer
+ def initialize: (MasterDataFile master_data_file, untyped model_klass) -> void
+ def self.build: (MasterDataFile master_data_file, ?load: bool) -> MasterData
+ def basename: -> Pathname
+ def load: -> true
+ def import_records: -> Array[untyped]
+ def affected_records: -> Array[untyped]
+ def loaded?: -> bool
+ def affected?: -> bool?
+ def table_name: -> String
+ def import!: (?dry_run: true, ?delete_all_ignore_foreign_key: false) -> Report::ImportReport
+ def verify!: (?ignore_fail: bool) -> Report::VerifyReport
+ def print_affected_table: -> Report::PrintAffectedTableReport?
+
+ private
+ def preload_associations: -> Array[untyped]
+ def eager_load_associations: -> Array[untyped]
+ def build_records_from_csv: (Array[Array[String?]] csv, Hash[Integer, untyped] old_records_by_id) -> Hash[Integer, untyped]
+ def enable_foreign_key_checks: -> untyped
+ def disable_foreign_key_checks: -> untyped
+ end
+
+ class MasterDataCollection
+ @collection: Array[MasterData]
+
+ def initialize: -> void
+ def append: (MasterData master_data) -> Array[MasterData]
+ def each: ?{ (MasterData) -> (Array[MasterDataStatus | {operation: :verify, table_name: untyped, valid: untyped, id: untyped}]?) } -> (Array[MasterData] | Enumerator[MasterData, untyped] | Enumerator[untyped, untyped])
+ def to_a: -> Array[MasterData]
+ end
+
+ module Report
+ module Printer
+ attr_accessor silent: false
+ def initialize: (?silent: false) -> void
+ def print: (String message) -> nil
+ end
+
+ class DefaultPrinter
+ include Printer
+
+ def print: (String message) -> nil
+ end
+
+ module Core
+ def initialize: (MasterData master_data) -> void
+ def print: (Printer printer) -> untyped
+
+ private
+ def convert_to_ltsv: ({operation: :affected_table | :verify, table_name: untyped, valid: untyped, id: untyped} items) -> String
+ end
+
+ class ImportReport
+ include Core
+ @master_data: MasterData
+
+ attr_reader reports: Array[untyped]
+ def reports: -> Array[untyped]
+ def print: (Printer printer) -> untyped
+
+ private
+ def count_report: -> Hash[Symbol, Array[Hash[Symbol, untyped]]]
+ def new_records_report: -> Hash[Symbol, Array[Hash[Symbol, untyped]]]
+ def updated_records_report: -> Hash[Symbol, Array[Hash[Symbol, untyped]]]
+ def no_change_records_report: -> Hash[Symbol, Array[Hash[Symbol, untyped]]]
+ def deleted_records_report: -> Hash[Symbol, Array[Hash[Symbol, untyped]]]
+ end
+
+ class VerifyReport
+ include Core
+ @master_data: MasterData
+
+ attr_reader reports: Array[{operation: :verify, table_name: untyped, valid: untyped, id: untyped}]
+ def initialize: (MasterData master_data) -> void
+ def append: ({operation: :verify, table_name: untyped, valid: untyped, id: untyped} verify_record_report) -> Array[{operation: :verify, table_name: untyped, valid: untyped, id: untyped}]
+ def print: (Printer printer) -> Array[{operation: :verify, table_name: untyped, valid: untyped, id: untyped}]
+ def self.build_verify_record_report: (MasterData master_data, untyped record, untyped valid) -> {operation: :verify, table_name: untyped, valid: untyped, id: untyped}
+ end
+
+ class PrintAffectedTableReport
+ include Core
+ @master_data: MasterData
+
+ def print: (Printer printer) -> nil
+ end
+ end
+
+ module Dump
+ class Executor
+ DEFAULT_IGNORE_TABLES: [String, String, String]
+ DEFAULT_IGNORE_COLUMNS: [String, String]
+ @ignore_empty_table: bool
+ @ignore_tables: Array[String]
+ @ignore_column_names: Array[String]
+ @only_tables: Array[String]
+ @verbose: bool
+
+ def initialize: (ignore_empty_table: bool, ignore_tables: Array[String], ignore_column_names: Array[String], only_tables: Array[String], verbose: bool) -> void
+ def execute: -> Array[untyped]
+
+ private
+ def print_message: (String message) -> nil
+ def dump_to_csv: (untyped table) -> nil
+ def ignore?: (untyped model_klass) -> false
+
+ class Error < Struct[untyped]
+ attr_accessor table(): untyped
+ attr_accessor exception(): nil
+ end
+ end
+ end
+
+ module Import
+ class Executor
+ @dry_run: bool
+ @verify: bool
+ @only_import_tables: Array[String]
+ @except_import_tables: Array[String]
+ @only_verify_tables: Array[String]
+ @except_verify_tables: Array[String]
+ @skip_no_change: bool
+ @silent: bool
+ @delete_all_ignore_foreign_key: bool
+ @override_identifier: String?
+ @report_printer: Report::DefaultPrinter
+ @master_data_statuses: [MasterDataStatus]
+
+ def initialize: (dry_run: bool, verify: bool, only_import_tables: Array[String], except_import_tables: Array[String], only_verify_tables: Array[String], except_verify_tables: Array[String], skip_no_change: bool, silent: bool, delete_all_ignore_foreign_key: bool, override_identifier: String?, report_printer: Report::Printer) -> void
+ def execute: -> nil
+
+ private
+ def print_execute_options: -> nil
+ def build_master_data_collection: -> MasterDataCollection
+ def import_all!: (MasterDataCollection master_data_collection) -> (Array[MasterData] | Enumerator[MasterData, untyped] | Enumerator[untyped, untyped])
+ def verify_all!: (MasterDataCollection master_data_collection) -> (Array[MasterData] | Enumerator[MasterData, untyped] | Enumerator[untyped, untyped])
+ def save_master_data_statuses!: (MasterDataCollection master_data_collection) -> Array[MasterDataStatus]
+ def print_affected_tables: (MasterDataCollection master_data_collection) -> (Array[MasterData] | Enumerator[MasterData, untyped] | Enumerator[untyped, untyped])
+ def load_skip_table?: (untyped master_data_file) -> bool
+ def import_skip_table?: (untyped table_name) -> bool
+ def verify_skip_table?: (untyped table_name) -> bool
+ def need_skip_table?: (untyped table_name, Array[untyped] only, Array[untyped] except) -> bool
+ def extract_master_data_csv_paths: -> Array[Pathname]
+ def overridden_master_data_csv_paths: -> Array[Pathname]
+ def master_data_statuses: -> untyped
+ end
+ end
+
+ class Error < StandardError
+ end
+
+ class DryRunError < StandardError
+ end
+
+ class NotLoadedError < StandardError
+ end
+
+ class VerifyFailed < StandardError
+ attr_accessor errors: untyped
+ end
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: master_data_tool
  version: !ruby/object:Gem::Version
- version: 0.16.0
+ version: 0.19.0
  platform: ruby
  authors:
  - Takahiro Ooishi
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2022-06-11 00:00:00.000000000 Z
+ date: 2022-07-31 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rspec
@@ -194,9 +194,10 @@ files:
  - lib/master_data_tool/dump/executor.rb
  - lib/master_data_tool/import.rb
  - lib/master_data_tool/import/executor.rb
- - lib/master_data_tool/import/master_data_file_list.rb
  - lib/master_data_tool/master_data.rb
+ - lib/master_data_tool/master_data_collection.rb
  - lib/master_data_tool/master_data_file.rb
+ - lib/master_data_tool/master_data_file_collection.rb
  - lib/master_data_tool/master_data_status.rb
  - lib/master_data_tool/report.rb
  - lib/master_data_tool/report/core.rb
@@ -207,6 +208,8 @@ files:
  - lib/master_data_tool/report/verify_report.rb
  - lib/master_data_tool/version.rb
  - log/test.log
+ - master_data_tool.gemspec
+ - scripts/drop_db.sh
  - scripts/setup.sh
  - sig/master_data_tool.rbs
  homepage: https://github.com/taka0125/master_data_tool
@@ -229,7 +232,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.0.3
+ rubygems_version: 3.3.7
  signing_key:
  specification_version: 4
  summary: マスタデータの管理ツール
@@ -1,43 +0,0 @@
- # frozen_string_literal: true
-
- module MasterDataTool
- module Import
- class MasterDataFileList
- Result = Struct.new(:table_name, :path, :override_identifier)
-
- def initialize(override_identifier: nil)
- @override_identifier = override_identifier
- end
-
- def build
- files = extract_master_data_csv_paths.presence&.index_by(&:table_name)
- overridden_files = overridden_master_data_csv_paths.presence&.index_by(&:table_name) || {}
-
- table_names = (files.keys + overridden_files.keys).uniq
- table_names.map do |table_name|
- overridden_files[table_name] || files[table_name]
- end
- end
-
- private
-
- def extract_master_data_csv_paths
- pattern = Pathname.new(MasterDataTool.config.master_data_dir).join('*.csv').to_s
- Pathname.glob(pattern).select(&:file?).map do |path|
- table_name = MasterDataTool.resolve_table_name(path, nil)
- MasterDataTool::MasterDataFile.new(table_name, path, nil)
- end
- end
-
- def overridden_master_data_csv_paths
- return [] if @override_identifier.blank?
-
- pattern = Pathname.new(MasterDataTool.config.master_data_dir).join(@override_identifier).join('*.csv').to_s
- Pathname.glob(pattern).select(&:file?).map do |path|
- table_name = MasterDataTool.resolve_table_name(path, @override_identifier)
- MasterDataTool::MasterDataFile.new(table_name, path, @override_identifier)
- end
- end
- end
- end
- end