eitje_s3 1.0.0 → 1.0.4

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: b3fab11f169aa044513495cd47f13853ec305a31fe72e5075b681f042d9712c8
-  data.tar.gz: 464f079e85d4e7df5373d70732ba93852ff9489b3d543404eee769d6d5e86dec
+  metadata.gz: 2807a3b8cb433040b1e0e32add53c9ad8cd6bdb01912067fb8466b352500e3f5
+  data.tar.gz: 6cf07414f5ad6db0e99f95b06917d87d109a3fecb8e0c2b2b032b420ddd2f022
 SHA512:
-  metadata.gz: a8c6436222d6708cbe69ae3d4d38e97c6a015db22e870639520ea05ee36ad721fadb7cae6eac3daeb53a5649035ead3562d654865f785c2f2e8e77468b2647cf
-  data.tar.gz: 47f1b3af7e66c4740d8a4fcba65d824096e4287a0b22d122d2e132fbc37b660e573ac18ff3c38b72e1a0539dbf8540c792fcab032f9e1ebe77efc0ad0c399319
+  metadata.gz: 76182141a7f669de16ed8f9de9740c1714f10cb0d8a7deb0822018b81921a0d5e448cb5e95ef1671264ab5ec47232f3c111fe71237e5d56ef403713c51240f72
+  data.tar.gz: c5c9450d3d14c3e5ada2a1a3c940c6876f179bbc1525f50bbb669b9bd3245e645eb41645a94c0d64ed2d38c00ac9e4259d78981100f8275a3c1fdd0b06a1c80f
data/lib/s3/new_deleted_records_service.rb CHANGED
@@ -1,13 +1,7 @@
 module S3::NewDeletedRecordsService
   class << self

-    DB_TABLES = %w$ shifts teams users contracts infos posts $
-
-    def test(table)
-      S3::NewDeletedRecordsService.get_records(
-        db_table: table, start_date: '2021-03-28', end_date: '2021-04-3', env_id: 513
-      )
-    end
+    DB_TABLES = %w$ shifts teams users contracts infos posts topics $

     def get_records(db_table:, start_date:, end_date:, env_id:)
       @date_range = Date.parse(start_date)..Date.parse(end_date)
@@ -32,7 +26,13 @@ module S3::NewDeletedRecordsService
     end

     def query_records
-      file = @s3.get_object(bucket: 'eitje-deleted-jurr', key: @file_name)
+
+      # Previously (before adding 'topics') the bucket requested here was 'eitje-deleted-jurr',
+      # but topics break unless the '-2' bucket is requested. For the other tables the
+      # original bucket returns far too many records, so it probably does not filter by
+      # date. Switching for now; investigate if anything regresses.
+
+      file = @s3.get_object(bucket: 'eitje-deleted-jurr-2', key: @file_name)
       @records = JSON.parse(file.body.read.as_json).map(&:symbolize_keys)
     end

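Note: the removed 'test' helper above documented this service's call shape. A minimal console sketch of an equivalent invocation (the table name, dates, and env_id are illustrative, taken from the removed helper):

    # Fetch deleted-record rows for one table, date range, and environment.
    S3::NewDeletedRecordsService.get_records(
      db_table: 'shifts',
      start_date: '2021-03-28',
      end_date: '2021-04-03',
      env_id: 513
    )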
data/lib/s3/old_deleted_records_service.rb CHANGED
@@ -1,9 +1,7 @@
 module S3::OldDeletedRecordsService
   class << self

-    DB_TABLES = %w$ shifts teams users contracts infos posts $
-
-    # S3::OldDeletedRecordsService.get_records(env_id: 307, env_name: 'Kua - Den Haag', db_table: 'verlofverzoeks', start_date: '2021-02-01', end_date: '2021-04-01')
+    DB_TABLES = %w$ shifts teams users contracts infos posts topics $

     def get_records(db_table:, start_date:, end_date:, env_id:, env_name:)

@@ -20,7 +18,6 @@ module S3::OldDeletedRecordsService
       @file_names_filtered_by_date = filter_by_date

       read_all_files
-
     end

     # validations
@@ -36,7 +33,15 @@ module S3::OldDeletedRecordsService

         @file_name = file_name
         @file = request_object
-        @db_table == 'users' ? filter_users_table_by_env : filter_file_by_env
+
+        case @db_table
+        when 'users'
+          filter_users_table_by_env
+        when 'topics'
+          filter_topics_table_by_env
+        else
+          filter_file_by_env
+        end

       end.flatten
     end
@@ -59,6 +64,10 @@ module S3::OldDeletedRecordsService
       @file.select { |row| row[:envs].include? @env_name }
     end

+    def filter_topics_table_by_env
+      @file.select { |row| row[:environment_ids]&.include?(@env_id) }
+    end
+
     def filter_by_date
       @file_names_filtered_by_table.select { |file_name| @date_range.include?(get_date(file_name)) }
     end
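For context on the new filter_topics_table_by_env: topics rows carry integer environment ids, while the generic filter matches environment names. A sketch with hypothetical rows (field values invented for illustration):

    # Hypothetical row shapes, for illustration only.
    topic_row = { id: 1, environment_ids: [513, 307] }  # matched via row[:environment_ids]&.include?(@env_id)
    other_row = { id: 2, envs: ['Kua - Den Haag'] }     # matched via row[:envs].include?(@env_name)

The safe navigation (&.) means topics rows without environment_ids are simply dropped rather than raising.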
data/lib/s3/transform_deleted_files_service.rb CHANGED
@@ -5,24 +5,61 @@ module S3::TransformDeletedFilesService

     BUCKET = 'eitje-deleted-jurr-2'

-    def migrate_files(start_date: '2019-07-18')
+    def set_setters(start_date = Date.yesterday)
       @start_date = start_date
-
       set_logger
       set_bucket
       set_tables
       set_dates(start_date)
+    end
+
+    def migrate_files(start_date: Date.today)
+      set_setters(start_date)
+
+      s3 = Aws::S3::Client.new
+      envs_to_migrate = []

-      Environment.find_each do |env|
-        @env = env
+      set_tables.each do |table|
+        object = s3.get_object(bucket: 'eitje-backups', key: "#{table}/#{start_date.strftime("%Y-%m-%d")}.json")
+        json = JSON.parse(object.body.read.as_json).map(&:symbolize_keys)

-        @tables.each do |table|
-          @table = table
-          compose_file
+        if table == 'topics'
+          env_ids = json.map { |row| row[:environment_ids] }.flatten.compact.uniq
+        else
+          env_ids = json.map { |row| row[:env] }.uniq.map { |name| Environment.find_by(naam: name)&.id }
         end
+
+        envs_to_migrate << env_ids
+      rescue => e
+        # If the file does not exist on S3 (no deleted records for this table),
+        # skip to the next table.
+        next
+      end
+
+      envs_to_migrate = envs_to_migrate.flatten.uniq.compact
+
+      envs_to_migrate.each { |env_id| migrate_files_single_env(env_id, start_date: start_date, skip_setters: true) }
+    end
+
+    def migrate_files_single_env(environment_id, start_date: Date.yesterday, skip_setters: false)
+      set_setters(start_date) unless skip_setters
+      @env = Environment.find(environment_id)
+      @tables.each do |table|
+        @table = table
+        compose_file
       end
     end

+    def migrate_files_multi_env(environment_ids, start_date: Date.yesterday)
+      set_setters(start_date)
+      environment_ids.each { |id| migrate_files_single_env(id, start_date: start_date, skip_setters: true) }
+    end
+
+    def migrate_files_single_org(organisation_id, start_date: Date.yesterday)
+      env_ids = Organisation.find(organisation_id).environment_ids
+      migrate_files_multi_env(env_ids, start_date: start_date)
+    end
+
     def set_logger
       @logger = Logger.new "log/migrate_deleted_records_#{DateTime.now.strftime('%Y_%m_%d_%H:%M:%S')}.log"
     end
@@ -32,8 +69,7 @@ module S3::TransformDeletedFilesService
     end

     def set_tables
-      # @tables = S3::OldDeletedRecordsService::singleton_class::DB_TABLES
-      @tables = ['verlof_verzoeken']
+      @tables = S3::OldDeletedRecordsService::singleton_class::DB_TABLES
     end

     def set_dates(start_date)
@@ -46,7 +82,7 @@ module S3::TransformDeletedFilesService
     end

     def set_records
-      @records = S3::OldDeletedRecordsService.get_records(env_id: @env.id, env_name: @env.naam, db_table: 'verlofverzoeks', **@dates)
+      @records = S3::OldDeletedRecordsService.get_records(env_id: @env.id, env_name: @env.naam, db_table: @table, **@dates)
     end

     def set_json
@@ -72,7 +108,8 @@ module S3::TransformDeletedFilesService

       (@records += @existing_records) if @existing_records
       set_json
-      upload_file
+      upload_file
+
     rescue => e
       @logger.error "Error for env #{@env.naam} (##{@env.id}) with table '#{@table}' => #{e.class}: #{e.message}.\n\nBacktrace:#{e.backtrace}\n"
     end
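The refactor above replaces the single Environment.find_each loop with per-scope entry points. A sketch of how they might be driven from a console (ids and dates are illustrative; the receiver-level calls assume the methods live under class << self, as the existing S3::OldDeletedRecordsService.get_records call suggests):

    # Migrate every environment seen in today's backup files.
    S3::TransformDeletedFilesService.migrate_files(start_date: Date.today)

    # Narrower scopes: one environment, a set of environments, or one organisation.
    S3::TransformDeletedFilesService.migrate_files_single_env(513)
    S3::TransformDeletedFilesService.migrate_files_multi_env([513, 307])
    S3::TransformDeletedFilesService.migrate_files_single_org(42)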
data/lib/s3/version.rb CHANGED
@@ -1,3 +1,3 @@
 module S3
-  VERSION = '1.0.0'
+  VERSION = '1.0.4'
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: eitje_s3
 version: !ruby/object:Gem::Version
-  version: 1.0.0
+  version: 1.0.4
 platform: ruby
 authors:
 - Jurriaan Schrofer
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-06-28 00:00:00.000000000 Z
+date: 2021-08-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rails