eitje_s3 1.0.3 → 1.0.4

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: '06915019d41e0d0790ff75cc9478cb3a94aa9c2a3d203861326e4408e6e883bc'
4
- data.tar.gz: 423a60af37a589c1aa57c088a241818bea588938dd935a8133625c6ebc84a038
3
+ metadata.gz: 2807a3b8cb433040b1e0e32add53c9ad8cd6bdb01912067fb8466b352500e3f5
4
+ data.tar.gz: 6cf07414f5ad6db0e99f95b06917d87d109a3fecb8e0c2b2b032b420ddd2f022
5
5
  SHA512:
6
- metadata.gz: 50a91cdacc1e1b7d33c74920213930a13b3c61adb75e2567d047462336738d40aab549e48c04f4ab068efa13061f64cf7f3aac54b35ca48a8a0832edd6a6e7c5
7
- data.tar.gz: e09bb1c05cf7b35c14cb678cdd897494cae2ed1a3a003dfb4654b19fe316ea266bddf7e6bd89107c16c9523f8ddc1f6a52e082d65750a829b3af155299601272
6
+ metadata.gz: 76182141a7f669de16ed8f9de9740c1714f10cb0d8a7deb0822018b81921a0d5e448cb5e95ef1671264ab5ec47232f3c111fe71237e5d56ef403713c51240f72
7
+ data.tar.gz: c5c9450d3d14c3e5ada2a1a3c940c6876f179bbc1525f50bbb669b9bd3245e645eb41645a94c0d64ed2d38c00ac9e4259d78981100f8275a3c1fdd0b06a1c80f
@@ -1,13 +1,7 @@
1
1
  module S3::NewDeletedRecordsService
2
2
  class << self
3
3
 
4
- DB_TABLES = %w$ shifts teams users contracts infos posts $
5
-
6
- def test(table)
7
- S3::NewDeletedRecordsService.get_records(
8
- db_table: table, start_date: '2021-03-28', end_date: '2021-04-3', env_id: 513
9
- )
10
- end
4
+ DB_TABLES = %w$ shifts teams users contracts infos posts topics $
11
5
 
12
6
  def get_records(db_table:, start_date:, end_date:, env_id:)
13
7
  @date_range = Date.parse(start_date)..Date.parse(end_date)
@@ -32,7 +26,13 @@ module S3::NewDeletedRecordsService
32
26
  end
33
27
 
34
28
  def query_records
35
- file = @s3.get_object(bucket: 'eitje-deleted-jurr', key: @file_name)
29
+
30
+ # Previously (before adding 'topics') the request bucket was 'eitje-deleted-jurr'
31
+ # but somehow topics break if we don't request the bucket '-2'. Now for other tables
32
+ # the original returns far too many records, so probably does not filter by date or
33
+ # something. Change for now and investigate further if anything breaks.
34
+
35
+ file = @s3.get_object(bucket: 'eitje-deleted-jurr-2', key: @file_name)
36
36
  @records = JSON.parse(file.body.read.as_json).map(&:symbolize_keys)
37
37
  end
38
38
 
@@ -3,8 +3,6 @@ module S3::OldDeletedRecordsService
3
3
 
4
4
  DB_TABLES = %w$ shifts teams users contracts infos posts topics $
5
5
 
6
- # S3::OldDeletedRecordsService.get_records(env_id: 307, env_name: 'Kua - Den Haag', db_table: 'verlofverzoeks', start_date: '2021-02-01', end_date: '2021-04-01')
7
-
8
6
  def get_records(db_table:, start_date:, end_date:, env_id:, env_name:)
9
7
 
10
8
  # validate_args(db_table)
@@ -20,7 +18,6 @@ module S3::OldDeletedRecordsService
20
18
  @file_names_filtered_by_date = filter_by_date
21
19
 
22
20
  read_all_files
23
-
24
21
  end
25
22
 
26
23
  # validations
@@ -36,7 +33,15 @@ module S3::OldDeletedRecordsService
36
33
 
37
34
  @file_name = file_name
38
35
  @file = request_object
39
- @db_table == 'users' ? filter_users_table_by_env : filter_file_by_env
36
+
37
+ case @db_table
38
+ when 'users'
39
+ filter_users_table_by_env
40
+ when 'topics'
41
+ filter_topics_table_by_env
42
+ else
43
+ filter_file_by_env
44
+ end
40
45
 
41
46
  end.flatten
42
47
  end
@@ -59,6 +64,10 @@ module S3::OldDeletedRecordsService
59
64
  @file.select { |row| row[:envs].include? @env_name }
60
65
  end
61
66
 
67
+ def filter_topics_table_by_env
68
+ @file.select { |row| row[:environment_ids]&.include?(@env_id) }
69
+ end
70
+
62
71
  def filter_by_date
63
72
  @file_names_filtered_by_table.select { |file_name| @date_range.include?(get_date(file_name)) }
64
73
  end
@@ -22,15 +22,23 @@ module S3::TransformDeletedFilesService
22
22
  set_tables.each do |table|
23
23
  object = s3.get_object(bucket: 'eitje-backups', key: "#{table}/#{start_date.strftime("%Y-%m-%d")}.json")
24
24
  json = JSON.parse(object.body.read.as_json).map(&:symbolize_keys)
25
- env_ids = json.map {|row| row[:env]}.uniq.map { |name| Environment.find_by(naam: name)&.id }
25
+
26
+ if table == 'topics'
27
+ env_ids = json.map {|row| row[:environment_ids]}.flatten.compact.uniq
28
+ else
29
+ env_ids = json.map {|row| row[:env]}.uniq.map { |name| Environment.find_by(naam: name)&.id }
30
+ end
31
+
26
32
  envs_to_migrate << env_ids
27
- rescue
33
+ rescue => e
28
34
  # in case the file does not exist on S3, cause there are no deleted
29
35
  # records, skip to next table
30
36
  next
31
37
  end
38
+
39
+ envs_to_migrate = envs_to_migrate.flatten.uniq.compact
32
40
 
33
- envs_to_migrate.flatten.uniq.each { |env_id| migrate_files_single_env(env_id, start_date: start_date, skip_setters: true) }
41
+ envs_to_migrate.each { |env_id| migrate_files_single_env(env_id, start_date: start_date, skip_setters: true) }
34
42
  end
35
43
 
36
44
  def migrate_files_single_env(environment_id, start_date: Date.yesterday, skip_setters: false)
@@ -100,7 +108,8 @@ module S3::TransformDeletedFilesService
100
108
 
101
109
  (@records += @existing_records) if @existing_records
102
110
  set_json
103
- upload_file
111
+ upload_file
112
+
104
113
  rescue => e
105
114
  @logger.error "Error for env #{@env.naam} (##{@env.id}) with table '#{@table}' => #{e.class}: #{e.message}.\n\nBacktrace:#{e.backtrace}\n"
106
115
  end
data/lib/s3/version.rb CHANGED
@@ -1,3 +1,3 @@
1
1
  module S3
2
- VERSION = '1.0.3'
2
+ VERSION = '1.0.4'
3
3
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: eitje_s3
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.0.3
4
+ version: 1.0.4
5
5
  platform: ruby
6
6
  authors:
7
7
  - Jurriaan Schrofer
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2021-07-22 00:00:00.000000000 Z
11
+ date: 2021-08-23 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: rails