maisaedu-poormans-dms 1.0.57__tar.gz → 1.0.59__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/PKG-INFO +1 -1
  2. maisaedu-poormans-dms-1.0.57/maisaedu_poormans_dms/redshift_migration/MigratorRedshiftConnector.py → maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/Connector.py +8 -2
  3. maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/MigratorRedshift.py +68 -0
  4. maisaedu-poormans-dms-1.0.57/maisaedu_poormans_dms/redshift_migration/MigratorRedshiftReader.py → maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/Reader.py +43 -10
  5. maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/Services/ExtractionOperation.py +31 -0
  6. maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/Services/Struct.py +93 -0
  7. maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/Types.py +22 -0
  8. maisaedu-poormans-dms-1.0.57/maisaedu_poormans_dms/redshift_migration/MigratorRedshiftWriter.py → maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/Writer.py +36 -13
  9. maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/__init__.py +0 -0
  10. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms.egg-info/PKG-INFO +1 -1
  11. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms.egg-info/SOURCES.txt +8 -4
  12. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/setup.py +1 -1
  13. maisaedu-poormans-dms-1.0.57/maisaedu_poormans_dms/redshift_migration/MigratorRedshift.py +0 -123
  14. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/LICENSE +0 -0
  15. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/__init__.py +0 -0
  16. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/postgres_migration/Contracts/MigratorInterface.py +0 -0
  17. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/postgres_migration/Contracts/MigratorRowInterface.py +0 -0
  18. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/postgres_migration/Contracts/MigratorRowReaderInterface.py +0 -0
  19. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/postgres_migration/Contracts/MigratorRowWriterInterface.py +0 -0
  20. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/postgres_migration/Contracts/MigratorTableInterface.py +0 -0
  21. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/postgres_migration/Contracts/__init__.py +0 -0
  22. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/postgres_migration/Migrator.py +0 -0
  23. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/postgres_migration/MigratorRow/MigratorRow.py +0 -0
  24. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/postgres_migration/MigratorRow/Reader.py +0 -0
  25. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/postgres_migration/MigratorRow/Writer.py +0 -0
  26. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/postgres_migration/MigratorRow/__init__.py +0 -0
  27. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/postgres_migration/MigratorTable.py +0 -0
  28. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/postgres_migration/__init__.py +0 -0
  29. {maisaedu-poormans-dms-1.0.57/maisaedu_poormans_dms/redshift_migration → maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/Services}/__init__.py +0 -0
  30. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms/sql_server_migration.py +0 -0
  31. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms.egg-info/dependency_links.txt +0 -0
  32. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms.egg-info/requires.txt +0 -0
  33. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms.egg-info/top_level.txt +0 -0
  34. {maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/setup.cfg +0 -0

{maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: maisaedu-poormans-dms
-Version: 1.0.57
+Version: 1.0.59
 Summary: A library for making database migration tasks, for +A Education
 Home-page: UNKNOWN
 Author: A+ Educação

maisaedu-poormans-dms-1.0.57/maisaedu_poormans_dms/redshift_migration/MigratorRedshiftConnector.py → maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/Connector.py
@@ -3,9 +3,10 @@ import psycopg2
 from sqlalchemy import create_engine
 
 from maisaedu_utilities_prefect.dw import get_red_credentials
+from .Types import DEV, PROD, LOCAL
 
 
-class MigratorRedshiftConnector:
+class Connector:
     def __init__(
         self,
         env,
@@ -18,7 +19,12 @@ class MigratorRedshiftConnector:
         self.iam_role = "arn:aws:iam::977647303146:role/service-role/AmazonRedshift-CommandsAccessRole-20220714T104138"
 
     def connect_target(self):
-        red_credentials = get_red_credentials(self.env)
+        if self.env == LOCAL:
+            env = DEV
+        else:
+            env = self.env
+
+        red_credentials = get_red_credentials(env)
         self.target_conn = psycopg2.connect(
             host=red_credentials["host"],
             database=red_credentials["database"],
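
The upshot of the connect_target change: a run with env="local" now borrows the dev Redshift credentials rather than requiring a separate local credential set. A minimal sketch of the new behavior (the keyword values are placeholders, not package docs):

    from maisaedu_poormans_dms.redshift_migration.Connector import Connector

    connector = Connector(env="local", s3_credentials=None, source_credentials=None)
    connector.connect_target()  # "local" resolves to get_red_credentials("dev")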

maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/MigratorRedshift.py (new file)
@@ -0,0 +1,68 @@
+from .Connector import Connector
+from .Reader import Reader
+from .Writer import Writer
+from .Services.Struct import Struct
+from .Types import check_if_env_is_valid, check_if_option_is_valid
+
+
+class MigratorRedshift:
+    def __init__(
+        self,
+        env=None,
+        s3_credentials=None,
+        struct=None,
+        source_credentials=None,
+    ):
+        check_if_env_is_valid(env)
+
+        self.migrator_redshift_connector = Connector(
+            env=env,
+            s3_credentials=s3_credentials,
+            source_credentials=source_credentials,
+        )
+
+        self.migrator_redshift_reader = Reader(
+            s3_credentials=s3_credentials,
+            struct=struct,
+            migrator_redshift_connector=self.migrator_redshift_connector,
+        )
+
+        self.migrator_redshift_writer = Writer(
+            struct=struct,
+            migrator_redshift_connector=self.migrator_redshift_connector,
+        )
+
+        self.source_credentials = source_credentials
+        self.struct = struct
+        self.s3_credentials = s3_credentials
+        self.env = env
+
+    def check_target_table_has_data(self):
+        self.migrator_redshift_connector.connect_target()
+        sql = f"""
+            select count(*) from "{self.struct["target_schema"]}"."{self.struct["target_table"]}" limit 1
+        """
+        cursor = self.migrator_redshift_connector.target_conn.cursor()
+        cursor.execute(sql)
+        result = cursor.fetchall()
+        if result[0][0] == 0:
+            return False
+        else:
+            return True
+
+    def extract_to_redshift(self, load_option=None):
+        check_if_option_is_valid(load_option)
+
+        url = self.migrator_redshift_reader.save_data_to_s3(load_option)
+        return self.migrator_redshift_writer.save_to_redshift(url)
+
+    def get_structs_source_to_target(self, database, tables="all"):
+        self.migrator_redshift_connector.connect_target()
+        structs = Struct(conn=self.migrator_redshift_connector.target_conn).get(
+            database=database,
+            tables=tables,
+        )
+
+        self.migrator_redshift_connector.target_conn.close()
+
+        return structs
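
Taken together, MigratorRedshift now validates its inputs up front and wires the Connector, Reader, Writer, and operation log together. A usage sketch (credential dicts and names are placeholders; this is not from the package docs):

    from maisaedu_poormans_dms.redshift_migration.MigratorRedshift import MigratorRedshift

    s3_credentials = {"bucket": "my-bucket"}  # plus whatever Connector.connect_s3 expects
    source_credentials = {}  # source database credentials, shape depends on the source engine

    structs = MigratorRedshift(
        env="dev",
        s3_credentials=s3_credentials,
        source_credentials=source_credentials,
    ).get_structs_source_to_target(database="mydb")

    for struct in structs:
        MigratorRedshift(
            env="dev",
            s3_credentials=s3_credentials,
            struct=struct,
            source_credentials=source_credentials,
        ).extract_to_redshift(load_option=None)  # None lets the Reader pick full vs incremental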

maisaedu-poormans-dms-1.0.57/maisaedu_poormans_dms/redshift_migration/MigratorRedshiftReader.py → maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/Reader.py
@@ -1,10 +1,11 @@
 import io
-import io
 import pandas as pd
 from datetime import datetime
+from .Types import LOCAL, FULL, INCREMENTAL, SAVED_S3
+from .Services.ExtractionOperation import ExtractionOperation
 
 
-class MigratorRedshiftReader:
+class Reader:
     def __init__(self, s3_credentials, struct, migrator_redshift_connector):
         self.struct = struct
         self.s3_credentials = s3_credentials
@@ -17,6 +18,8 @@ class MigratorRedshiftReader:
                 "text": "str",
                 "timestamp": "datetime64[ns]",
                 "super": "str",
+                "int": pd.Int64Dtype(),
+                "bigint": pd.Int64Dtype(),
             }
 
             if c["target_type"] in case_target_type.keys():
@@ -25,7 +28,6 @@
                 )
                 if case_target_type[c["target_type"]] == "str":
                     df[c["source_name"]].replace("None", "", inplace=True)
-
         return df
 
     def get_incremental_statement(self):
@@ -34,11 +36,12 @@
             and self.struct["source_incremental_column"] is not None
             and "target_incremental_column" in self.struct.keys()
             and self.struct["target_incremental_column"] is not None
+            and (self.load_option is None)
         ):
             self.migrator_redshift_connector.connect_target()
             sql = f"""
                 select max("{self.struct["target_incremental_column"]}") as max_value
-                from {self.struct["target_relation"]}
+                from "{self.struct["target_schema"]}"."{self.struct["target_table"]}"
             """
 
             cursor = self.migrator_redshift_connector.target_conn.cursor()
@@ -48,6 +51,7 @@
 
             if len(result) == 0 or result[0][0] is None:
                 sql_return = ""
+                self.load_option = FULL
             else:
                 for c in self.struct["columns"]:
                     if c["target_name"] == self.struct["target_incremental_column"]:
@@ -62,14 +66,22 @@
                     ):
                         sql_return = f'and "{self.struct["source_incremental_column"]}" > {result[0][0]}'
                     else:
-                        sql_return = f'and "{self.struct["source_incremental_column"]}" > \'{result[0][0]}\''
+                        if (
+                            self.struct["incremental_interval_delta"] is None
+                            or self.struct["incremental_interval_delta"] == ""
+                        ):
+                            sql_return = f'and "{self.struct["source_incremental_column"]}" > \'{result[0][0]}\''
+                        else:
+                            sql_return = f'and "{self.struct["source_incremental_column"]}" >= \'{result[0][0]}\'::timestamp - interval \'{self.struct["incremental_interval_delta"]}\''
+
+                self.load_option = INCREMENTAL
 
             cursor.close()
             self.migrator_redshift_connector.target_conn.close()
 
             return sql_return
-
         else:
+            self.load_option = FULL
             return ""
 
     def get_columns_source(self):
@@ -96,18 +108,26 @@
         else:
             return ""
 
+    def get_limit_sql_statement(self):
+        if self.migrator_redshift_connector.env == LOCAL:
+            return f" limit 100"
+        else:
+            return f""
+
     def get_sql_statement(self):
         sql = f"""
             select {self.get_columns_source()}
-            from {self.struct["source_relation"]}
+            from "{self.struct["source_schema"]}"."{self.struct["source_table"]}"
             where 1=1
             {self.get_incremental_statement()}
             {self.get_order_by_sql_statement()}
+            {self.get_limit_sql_statement()}
         """
         print(f"SQL Statement: {sql}")
         return sql
 
-    def save_data_to_s3(self):
+    def save_data_to_s3(self, load_option=None):
+        self.load_option = load_option
         self.migrator_redshift_connector.connect_s3()
         self.migrator_redshift_connector.connect_source()
 
@@ -123,7 +143,7 @@
                 chunksize=self.struct["read_batch_size"],
             ):
                 if len(chunk_df) != 0:
-                    path_file = f'{self.s3_credentials["path"]}{self.struct["database"]}/{self.struct["source_relation"]}/{time}/{idx}.parquet'
+                    path_file = f'raw/prefect/{self.migrator_redshift_connector.env}/{self.struct["database"]}/{self.struct["source_schema"]}/{self.struct["source_table"]}/{time}/{idx}.parquet'
                     print(f"Saving file {path_file}")
 
                     buffer = io.BytesIO()
@@ -143,4 +163,17 @@
         if path_file is None:
             return None
         else:
-            return f's3://{self.s3_credentials["bucket"]}/{self.s3_credentials["path"]}{self.struct["database"]}/{self.struct["source_relation"]}/{time}/'
+            url = f's3://{self.s3_credentials["bucket"]}/raw/prefect/{self.migrator_redshift_connector.env}/{self.struct["database"]}/{self.struct["source_schema"]}/{self.struct["source_table"]}/{time}/'
+
+            self.migrator_redshift_connector.connect_target()
+            ExtractionOperation(
+                conn=self.migrator_redshift_connector.target_conn,
+            ).create(
+                relation_id=self.struct["id"],
+                url=url,
+                load_option=self.load_option,
+                status=SAVED_S3,
+            )
+            self.migrator_redshift_connector.close_target()
+
+            return url
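
Two behavioral consequences of the Reader changes: each extraction is now logged with the load option that was actually used (FULL whenever no incremental predicate is emitted, whether because the target is empty, the incremental columns are not configured, or a load_option was passed explicitly; INCREMENTAL otherwise), and a non-empty incremental_interval_delta widens the watermark so late-arriving rows are re-read. Illustrative predicates from get_incremental_statement (column name and timestamp invented):

    # incremental_interval_delta unset:
    #   and "updated_at" > '2024-01-01 00:00:00'
    # incremental_interval_delta = '1 hour':
    #   and "updated_at" >= '2024-01-01 00:00:00'::timestamp - interval '1 hour'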

maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/Services/ExtractionOperation.py (new file)
@@ -0,0 +1,31 @@
+class ExtractionOperation:
+    def __init__(self, conn=None):
+        self.conn = conn
+
+    def create(self, relation_id, url, load_option, status):
+        cursor = self.conn.cursor()
+
+        sql = f"""
+            insert into dataeng.relations_extraction_operations
+                (relation_id, url, option, status, created_at, updated_at)
+            values
+                ({relation_id}, '{url}', '{load_option}', '{status}', 'now()', 'now()')
+        """
+
+        cursor.execute(sql)
+
+        self.conn.commit()
+        cursor.close()
+
+    def update(self, url, status):
+        cursor = self.conn.cursor()
+
+        sql = f"""
+            update dataeng.relations_extraction_operations
+            set status = '{status}', updated_at = 'now()'
+            where url = '{url}'
+        """
+
+        cursor.execute(sql)
+        self.conn.commit()
+        cursor.close()
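
Note that create and update interpolate values directly into the SQL text. A parameterized variant (a sketch, not what the package ships) would delegate quoting to the driver:

    cursor.execute(
        """
        insert into dataeng.relations_extraction_operations
            (relation_id, url, option, status, created_at, updated_at)
        values (%s, %s, %s, %s, now(), now())
        """,
        (relation_id, url, load_option, status),
    )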

maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/Services/Struct.py (new file)
@@ -0,0 +1,93 @@
+class Struct:
+    def __init__(self, conn=None):
+        self.conn = conn
+
+    def get(self, database, tables="all"):
+        cursor = self.conn.cursor()
+
+        statement = f" and database = '{database}'"
+
+        if tables != "all":
+            statement = f"""
+                {statement} and target_relation in ({tables})
+            """
+        else:
+            statement = f" {statement} and is_active is true"
+
+        cursor.execute(
+            f"""
+                select
+                    id,
+                    source_schema,
+                    source_table,
+                    source_engine,
+                    target_schema,
+                    target_table,
+                    source_incremental_column,
+                    target_incremental_column,
+                    read_batch_size,
+                    incremental_interval_delta,
+                    database,
+                    extraction_engine
+                from
+                    dataeng.relations_extraction
+                where
+                    1=1
+                    {statement};
+            """
+        )
+
+        structs = []
+
+        relations_extraction = cursor.fetchall()
+        for r in relations_extraction:
+            s = {
+                "id": r[0],
+                "source_schema": r[1],
+                "source_table": r[2],
+                "source_engine": r[3],
+                "target_schema": r[4],
+                "target_table": r[5],
+                "source_incremental_column": r[6],
+                "target_incremental_column": r[7],
+                "read_batch_size": r[8],
+                "incremental_interval_delta": r[9],
+                "database": r[10],
+                "extraction_engine": r[11],
+                "columns": [],
+                "columns_upsert": [],
+            }
+            cursor.execute(
+                f"""
+                    select
+                        source_name,
+                        target_name,
+                        source_type,
+                        target_type,
+                        is_upsert
+                    from
+                        dataeng.relations_colums_extraction
+                    where
+                        relation_id = {r[0]}
+                        and is_active is true
+                    order by source_order asc;
+                """
+            )
+            columns = cursor.fetchall()
+            for c in columns:
+                s["columns"].append(
+                    {
+                        "source_name": c[0],
+                        "target_name": c[1],
+                        "source_type": c[2],
+                        "target_type": c[3],
+                    }
+                )
+                if c[4] is True:
+                    s["columns_upsert"].append(c[1])
+
+            structs.append(s)
+
+        cursor.close()
+
+        return structs
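
For reference, each element returned by Struct.get is a dict shaped like the following (keys come from the two queries above; values are invented):

    {
        "id": 42,
        "source_schema": "public",
        "source_table": "orders",
        "source_engine": "postgres",
        "target_schema": "raw",
        "target_table": "orders",
        "source_incremental_column": "updated_at",
        "target_incremental_column": "updated_at",
        "read_batch_size": 100000,
        "incremental_interval_delta": "1 hour",
        "database": "mydb",
        "extraction_engine": "prefect",
        "columns": [
            {"source_name": "id", "target_name": "id", "source_type": "int", "target_type": "bigint"},
        ],
        "columns_upsert": ["id"],
    }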

maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/Types.py (new file)
@@ -0,0 +1,22 @@
+LOCAL = "local"
+DEV = "dev"
+PROD = "prod"
+
+FULL = "full"
+INCREMENTAL = "incremental"
+
+PREFECT_DMS = "prefect-dms"
+PREFECT = "prefect"
+
+SAVED_S3 = "saved-s3"
+SAVED_REDSHIFT = "saved-redshift"
+
+
+def check_if_env_is_valid(env):
+    if env not in [LOCAL, DEV, PROD]:
+        raise ValueError("env must be 'local', 'dev' or 'prod'")
+
+
+def check_if_option_is_valid(option):
+    if option not in [FULL, INCREMENTAL, None]:
+        raise ValueError("option must be 'full' or 'incremental'")
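
The validators fail fast on unknown values; None is an accepted load option so the Reader can decide on its own:

    check_if_env_is_valid("dev")        # ok
    check_if_env_is_valid("staging")    # ValueError: env must be 'local', 'dev' or 'prod'
    check_if_option_is_valid(None)      # ok: Reader infers full vs incremental
    check_if_option_is_valid("weekly")  # ValueError: option must be 'full' or 'incremental'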

maisaedu-poormans-dms-1.0.57/maisaedu_poormans_dms/redshift_migration/MigratorRedshiftWriter.py → maisaedu-poormans-dms-1.0.59/maisaedu_poormans_dms/redshift_migration/Writer.py
@@ -1,4 +1,8 @@
-class MigratorRedshiftWriter:
+from .Types import SAVED_REDSHIFT
+from .Services.ExtractionOperation import ExtractionOperation
+
+
+class Writer:
     def __init__(self, struct, migrator_redshift_connector):
         self.struct = struct
         self.migrator_redshift_connector = migrator_redshift_connector
@@ -9,23 +13,30 @@
             return "SERIALIZETOJSON"
         return ""
 
-    def create_statement_upsert(self, temp_target_relation):
+    def create_statement_upsert(self, target_relation, temp_target_relation):
         statement_upsert = ""
         for c in self.struct["columns_upsert"]:
             statement_upsert = (
                 statement_upsert
-                + f'and {self.struct["target_relation"]}.{c} = {temp_target_relation}.{c} '
+                + f"""
+                    and {target_relation}."{c}" = {temp_target_relation}."{c}"
+                """
             )
 
         return statement_upsert
 
     def save_data(self, is_upsert, target_cursor, path_files_to_insert):
-        temp_target_relation = f'"temp_{self.struct["target_relation"]}"'
+        temp_target_relation = (
+            f'"temp_{self.struct["target_schema"]}_{self.struct["target_table"]}"'
+        )
+        target_relation = (
+            f'"{self.struct["target_schema"]}"."{self.struct["target_table"]}"'
+        )
 
         target_cursor.execute(
             f"""
-                CREATE TEMP TABLE {temp_target_relation} (LIKE {self.struct["target_relation"]});
-            """
+                CREATE TEMP TABLE {temp_target_relation} (LIKE {target_relation});
+            """
         )
 
         target_cursor.execute(
@@ -42,34 +53,38 @@
         if is_upsert is True:
             target_cursor.execute(
                 f"""
-                    DELETE FROM {self.struct["target_relation"]}
+                    DELETE FROM {target_relation}
                     USING {temp_target_relation}
                     WHERE 1=1
-                    {self.create_statement_upsert(temp_target_relation)}
+                    {self.create_statement_upsert(target_relation, temp_target_relation)}
                     ;
                 """
             )
         else:
             target_cursor.execute(
                 f"""
-                    DELETE FROM {self.struct["target_relation"]};
+                    DELETE FROM {target_relation};
                 """
             )
 
         target_cursor.execute(
             f"""
-                INSERT INTO {self.struct["target_relation"]}
+                INSERT INTO {target_relation}
                 SELECT * FROM {temp_target_relation};
             """
         )
 
         self.migrator_redshift_connector.target_conn.commit()
 
-        target_cursor.execute(f"""DROP TABLE {temp_target_relation};""")
+        target_cursor.execute(
+            f"""
+                DROP TABLE {temp_target_relation};
+            """
+        )
 
         self.migrator_redshift_connector.target_conn.commit()
 
-    def save_to_redshift(self, path_files_to_insert):
+    def save_to_redshift(self, url):
         self.migrator_redshift_connector.connect_target()
         cursor = self.migrator_redshift_connector.target_conn.cursor()
 
@@ -84,9 +99,17 @@
 
         self.save_data(
             target_cursor=cursor,
-            path_files_to_insert=path_files_to_insert,
+            path_files_to_insert=url,
             is_upsert=is_upsert,
         )
 
         cursor.close()
+
+        ExtractionOperation(
+            conn=self.migrator_redshift_connector.target_conn,
+        ).update(
+            url=url,
+            status=SAVED_REDSHIFT,
+        )
+
         self.migrator_redshift_connector.target_conn.close()
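
In outline, the upsert path of save_data now runs against schema-qualified names (identifiers here are illustrative; the COPY into the temp table, whose SERIALIZETOJSON option comes from the unchanged helper above, happens in unchanged lines of the method):

    # CREATE TEMP TABLE "temp_raw_orders" (LIKE "raw"."orders");
    # COPY the staged S3 parquet files into "temp_raw_orders"
    # DELETE FROM "raw"."orders" USING "temp_raw_orders"
    #   WHERE 1=1 and "raw"."orders"."id" = "temp_raw_orders"."id";
    # INSERT INTO "raw"."orders" SELECT * FROM "temp_raw_orders";
    # DROP TABLE "temp_raw_orders";

On success, save_to_redshift marks the operation's url as 'saved-redshift' via ExtractionOperation.update.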

{maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: maisaedu-poormans-dms
-Version: 1.0.57
+Version: 1.0.59
 Summary: A library for making database migration tasks, for +A Education
 Home-page: UNKNOWN
 Author: A+ Educação

{maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/maisaedu_poormans_dms.egg-info/SOURCES.txt
@@ -20,8 +20,12 @@ maisaedu_poormans_dms/postgres_migration/MigratorRow/MigratorRow.py
 maisaedu_poormans_dms/postgres_migration/MigratorRow/Reader.py
 maisaedu_poormans_dms/postgres_migration/MigratorRow/Writer.py
 maisaedu_poormans_dms/postgres_migration/MigratorRow/__init__.py
+maisaedu_poormans_dms/redshift_migration/Connector.py
 maisaedu_poormans_dms/redshift_migration/MigratorRedshift.py
-maisaedu_poormans_dms/redshift_migration/MigratorRedshiftConnector.py
-maisaedu_poormans_dms/redshift_migration/MigratorRedshiftReader.py
-maisaedu_poormans_dms/redshift_migration/MigratorRedshiftWriter.py
-maisaedu_poormans_dms/redshift_migration/__init__.py
+maisaedu_poormans_dms/redshift_migration/Reader.py
+maisaedu_poormans_dms/redshift_migration/Types.py
+maisaedu_poormans_dms/redshift_migration/Writer.py
+maisaedu_poormans_dms/redshift_migration/__init__.py
+maisaedu_poormans_dms/redshift_migration/Services/ExtractionOperation.py
+maisaedu_poormans_dms/redshift_migration/Services/Struct.py
+maisaedu_poormans_dms/redshift_migration/Services/__init__.py

{maisaedu-poormans-dms-1.0.57 → maisaedu-poormans-dms-1.0.59}/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name="maisaedu-poormans-dms",
-    version="1.0.57",
+    version="1.0.59",
     description="A library for making database migration tasks, for +A Education",
     license="MIT License",
     author="A+ Educação",

maisaedu-poormans-dms-1.0.57/maisaedu_poormans_dms/redshift_migration/MigratorRedshift.py (deleted)
@@ -1,123 +0,0 @@
-from .MigratorRedshiftConnector import MigratorRedshiftConnector
-from .MigratorRedshiftReader import MigratorRedshiftReader
-from .MigratorRedshiftWriter import MigratorRedshiftWriter
-
-
-class MigratorRedshift:
-    def __init__(
-        self,
-        env=None,
-        s3_credentials=None,
-        struct=None,
-        source_credentials=None,
-    ):
-        self.migrator_redshift_connector = MigratorRedshiftConnector(
-            env=env,
-            s3_credentials=s3_credentials,
-            source_credentials=source_credentials,
-        )
-
-        self.migrator_redshift_reader = MigratorRedshiftReader(
-            s3_credentials=s3_credentials,
-            struct=struct,
-            migrator_redshift_connector=self.migrator_redshift_connector,
-        )
-
-        self.migrator_redshift_writer = MigratorRedshiftWriter(
-            struct=struct,
-            migrator_redshift_connector=self.migrator_redshift_connector,
-        )
-
-        self.source_credentials = source_credentials
-        self.struct = struct
-        self.s3_credentials = s3_credentials
-        self.env = env
-
-    def save_data_to_s3(self):
-        return self.migrator_redshift_reader.save_data_to_s3()
-
-    def save_to_redshift(self, path_files_to_insert):
-        return self.migrator_redshift_writer.save_to_redshift(path_files_to_insert)
-
-    def get_structs_source_to_target(self, database, tables="all"):
-        self.migrator_redshift_connector.connect_target()
-
-        statement = f" and database = '{database}'"
-
-        if tables != "all":
-            statement = f"""
-                {statement} and id in ({tables})
-            """
-        else:
-            statement = f" {statement} and is_active is true"
-
-        cur = self.migrator_redshift_connector.target_conn.cursor()
-        cur.execute(
-            f"""
-                select
-                    id,
-                    source_relation,
-                    source_engine,
-                    target_relation,
-                    source_incremental_column,
-                    target_incremental_column,
-                    read_batch_size,
-                    incremental_interval_delta,
-                    database
-                from
-                    dataeng.relations_extraction
-                where
-                    1=1
-                    {statement};
-            """
-        )
-
-        structs = []
-
-        relations_extraction = cur.fetchall()
-        for r in relations_extraction:
-            s = {
-                "source_relation": r[1],
-                "source_engine": r[2],
-                "target_relation": r[3],
-                "source_incremental_column": r[4],
-                "target_incremental_column": r[5],
-                "read_batch_size": r[6],
-                "incremental_interval_delta": r[7],
-                "database": r[8],
-                "columns": [],
-                "columns_upsert": [],
-            }
-            cur.execute(
-                f"""
-                    select
-                        source_name,
-                        target_name,
-                        source_type,
-                        target_type,
-                        is_upsert
-                    from
-                        dataeng.relations_colums_extraction
-                    where
-                        relation_id = {r[0]}
-                    order by source_order asc;
-                """
-            )
-            columns = cur.fetchall()
-            for c in columns:
-                s["columns"].append(
-                    {
-                        "source_name": c[0],
-                        "target_name": c[1],
-                        "source_type": c[2],
-                        "target_type": c[3],
-                    }
-                )
-                if c[4] is True:
-                    s["columns_upsert"].append(c[1])
-
-            structs.append(s)
-
-        self.migrator_redshift_connector.close_target()
-
-        return structs