icsDataValidation 1.0.425__tar.gz → 1.0.428__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/PKG-INFO +1 -1
  2. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/comparison_service.py +2 -3
  3. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/database_services/azure_service.py +3 -3
  4. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/database_services/snowflake_service.py +1 -1
  5. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/database_services/sqlserver_service.py +14 -14
  6. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/database_services/teradata_service.py +4 -4
  7. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/result_service.py +53 -50
  8. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation.egg-info/PKG-INFO +1 -1
  9. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/MANIFEST.in +0 -0
  10. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/configuration.py +0 -0
  11. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/connection_setups/__init__.py +0 -0
  12. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/connection_setups/azure_connection_setup.py +0 -0
  13. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/connection_setups/databricks_connection_setup.py +0 -0
  14. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/connection_setups/exasol_connection_setup.py +0 -0
  15. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/connection_setups/oracle_connection_setup.py +0 -0
  16. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/connection_setups/snowflake_connection_setup.py +0 -0
  17. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/connection_setups/sqlserver_connection_setup.py +0 -0
  18. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/connection_setups/teradata_connection_setup.py +0 -0
  19. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/core/__init__.py +0 -0
  20. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/core/database_objects.py +0 -0
  21. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/core/object_comparison.py +0 -0
  22. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/input_parameters/__init__.py +0 -0
  23. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/input_parameters/testing_tool_params.py +0 -0
  24. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/main.py +0 -0
  25. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/output_parameters/__init__.py +0 -0
  26. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/output_parameters/result_params.py +0 -0
  27. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/__init__.py +0 -0
  28. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/database_services/__init__.py +0 -0
  29. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/database_services/databricks_hive_metastore_service.py +0 -0
  30. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/database_services/databricks_unity_catalog_service.py +0 -0
  31. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/database_services/exasol_service.py +0 -0
  32. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/database_services/oracle_service.py +0 -0
  33. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/initialization_service.py +0 -0
  34. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/system_service.py +0 -0
  35. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/services/testset_service.py +0 -0
  36. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/utils/__init__.py +0 -0
  37. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/utils/file_util.py +0 -0
  38. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/utils/logger_util.py +0 -0
  39. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/utils/pandas_util.py +0 -0
  40. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/utils/parallelization_util.py +0 -0
  41. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation/utils/sql_util.py +0 -0
  42. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation.egg-info/SOURCES.txt +0 -0
  43. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation.egg-info/dependency_links.txt +0 -0
  44. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation.egg-info/not-zip-safe +0 -0
  45. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation.egg-info/requires.txt +0 -0
  46. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/icsDataValidation.egg-info/top_level.txt +0 -0
  47. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/pyproject.toml +0 -0
  48. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/setup.cfg +0 -0
  49. {icsdatavalidation-1.0.425 → icsdatavalidation-1.0.428}/setup.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: icsDataValidation
3
- Version: 1.0.425
3
+ Version: 1.0.428
4
4
  Summary: Add your description here
5
5
  Author-email: initions <ICSMC_EXT_PYPIORG@accenture.com>
6
6
  License: MIT
@@ -323,9 +323,8 @@ class ComparisonService(TestingToolParams):
323
323
  elif self.use_group_by_columns and self.migration_config["GROUP_BY_AGGREGATION"]["GROUP_BY_COLUMNS"]:
324
324
  logger.info(f"[{self.comp_id}] START Group-By-Comparison - with option 2 (group_by_columns defined as a list for all objects)")
325
325
  global_group_by_columns=self.migration_config["GROUP_BY_AGGREGATION"]["GROUP_BY_COLUMNS"]
326
- global_group_by_columns_in_object=[group_by_column for group_by_column in global_group_by_columns if group_by_column in self.result_params.intersection_columns_trgt_src]
327
- if global_group_by_columns_in_object:
328
- object_group_by_columns=[global_group_by_columns_in_object]
326
+ object_group_by_columns=[group_by_column for group_by_column in global_group_by_columns if group_by_column in self.result_params.intersection_columns_trgt_src]
327
+ if object_group_by_columns:
329
328
  object_group_by_aggregation_columns=["all"]
330
329
  object_group_by_aggregation_type='various'
331
330
 
@@ -309,10 +309,10 @@ class AzureService:
309
309
 
310
310
  try:
311
311
  for single_query in query_list:
312
+ query_result=self.azure_connection.execute(single_query)
313
+
312
314
  if return_as_pdf:
313
- query_result = pandas.io.sql.read_sql(single_query, self.azure_connection)
314
- else:
315
- query_result=self.azure_connection.execute(single_query)
315
+ query_result = pd.DataFrame(query_result)
316
316
 
317
317
  results.append(query_result)
318
318
 
@@ -556,7 +556,7 @@ class SnowflakeService:
556
556
  aggregates += f', COUNT(DISTINCT LOWER(TRY_TO_NUMBER({column_identifier}::VARCHAR))) AS "COUNTDISTINCT_{column}"'
557
557
 
558
558
  elif not only_numeric and column_datatype.lower() in self.snowflake_datatype_mapping["boolean"]:
559
- aggregates += f", MAX(SELECT COUNT(*) FROM {object.database}.{object.schema}.{object.name} WHERE {column_identifier} = true)::VARCHAR || '_' || MAX(SELECT COUNT(*) FROM {object.database}.{object.schema}.{object.name} WHERE {column_identifier} = false) :: VARCHAR AS \"AGGREGATEBOOLEAN_{column}\""
559
+ aggregates += f", COUNT(CASE WHEN {column_identifier} = true THEN 1 ELSE NULL END)::VARCHAR || '_' || COUNT(CASE WHEN {column_identifier} = false THEN 1 ELSE NULL END)::VARCHAR AS \"AGGREGATEBOOLEAN_{column}\""
560
560
 
561
561
  # else: Additional Data Types: VARIANT OBJECT ARRAY GEOGRAPHY
562
562
 
@@ -81,21 +81,21 @@ class SQLServerService:
81
81
  str: in clause as string
82
82
  """
83
83
  values = list(key_filters.values())
84
- in_clause_values = "('"
84
+ in_clause_values = "'"
85
85
  for j in range(len(values[0])):
86
86
  for value in values:
87
87
  in_clause_values += str(value[j]) + "','"
88
- in_clause_values = in_clause_values[:-2] + "),('"
89
- in_clause_values = in_clause_values[:-3] + ")"
88
+ in_clause_values = in_clause_values[:-2] + ",'"
89
+ in_clause_values = in_clause_values[:-3] + "'"
90
90
 
91
91
  in_clause_cols = " AND (("
92
92
  for key in key_filters.keys():
93
93
  if key in numeric_columns:
94
- in_clause_cols += f"""ROUND({key.replace("'", "")}, {numeric_scale})""" + ","
94
+ in_clause_cols += f"""cast(ROUND({key.replace("'", "")}, {numeric_scale}) as numeric(38, {numeric_scale}))""" + ","
95
95
  else:
96
96
  in_clause_cols += key.replace("'", "") + ","
97
97
  in_clause_cols = in_clause_cols[:-1] + ")"
98
- in_clause = in_clause_cols + " in (" + in_clause_values + ")"
98
+ in_clause = in_clause_cols + " in (" + in_clause_values + "))"
99
99
  return in_clause
100
100
 
101
101
  def _get_column_clause(self, column_list: list, columns_datatype: list, numeric_scale, key_columns,
@@ -484,7 +484,7 @@ class SQLServerService:
484
484
  aggregates += f", COUNT(DISTINCT LOWER(TRY_CONVERT(VARCHAR,{column}))) AS COUNTDISTINCT_{column}"
485
485
 
486
486
  elif column_datatype.lower() in self.sqlserver_datatype_mapping["boolean"]:
487
- aggregates += f", CONCAT(CONCAT(CONVERT(VARCHAR,COUNT(CASE WHEN {column} = 1 THEN 1 ELSE NULL END)) , '_'), CONVERT(VARCHAR, COUNT(CASE WHEN {column} = 0 THEN 1 ELSE NULL END)))) AS AGGREGATEBOOLEAN_{column}"
487
+ aggregates += f", CONCAT(CONCAT(CONVERT(VARCHAR,COUNT(CASE WHEN {column} = 1 THEN 1 ELSE NULL END)) , '_'), CONVERT(VARCHAR, COUNT(CASE WHEN {column} = 0 THEN 1 ELSE NULL END))) AS AGGREGATEBOOLEAN_{column}"
488
488
 
489
489
  #else: Additional Data Types: image , sql_variant, uniqueidentifier, xml, cursor, table, column_datatype.lower() == 'bit' or
490
490
 
@@ -614,7 +614,7 @@ class SQLServerService:
614
614
  aggregates += f", COUNT(DISTINCT LOWER(TRY_CONVERT(VARCHAR,{column}))) AS COUNTDISTINCT_{column}"
615
615
 
616
616
  elif not only_numeric and column_datatype.lower() in self.sqlserver_datatype_mapping["boolean"]:
617
- aggregates += f", CONCAT(CONCAT(CONVERT(VARCHAR,COUNT(CASE WHEN {column} = 1 THEN 1 ELSE NULL END)) , '_'), CONVERT(VARCHAR, COUNT(CASE WHEN {column} = 0 THEN 1 ELSE NULL END)))) AS AGGREGATEBOOLEAN_{column}"
617
+ aggregates += f", CONCAT(CONCAT(CONVERT(VARCHAR,COUNT(CASE WHEN {column} = 1 THEN 1 ELSE NULL END)) , '_'), CONVERT(VARCHAR, COUNT(CASE WHEN {column} = 0 THEN 1 ELSE NULL END))) AS AGGREGATEBOOLEAN_{column}"
618
618
 
619
619
  # else: Additional Data Types: VARIANT OBJECT ARRAY GEOGRAPHY
620
620
 
@@ -744,7 +744,7 @@ class SQLServerService:
744
744
  SELECT TOP ({sample_count}) {column_clause}
745
745
  FROM {object.schema}.{object.name}
746
746
  {where_clause}{in_clause}
747
- ORDER BY NEWID(), {keys};
747
+ ORDER BY {keys};
748
748
  """
749
749
  elif key_intersection != [] and not is_dedicated:
750
750
  keys = str(key_intersection)[1:-1].replace("'", "")
@@ -764,7 +764,7 @@ class SQLServerService:
764
764
  SELECT TOP ({sample_count}) {column_clause}
765
765
  FROM {object.schema}.{object.name}
766
766
  {where_clause}{in_clause}
767
- ORDER BY NEWID(), {keys};
767
+ ORDER BY {keys};
768
768
  """
769
769
  else:
770
770
  column_intersections = list(set(column_intersections) - set(exclude_columns))
@@ -834,12 +834,12 @@ class SQLServerService:
834
834
 
835
835
  for single_query in query_list:
836
836
  try:
837
+ query_result=cursor.execute(single_query).fetchall()
838
+ columns = [column[0] for column in cursor.description]
839
+ query_result = [dict(zip(columns, row)) for row in query_result]
840
+
837
841
  if return_as_pdf:
838
- query_result = pandas.io.sql.read_sql(single_query, self.sqlserver_connection)
839
- else:
840
- query_result=cursor.execute(single_query).fetchall()
841
- columns = [column[0] for column in cursor.description]
842
- query_result = [dict(zip(columns, row)) for row in query_result]
842
+ query_result = pd.DataFrame(query_result)
843
843
 
844
844
  results.append(query_result)
845
845
  except Exception as err:
@@ -641,11 +641,11 @@ class TeradataService(object):
641
641
 
642
642
  for single_query in query_list:
643
643
  try:
644
+ cursor=self.teradata_connection.cursor()
645
+ query_result=cursor.execute(single_query)
646
+
644
647
  if return_as_pdf:
645
- query_result = pd.read_sql(query, self.teradata_connection)
646
- else:
647
- cursor=self.teradata_connection.cursor()
648
- query_result=cursor.execute(single_query)
648
+ query_result = pd.DataFrame(query_result)
649
649
 
650
650
  results.append(query_result)
651
651
 
@@ -81,7 +81,7 @@ class ResultService(TestingToolParams):
81
81
 
82
82
  @staticmethod
83
83
  def _compare_column_datatypes(
84
- src_datatype: str,
84
+ src_datatype: str,
85
85
  trgt_datatype: str
86
86
  ):
87
87
  """
@@ -96,7 +96,10 @@ class ResultService(TestingToolParams):
96
96
  elif "DATATYPE_MAPPING" in TestingToolParams.migration_config["MAPPING"] and TestingToolParams.migration_config["MAPPING"]["DATATYPE_MAPPING"]:
97
97
  datatype_equal = False
98
98
  for datatype_mapping in TestingToolParams.migration_config["MAPPING"]["DATATYPE_MAPPING"]:
99
- if src_datatype in datatype_mapping["src_datatypes"] and trgt_datatype in datatype_mapping["trgt_datatypes"]:
99
+ if (
100
+ src_datatype.lower() in [datatype.lower() for datatype in datatype_mapping["src_datatypes"]]
101
+ and trgt_datatype.lower() in [datatype.lower() for datatype in datatype_mapping["trgt_datatypes"]]
102
+ ):
100
103
  datatype_equal = True
101
104
  else:
102
105
  datatype_equal = False
@@ -106,8 +109,8 @@ class ResultService(TestingToolParams):
106
109
 
107
110
  @staticmethod
108
111
  def prepare_column_level_result(
109
- column: str,
110
- exclude_columns: list,
112
+ column: str,
113
+ exclude_columns: list,
111
114
  result_params: ResultParams
112
115
  ) -> dict:
113
116
  """
@@ -122,7 +125,7 @@ class ResultService(TestingToolParams):
122
125
  aggregation_result_trgt = None
123
126
  aggregation_equal = None
124
127
  aggregation_tolerated = None
125
- aggregation_difference_trgt_minus_src = None
128
+ aggregation_difference_trgt_minus_src = None
126
129
  count_nulls_src = None
127
130
  count_nulls_trgt = None
128
131
  count_nulls_equal = None
@@ -146,11 +149,11 @@ class ResultService(TestingToolParams):
146
149
  in_excluded = True
147
150
  else:
148
151
  in_excluded = False
149
-
152
+
150
153
  if in_src and in_trgt :
151
154
  in_sync=True
152
155
  if result_params.src_columns_aggregate != {}:
153
-
156
+
154
157
  if column in result_params.src_columns_aggregate:
155
158
  aggregation_type_src = result_params.src_columns_aggregate[column][0]
156
159
  aggregation_result_src = result_params.src_columns_aggregate[column][1]
@@ -158,48 +161,48 @@ class ResultService(TestingToolParams):
158
161
 
159
162
  if column in result_params.trgt_columns_aggregate:
160
163
  aggregation_type_trgt = result_params.trgt_columns_aggregate[column][0]
161
- aggregation_result_trgt = result_params.trgt_columns_aggregate[column][1]
164
+ aggregation_result_trgt = result_params.trgt_columns_aggregate[column][1]
162
165
  count_nulls_trgt = result_params.trgt_columns_aggregate[column][2]
163
166
 
164
167
  if column in result_params.aggregation_differences_trgt_minus_src and result_params.aggregation_differences_trgt_minus_src[column] and not result_params.aggregation_differences_trgt_minus_src[column] == '0_0':
165
168
  aggregation_equal = False
166
169
  aggregation_difference_trgt_minus_src = result_params.aggregation_differences_trgt_minus_src[column]
167
-
170
+
168
171
  elif aggregation_result_src is not None and aggregation_result_trgt is not None and aggregation_type_src and aggregation_type_trgt and aggregation_type_src == aggregation_type_trgt:
169
172
  aggregation_equal = True
170
-
173
+
171
174
  if column in result_params.aggregation_differences_trgt_minus_src and result_params.aggregation_differences_trgt_minus_src[column] == '0_0':
172
175
  aggregation_difference_trgt_minus_src='0_0'
173
-
174
- else:
176
+
177
+ else:
175
178
  aggregation_difference_trgt_minus_src='0'
176
-
179
+
177
180
  if aggregation_type_src and aggregation_type_trgt and aggregation_type_src == aggregation_type_trgt:
178
181
  aggregation_type = aggregation_type_src
179
-
182
+
180
183
  '''
181
184
  Comparison Based on Decimal Places
182
185
  Logic is defined in migration_config.json
183
186
  '''
184
187
  aggregation_tolerated = aggregation_equal
185
-
188
+
186
189
  if 'DATATYPE_TOLERANCE' in TestingToolParams.migration_config['MAPPING'].keys():
187
190
  if (
188
- src_datatype in TestingToolParams.migration_config['MAPPING']['DATATYPE_TOLERANCE'].keys()
189
- and aggregation_type == 'SUM'
191
+ src_datatype in TestingToolParams.migration_config['MAPPING']['DATATYPE_TOLERANCE'].keys()
192
+ and aggregation_type == 'SUM'
190
193
  and abs(Decimal(aggregation_difference_trgt_minus_src)) <= Decimal(TestingToolParams.migration_config['MAPPING']['DATATYPE_TOLERANCE'][src_datatype])
191
194
  ):
192
195
  aggregation_tolerated = True
193
- else :
196
+ else :
194
197
  aggregation_tolerated = None
195
-
198
+
196
199
  if count_nulls_src is not None and count_nulls_trgt is not None and count_nulls_src==count_nulls_trgt:
197
200
  count_nulls_equal = True
198
201
  count_nulls_difference_trgt_minus_src = '0'
199
202
  elif count_nulls_src is not None and count_nulls_trgt is not None:
200
203
  count_nulls_equal = False
201
204
  count_nulls_difference_trgt_minus_src = int(count_nulls_trgt)-int(count_nulls_src)
202
-
205
+
203
206
  datatype_equal = ResultService._compare_column_datatypes(src_datatype, trgt_datatype)
204
207
 
205
208
  column_comparison_result = {
@@ -224,15 +227,15 @@ class ResultService(TestingToolParams):
224
227
  }
225
228
 
226
229
  return column_comparison_result
227
-
230
+
228
231
  @staticmethod
229
232
  def prepare_object_level_result(
230
- src_object: DatabaseObject,
231
- trgt_object: DatabaseObject,
232
- src_filter: str,
233
- trgt_filter: str,
234
- exclude_columns: list,
235
- result_params: ResultParams,
233
+ src_object: DatabaseObject,
234
+ trgt_object: DatabaseObject,
235
+ src_filter: str,
236
+ trgt_filter: str,
237
+ exclude_columns: list,
238
+ result_params: ResultParams,
236
239
  column_level_comparison_result: dict
237
240
  ) -> dict:
238
241
  """
@@ -268,9 +271,9 @@ class ResultService(TestingToolParams):
268
271
  "ROW_COUNTS_EQUAL": result_params.row_counts_equal,
269
272
  "SRC_ROW_COUNT": result_params.src_row_count,
270
273
  "TRGT_ROW_COUNT": result_params.trgt_row_count,
271
- "ALL_COUNT_NULLS_EQUAL": result_params.all_count_nulls_equal,
272
- "AGGREGATIONS_EQUAL": result_params.aggregations_equal,
273
- "AGGREGATIONS_EQUAL_TOLERATED": aggregations_equal_tolerated,
274
+ "ALL_COUNT_NULLS_EQUAL": result_params.all_count_nulls_equal,
275
+ "AGGREGATIONS_EQUAL": result_params.aggregations_equal,
276
+ "AGGREGATIONS_EQUAL_TOLERATED": aggregations_equal_tolerated,
274
277
  "SRC_ERROR": result_params.src_error_dict,
275
278
  "TRGT_ERROR": result_params.trgt_error_dict,
276
279
  "GROUP_BY_COLUMNS": result_params.object_group_by_columns,
@@ -280,13 +283,13 @@ class ResultService(TestingToolParams):
280
283
  "GROUP_BY_VALUES_WITH_MISMATCHES": result_params.group_by_values_with_mismatches,
281
284
  "COLUMNS_WITH_MISMATCH": result_params.columns_with_mismatch,
282
285
  "GROUP_BY_DIFF_DICT": result_params.group_by_diff_dict,
283
- "SRC_GROUP_BY_ERROR": result_params.src_group_by_error,
286
+ "SRC_GROUP_BY_ERROR": result_params.src_group_by_error,
284
287
  "TRGT_GROUP_BY_ERROR": result_params.trgt_group_by_error,
285
- "SAMPLES_COMPARED": result_params.samples_compared,
286
- "SAMPLES_EQUAL": result_params.samples_equal,
287
- "SAMPLE_KEYS": result_params.trgt_key_filters,
288
- "SRC_SAMPLE": result_params.src_sample_dict,
289
- "TRGT_SAMPLE": result_params.trgt_sample_dict,
288
+ "SAMPLES_COMPARED": result_params.samples_compared,
289
+ "SAMPLES_EQUAL": result_params.samples_equal,
290
+ "SAMPLE_KEYS": result_params.trgt_key_filters,
291
+ "SRC_SAMPLE": result_params.src_sample_dict,
292
+ "TRGT_SAMPLE": result_params.trgt_sample_dict,
290
293
  "SRC_SAMPLE_QUERY": result_params.src_sample_query,
291
294
  "TRGT_SAMPLE_QUERY": result_params.trgt_sample_query,
292
295
  "SRC_SAMPLE_ERROR_DICT": result_params.src_sample_error_dict,
@@ -307,8 +310,8 @@ class ResultService(TestingToolParams):
307
310
 
308
311
  @staticmethod
309
312
  def prepare_object_level_live_result(
310
- object_level_comparison_result: dict,
311
- testing_tool_params: TestingToolParams,
313
+ object_level_comparison_result: dict,
314
+ testing_tool_params: TestingToolParams,
312
315
  ) -> dict:
313
316
  """
314
317
  Get object level live result dictionary from the object level comparison result and from the testing tool parameters.
@@ -324,7 +327,7 @@ class ResultService(TestingToolParams):
324
327
  "OBJECTS": object_level_comparison_result
325
328
  }
326
329
 
327
- return live_object_level_comparison_result
330
+ return live_object_level_comparison_result
328
331
 
329
332
  def determine_highlevel_results(self):
330
333
  """
@@ -368,9 +371,9 @@ class ResultService(TestingToolParams):
368
371
 
369
372
  # TODO add ALL_OBJECTS_NOT_ALTERED_DURING_COMPARISON flag
370
373
  #if any(object_level_comparison_result['NOT_ALTERED_DURING_COMPARISON_SRC'] == False for object_level_comparison_result in self.results["OBJECTS"]) or any(object_level_comparison_result['NOT_ALTERED_DURING_COMPARISON_TRGT'] == False for object_level_comparison_result in self.results["OBJECTS"]):
371
- # self.results["ALL_OBJECTS_NOT_ALTERED_DURING_COMPARISON"] = False
374
+ # self.results["ALL_OBJECTS_NOT_ALTERED_DURING_COMPARISON"] = False
372
375
  #else:
373
- # self.results["ALL_OBJECTS_NOT_ALTERED_DURING_COMPARISON"] = True
376
+ # self.results["ALL_OBJECTS_NOT_ALTERED_DURING_COMPARISON"] = True
374
377
 
375
378
  logger.info("\n****************************************************")
376
379
  logger.info(f"++++++++++++++++ Highlevel results ++++++++++++++++")
@@ -383,8 +386,8 @@ class ResultService(TestingToolParams):
383
386
  logger.info(f"ALL_CHECKSUMS_EQUAL: {self.results['ALL_CHECKSUMS_EQUAL']}")
384
387
  logger.info(f"ALL_SAMPLES_EQUAL: {self.results['ALL_SAMPLES_EQUAL']}")
385
388
  logger.info(f"ALL_OBJECTS_EQUAL: {self.results['ALL_OBJECTS_EQUAL']}")
386
- logger.info("****************************************************\n")
387
-
389
+ logger.info("****************************************************\n")
390
+
388
391
 
389
392
  def load_results_to_result_database(self):
390
393
  """
@@ -400,12 +403,12 @@ class ResultService(TestingToolParams):
400
403
  with database_service_result as db_service_result:
401
404
 
402
405
  load_results_function = self.load_results_function_mapping[result_system_selection_type.upper()]
403
-
406
+
404
407
  if load_results_function:
405
408
  load_results_function(db_service_result, self.results)
406
409
  else:
407
410
  raise ValueError(f"Result system selection of type '{result_system_selection_type}' not supported!")
408
-
411
+
409
412
  def load_results_to_snowflake(self, db_service_result, results: dict):
410
413
  """
411
414
  Load results to Snowflake.
@@ -474,9 +477,9 @@ class ResultService(TestingToolParams):
474
477
  blob_file_name = f"comparison_results_{start_time_utc}_{self.pipeline_name}_{self.pipeline_id}_{self.run_guid}.json"
475
478
  blob_name = f"{blob_file_prefix}/{blob_file_name}"
476
479
 
477
- try:
480
+ try:
478
481
  blob_service_client = BlobServiceClient.from_connection_string(conn_str=self.azure_storage_connection_string)
479
- except Exception as error:
482
+ except Exception as error:
480
483
  logger.info(f"FAILED to connect to Azure Blob Storage with error '{str(error)}'")
481
484
  raise error
482
485
 
@@ -510,13 +513,13 @@ class ResultService(TestingToolParams):
510
513
  bucket_file_info = f"comparison_results_{start_time_utc}_{self.pipeline_name}_{self.pipeline_id}_{self.run_guid}.json"
511
514
  bucket_file_name = f"{bucket_file_prefix}_-_{bucket_file_info}"
512
515
 
513
- try:
516
+ try:
514
517
  s3_service_client = boto3.client(
515
518
  's3',
516
519
  aws_access_key_id=self.aws_bucket_access_key,
517
520
  aws_secret_access_key=self.aws_bucket_secret_key
518
521
  )
519
- except Exception as error:
522
+ except Exception as error:
520
523
  logger.info(f"FAILED to connect to AWS S3 bucket with error '{str(error)}'")
521
524
  raise error
522
525
 
@@ -533,7 +536,7 @@ class ResultService(TestingToolParams):
533
536
 
534
537
  def write_results_to_git(self):
535
538
  """
536
- Write comparison results to GIT repository.
539
+ Write comparison results to GIT repository.
537
540
  In case of a remote pipeline run: Pull latest changes from GIT befor writing to the local repository, and push to the remote repository at the end.
538
541
  """
539
542
  logger.info(f"++++++++++++++++ WRITE comparison results to GIT repository")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: icsDataValidation
3
- Version: 1.0.425
3
+ Version: 1.0.428
4
4
  Summary: Add your description here
5
5
  Author-email: initions <ICSMC_EXT_PYPIORG@accenture.com>
6
6
  License: MIT