dc-python-sdk 1.5.28__tar.gz → 1.5.30__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. {dc_python_sdk-1.5.28/src/dc_python_sdk.egg-info → dc_python_sdk-1.5.30}/PKG-INFO +1 -1
  2. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/pyproject.toml +1 -1
  3. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/setup.cfg +1 -1
  4. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30/src/dc_python_sdk.egg-info}/PKG-INFO +1 -1
  5. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/app.py +6 -1
  6. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/handler.py +4 -1
  7. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/mapping.py +2 -2
  8. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/models/pipeline_details.py +1 -0
  9. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/pipeline.py +16 -23
  10. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/services/api.py +6 -3
  11. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/services/environment.py +1 -0
  12. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/LICENSE +0 -0
  13. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/README.md +0 -0
  14. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_python_sdk.egg-info/SOURCES.txt +0 -0
  15. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_python_sdk.egg-info/dependency_links.txt +0 -0
  16. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_python_sdk.egg-info/entry_points.txt +0 -0
  17. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_python_sdk.egg-info/requires.txt +0 -0
  18. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_python_sdk.egg-info/top_level.txt +0 -0
  19. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/__init__.py +0 -0
  20. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/cli.py +0 -0
  21. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/errors.py +0 -0
  22. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/__init__.py +0 -0
  23. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/ai.py +0 -0
  24. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/ai_http.py +0 -0
  25. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/models/__init__.py +0 -0
  26. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/models/enums.py +0 -0
  27. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/models/errors.py +0 -0
  28. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/models/log_templates.py +0 -0
  29. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/server.py +0 -0
  30. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/services/__init__.py +0 -0
  31. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/services/aws.py +0 -0
  32. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/services/loader.py +0 -0
  33. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/services/logger.py +0 -0
  34. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/src/services/session.py +0 -0
  35. {dc_python_sdk-1.5.28 → dc_python_sdk-1.5.30}/src/dc_sdk/types.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dc-python-sdk
3
- Version: 1.5.28
3
+ Version: 1.5.30
4
4
  Summary: Data Connector Python SDK
5
5
  Home-page: https://github.com/data-connector/dc-python-sdk
6
6
  Author: DataConnector
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "dc-python-sdk"
7
- version = "1.5.28"
7
+ version = "1.5.30"
8
8
  description = "Data Connector Python SDK"
9
9
  readme = "README.md"
10
10
  requires-python = ">=3.6"
@@ -1,6 +1,6 @@
1
1
  [metadata]
2
2
  name = dc-python-sdk
3
- version = 1.5.28
3
+ version = 1.5.30
4
4
  author = DataConnector
5
5
  author_email = josh@dataconnector.com
6
6
  description = A small example package
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dc-python-sdk
3
- Version: 1.5.28
3
+ Version: 1.5.30
4
4
  Summary: Data Connector Python SDK
5
5
  Home-page: https://github.com/data-connector/dc-python-sdk
6
6
  Author: DataConnector
@@ -26,7 +26,7 @@ def run_pipeline():
26
26
  pipeline_run_history_id = api.create_new_history(PipelineEnvironment.pipeline_id)
27
27
  PipelineEnvironment.set_pipeline_run_history_id(pipeline_run_history_id)
28
28
 
29
- pipeline_conductor = PipelineConductor(PipelineEnvironment.task, pipeline_id=PipelineEnvironment.pipeline_id, pipeline_run_history_id=PipelineEnvironment.pipeline_run_history_id)
29
+ pipeline_conductor = PipelineConductor(PipelineEnvironment.task, pipeline_id=PipelineEnvironment.pipeline_id, pipeline_run_history_id=PipelineEnvironment.pipeline_run_history_id, pipeline_mapping_id=PipelineEnvironment.pipeline_mapping_id if PipelineEnvironment.pipeline_mapping_id else None)
30
30
  pipeline_conductor.internal_log(pipeline_conductor.log_templates.INTERNAL_CONNECTOR_START.format(TASK.title(), pipeline_conductor.pipeline_details.connector_nm))
31
31
  except Exception as e:
32
32
  error_trace = traceback.format_exc()
@@ -65,6 +65,11 @@ def run_pipeline():
65
65
  if TASK == TasksEnum.SOURCE.value:
66
66
  pipeline_conductor.authenticate_source()
67
67
  pipeline_conductor.pipeline_details.increment_stage()
68
+
69
+ if not pipeline_conductor.pipeline_details.pipeline_mapping_json:
70
+ pipeline_conductor.configure_fields()
71
+
72
+ pipeline_conductor.pipeline_details.increment_stage()
68
73
  pipeline_conductor.get_data()
69
74
 
70
75
  pipeline_conductor.pipeline_details.increment_stage()
@@ -27,6 +27,9 @@ def handler(event, context):
27
27
  object_id = event['object_id'] if 'object_id' in event else None
28
28
  field_ids = event['mapping'] if 'mapping' in event else None
29
29
  options = event['options'] if 'options' in event else dict()
30
+ next_page = event['next_page'] if 'next_page' in event else None
31
+ n_rows = event['n_rows'] if 'n_rows' in event else None
32
+ filters = event['filters'] if 'filters' in event else None
30
33
 
31
34
  try:
32
35
  action_name = get_action_name(action)
@@ -41,7 +44,7 @@ def handler(event, context):
41
44
  results, message = mapping.get_fields(object_id, options)
42
45
  elif action == 2:
43
46
  results, message = mapping.get_five_row_preview(
44
- object_id, field_ids, options)
47
+ object_id, field_ids, options, n_rows, filters, next_page)
45
48
  elif action == 3:
46
49
  results, message = mapping.test_connection()
47
50
  else:
@@ -82,12 +82,12 @@ class Mapping():
82
82
 
83
83
  return [results, message]
84
84
 
85
- def get_five_row_preview(self, object_id, field_ids, options):
85
+ def get_five_row_preview(self, object_id, field_ids, options, n_rows = 5, filters = None, next_page = None):
86
86
  results = None
87
87
  message = None
88
88
 
89
89
  if self.connector.authenticate():
90
- results = self.connector.get_data(object_id, field_ids=field_ids, n_rows=5, options=options)
90
+ results = self.connector.get_data(object_id, field_ids=field_ids, n_rows=n_rows, filters=filters, next_page=next_page, options=options)
91
91
  message = "Retrieved 5 row preview"
92
92
  else:
93
93
  # If not authenticated, raise the authentication error.
@@ -11,6 +11,7 @@ class PipelineDetails:
11
11
  self.destination_object_id = row_data['destination_object_id']
12
12
  self.pipeline_mapping_json = row_data['pipeline_mapping_json']
13
13
  self.update_method_cd = row_data['update_method_cd']
14
+ self.primary_key_column_nm = row_data['primary_key_column_nm']
14
15
  self.source_connector_id = row_data['source_connector_id']
15
16
  self.source_connector_nm = row_data['source_connector_nm']
16
17
  self.destination_connector_id = row_data['destination_connector_id']
@@ -33,13 +33,14 @@ class PipelineConductor:
33
33
  self.prefix = kwargs.get("prefix")
34
34
  self.update_method = kwargs.get("update_method")
35
35
  self.mapping = kwargs.get("mapping")
36
+ self.pipeline_object_id = kwargs.get("pipeline_mapping_id")
36
37
  self.successful_keys = []
37
38
  self.config = PipelineEnvironment
38
39
  self.api = api or DataConnectorAPI()
39
40
  self.aws = aws or AwsService(PipelineEnvironment.aws_s3_bucket)
40
41
 
41
42
  # Set Pipeline Details
42
- self.pipeline_details = self._get_pipeline_details() if mode == "prod" else self._get_pipeline_details_dev()
43
+ self.pipeline_details = self._get_pipeline_details()
43
44
 
44
45
  self.credentials = self._get_credentials() if mode == "prod" else kwargs.get('credentials')
45
46
 
@@ -202,10 +203,22 @@ class PipelineConductor:
202
203
  if unhandled:
203
204
  print("An unrecognized issue has occurred on our side. Our team will be in contact within 24-48 hours, or try emailing support@dataconnector.com.")
204
205
 
205
-
206
206
  def update_history(self, payload):
207
207
  self.api.put("{0}/history".format(self.pipeline_run_history_id), payload)
208
208
 
209
+ def configure_fields(self):
210
+ # get fields from connector
211
+ fields = self.connector.get_fields(self.pipeline_details.source_object_id)
212
+
213
+ if not fields:
214
+ raise errors.NoFieldsFoundError(self.pipeline_details.source_object_id)
215
+
216
+ mapping = [ { "column": field["field_id"], "mapped": field["field_id"] } for field in fields ]
217
+
218
+ self.pipeline_details.pipeline_mapping_json = json.dumps(mapping)
219
+
220
+ self.api.create_pipeline_mapping(self.pipeline_id, self.pipeline_details.pipeline_mapping_json)
221
+
209
222
  def _process_rows(self, rows, max_allowed=None):
210
223
  # Check if we have a limit and need to truncate rows
211
224
  limit_reached = False
@@ -269,27 +282,7 @@ class PipelineConductor:
269
282
  return self.aws.decrypt_customer_data_object(encyption_txt, self.pipeline_details.customer_metadata_uuid)
270
283
 
271
284
  def _get_pipeline_details(self):
272
- return self.api.get_pipeline_details(str(self.pipeline_id), self.task, str(self.pipeline_run_history_id))
273
-
274
- def _get_pipeline_details_dev(self):
275
- class PipelineDetail:
276
- def __init__(self, object_id, filters, mapping, update_method) -> None:
277
- self.source_object_id = object_id
278
- self.destination_object_id = object_id
279
- self.pipeline_mapping_json = mapping
280
- self.update_method_cd = update_method
281
- self.source_credential_nm = "Test"
282
- self.destination_credential_nm = "Test"
283
- # TODO: UPDATE OPTIONS
284
- self.options = None
285
- # self.filtered_column_nm = filters['filtered_column_nm'] if 'filtered_'
286
- # self.start_selection_nm = filters['start_selection_nm']
287
- # self.start_value_txt =filters['start_value_txt']
288
- # self.end_selection_nm = filters['end_selection_nm']
289
- # self.end_value_txt = filters['end_value_txt']
290
- # self.timezone_offset_nbr = filters['timezone_offset_nbr']
291
-
292
- return PipelineDetail(self.object_id, self.filters, self.mapping, self.update_method)
285
+ return self.api.get_pipeline_details(str(self.pipeline_id), self.task, str(self.pipeline_run_histoy_id), pipeline_mapping_id=self.pipeline_mapping_id)
293
286
 
294
287
  def _get_batch_row_count(self):
295
288
  results = self.connector.get_data(
@@ -45,13 +45,16 @@ class DataConnectorAPI:
45
45
 
46
46
  self.post('logv2', payload)
47
47
 
48
- def get_pipeline_details(self, pipeline_id: str, task, pipeline_run_history_id: str):
49
- json = self.get(pipeline_id)
48
+ def get_pipeline_details(self, pipeline_id: str, task, pipeline_run_history_id: str, pipeline_mapping_id: str = None):
49
+ url = f"{pipeline_id}"
50
+ if pipeline_object_id:
51
+ url = f"{url}?PipelineMappingID={pipeline_mapping_id}"
52
+ json = self.get(url)
50
53
 
51
54
  return PipelineDetails(json, task, pipeline_id, pipeline_run_history_id)
52
55
 
53
56
  def create_new_history(self, pipeline_id):
54
- response = self.post(f"{pipeline_id}/history", None)
57
+ response = self.post(f"{pipeline_id}/history", {"PipelineMappingID": self.pipeline_mapping_id})
55
58
 
56
59
  return response['PipelineRunHistoryID']
57
60
 
@@ -27,6 +27,7 @@ class PipelineEnvironment:
27
27
  task_id: ClassVar[str] = None
28
28
  source_endpoint: ClassVar[str] = os.getenv("SOURCE_ENDPOINT") or "http://localhost:5000"
29
29
  destination_endpoint: ClassVar[str] = os.getenv("DESTINATION_ENDPOINT") or "http://localhost:5001"
30
+ pipeline_mapping_id: ClassVar[str] = os.getenv("PIPELINE_MAPPING_ID") or ""
30
31
 
31
32
  @staticmethod
32
33
  def validate_environment():
File without changes
File without changes