dc-python-sdk 1.5.9__tar.gz → 1.5.10__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dc_python_sdk-1.5.9/src/dc_python_sdk.egg-info → dc_python_sdk-1.5.10}/PKG-INFO +2 -1
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10}/pyproject.toml +3 -2
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10}/setup.cfg +2 -1
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10/src/dc_python_sdk.egg-info}/PKG-INFO +2 -1
- dc_python_sdk-1.5.10/src/dc_python_sdk.egg-info/SOURCES.txt +32 -0
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10}/src/dc_python_sdk.egg-info/requires.txt +1 -0
- dc_python_sdk-1.5.10/src/dc_sdk/app.py +187 -0
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10}/src/dc_sdk/handler.py +1 -1
- dc_python_sdk-1.5.10/src/dc_sdk/src/__init__.py +0 -0
- {dc_python_sdk-1.5.9/src/dc_sdk → dc_python_sdk-1.5.10/src/dc_sdk/src}/mapping.py +1 -1
- dc_python_sdk-1.5.10/src/dc_sdk/src/models/__init__.py +0 -0
- dc_python_sdk-1.5.10/src/dc_sdk/src/models/enums.py +35 -0
- dc_python_sdk-1.5.10/src/dc_sdk/src/models/errors.py +323 -0
- dc_python_sdk-1.5.10/src/dc_sdk/src/models/log_templates.py +28 -0
- dc_python_sdk-1.5.10/src/dc_sdk/src/models/pipeline_details.py +37 -0
- dc_python_sdk-1.5.10/src/dc_sdk/src/pipeline.py +381 -0
- {dc_python_sdk-1.5.9/src/dc_sdk → dc_python_sdk-1.5.10/src/dc_sdk/src}/server.py +2 -2
- dc_python_sdk-1.5.10/src/dc_sdk/src/services/__init__.py +0 -0
- dc_python_sdk-1.5.10/src/dc_sdk/src/services/api.py +109 -0
- dc_python_sdk-1.5.10/src/dc_sdk/src/services/aws.py +141 -0
- dc_python_sdk-1.5.10/src/dc_sdk/src/services/environment.py +137 -0
- dc_python_sdk-1.5.10/src/dc_sdk/src/services/logger.py +65 -0
- dc_python_sdk-1.5.9/src/dc_python_sdk.egg-info/SOURCES.txt +0 -22
- dc_python_sdk-1.5.9/src/dc_sdk/conftest.py +0 -369
- dc_python_sdk-1.5.9/src/dc_sdk/pytest.txt +0 -11
- dc_python_sdk-1.5.9/src/dc_sdk/test_connector.py +0 -477
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10}/LICENSE +0 -0
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10}/README.md +0 -0
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10}/src/dc_python_sdk.egg-info/dependency_links.txt +0 -0
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10}/src/dc_python_sdk.egg-info/entry_points.txt +0 -0
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10}/src/dc_python_sdk.egg-info/top_level.txt +0 -0
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10}/src/dc_sdk/__init__.py +0 -0
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10}/src/dc_sdk/cli.py +0 -0
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10}/src/dc_sdk/errors.py +0 -0
- {dc_python_sdk-1.5.9/src/dc_sdk → dc_python_sdk-1.5.10/src/dc_sdk/src/services}/loader.py +0 -0
- {dc_python_sdk-1.5.9/src/dc_sdk → dc_python_sdk-1.5.10/src/dc_sdk/src/services}/session.py +0 -0
- {dc_python_sdk-1.5.9 → dc_python_sdk-1.5.10}/src/dc_sdk/types.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: dc-python-sdk
|
|
3
|
-
Version: 1.5.9
|
|
3
|
+
Version: 1.5.10
|
|
4
4
|
Summary: Data Connector Python SDK
|
|
5
5
|
Home-page: https://github.com/data-connector/dc-python-sdk
|
|
6
6
|
Author: DataConnector
|
|
@@ -16,6 +16,7 @@ Requires-Dist: fastapi
|
|
|
16
16
|
Requires-Dist: uvicorn
|
|
17
17
|
Requires-Dist: awslambdaric
|
|
18
18
|
Requires-Dist: requests
|
|
19
|
+
Requires-Dist: boto3>=1.40.0
|
|
19
20
|
Provides-Extra: test
|
|
20
21
|
Requires-Dist: python-dotenv>=0.20.0; extra == "test"
|
|
21
22
|
Requires-Dist: faker>=13.12.0; extra == "test"
|
|
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
|
|
4
4
|
|
|
5
5
|
[project]
|
|
6
6
|
name = "dc-python-sdk"
|
|
7
|
-
version = "1.5.9"
|
|
7
|
+
version = "1.5.10"
|
|
8
8
|
description = "Data Connector Python SDK"
|
|
9
9
|
readme = "README.md"
|
|
10
10
|
requires-python = ">=3.6"
|
|
@@ -20,7 +20,8 @@ dependencies = [
|
|
|
20
20
|
"fastapi",
|
|
21
21
|
"uvicorn",
|
|
22
22
|
"awslambdaric",
|
|
23
|
-
"requests"
|
|
23
|
+
"requests",
|
|
24
|
+
"boto3>=1.40.0"
|
|
24
25
|
]
|
|
25
26
|
|
|
26
27
|
[project.optional-dependencies]
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
[metadata]
|
|
2
2
|
name = dc-python-sdk
|
|
3
|
-
version = 1.5.9
|
|
3
|
+
version = 1.5.10
|
|
4
4
|
author = DataConnector
|
|
5
5
|
author_email = josh@dataconnector.com
|
|
6
6
|
description = A small example package
|
|
@@ -24,6 +24,7 @@ install_requires =
|
|
|
24
24
|
uvicorn
|
|
25
25
|
awslambdaric
|
|
26
26
|
requests
|
|
27
|
+
boto3>=1.40.0
|
|
27
28
|
|
|
28
29
|
[options.package_data]
|
|
29
30
|
* = pytest.txt
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: dc-python-sdk
|
|
3
|
-
Version: 1.5.9
|
|
3
|
+
Version: 1.5.10
|
|
4
4
|
Summary: Data Connector Python SDK
|
|
5
5
|
Home-page: https://github.com/data-connector/dc-python-sdk
|
|
6
6
|
Author: DataConnector
|
|
@@ -16,6 +16,7 @@ Requires-Dist: fastapi
|
|
|
16
16
|
Requires-Dist: uvicorn
|
|
17
17
|
Requires-Dist: awslambdaric
|
|
18
18
|
Requires-Dist: requests
|
|
19
|
+
Requires-Dist: boto3>=1.40.0
|
|
19
20
|
Provides-Extra: test
|
|
20
21
|
Requires-Dist: python-dotenv>=0.20.0; extra == "test"
|
|
21
22
|
Requires-Dist: faker>=13.12.0; extra == "test"
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
LICENSE
|
|
2
|
+
README.md
|
|
3
|
+
pyproject.toml
|
|
4
|
+
setup.cfg
|
|
5
|
+
src/dc_python_sdk.egg-info/PKG-INFO
|
|
6
|
+
src/dc_python_sdk.egg-info/SOURCES.txt
|
|
7
|
+
src/dc_python_sdk.egg-info/dependency_links.txt
|
|
8
|
+
src/dc_python_sdk.egg-info/entry_points.txt
|
|
9
|
+
src/dc_python_sdk.egg-info/requires.txt
|
|
10
|
+
src/dc_python_sdk.egg-info/top_level.txt
|
|
11
|
+
src/dc_sdk/__init__.py
|
|
12
|
+
src/dc_sdk/app.py
|
|
13
|
+
src/dc_sdk/cli.py
|
|
14
|
+
src/dc_sdk/errors.py
|
|
15
|
+
src/dc_sdk/handler.py
|
|
16
|
+
src/dc_sdk/types.py
|
|
17
|
+
src/dc_sdk/src/__init__.py
|
|
18
|
+
src/dc_sdk/src/mapping.py
|
|
19
|
+
src/dc_sdk/src/pipeline.py
|
|
20
|
+
src/dc_sdk/src/server.py
|
|
21
|
+
src/dc_sdk/src/models/__init__.py
|
|
22
|
+
src/dc_sdk/src/models/enums.py
|
|
23
|
+
src/dc_sdk/src/models/errors.py
|
|
24
|
+
src/dc_sdk/src/models/log_templates.py
|
|
25
|
+
src/dc_sdk/src/models/pipeline_details.py
|
|
26
|
+
src/dc_sdk/src/services/__init__.py
|
|
27
|
+
src/dc_sdk/src/services/api.py
|
|
28
|
+
src/dc_sdk/src/services/aws.py
|
|
29
|
+
src/dc_sdk/src/services/environment.py
|
|
30
|
+
src/dc_sdk/src/services/loader.py
|
|
31
|
+
src/dc_sdk/src/services/logger.py
|
|
32
|
+
src/dc_sdk/src/services/session.py
|
|
@@ -0,0 +1,187 @@
|
|
|
1
|
+
from dc_sdk.src.services.environment import PipelineEnvironment
|
|
2
|
+
|
|
3
|
+
PipelineEnvironment.validate_environment()
|
|
4
|
+
|
|
5
|
+
import os, sys, json, traceback
|
|
6
|
+
from dc_sdk.src.models.enums import RunStagesEnum, TasksEnum, UpdateRunHistoryActionEnum, EnvironmentVariablesEnum
|
|
7
|
+
from dc_sdk.src.services.api import DataConnectorAPI
|
|
8
|
+
from datetime import datetime, timezone
|
|
9
|
+
from dc_sdk.src.pipeline import PipelineConductor
|
|
10
|
+
from dc_sdk.src.models.errors import Error
|
|
11
|
+
from dc_sdk.errors import Error as SDKError
|
|
12
|
+
|
|
13
|
+
TASK = PipelineEnvironment.task
|
|
14
|
+
PIPELINE_ID = PipelineEnvironment.pipeline_id
|
|
15
|
+
APP_ENV = PipelineEnvironment.app_env
|
|
16
|
+
api = DataConnectorAPI()
|
|
17
|
+
|
|
18
|
+
is_source = TasksEnum.SOURCE.value == TASK
|
|
19
|
+
print("Initialized Connector task: ", TASK)
|
|
20
|
+
|
|
21
|
+
def run_pipeline():
|
|
22
|
+
try:
|
|
23
|
+
if PipelineEnvironment.pipeline_run_history_id is None:
|
|
24
|
+
pipeline_run_history_id = api.create_new_history(PipelineEnvironment.pipeline_id)
|
|
25
|
+
PipelineEnvironment.set_pipeline_run_history_id(pipeline_run_history_id)
|
|
26
|
+
|
|
27
|
+
pipeline_conductor = PipelineConductor(PipelineEnvironment.task, pipeline_id=PipelineEnvironment.pipeline_id, pipeline_run_history_id=PipelineEnvironment.pipeline_run_history_id)
|
|
28
|
+
pipeline_conductor.internal_log(pipeline_conductor.log_templates.INTERNAL_CONNECTOR_START.format(pipeline_conductor.pipeline_details.connector_nm))
|
|
29
|
+
except Exception as e:
|
|
30
|
+
error_trace = traceback.format_exc()
|
|
31
|
+
|
|
32
|
+
# Print detailed error information
|
|
33
|
+
print("=== Exception Details ===")
|
|
34
|
+
print(f"Error Type: {type(e).__name__}")
|
|
35
|
+
print(f"Error Message: {str(e)}")
|
|
36
|
+
print("\n=== Full Stack Trace ===")
|
|
37
|
+
print(error_trace)
|
|
38
|
+
|
|
39
|
+
# Get detailed system info
|
|
40
|
+
exc_type, exc_obj, exc_tb = sys.exc_info()
|
|
41
|
+
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
|
|
42
|
+
line_no = exc_tb.tb_lineno
|
|
43
|
+
|
|
44
|
+
# Create a more detailed error message
|
|
45
|
+
message = (
|
|
46
|
+
f"An unhandled exception occurred:\n"
|
|
47
|
+
f"Type: {type(e).__name__}\n"
|
|
48
|
+
f"Location: {fname}, line {line_no}\n"
|
|
49
|
+
f"Details: {str(e)}\n"
|
|
50
|
+
f"Arguments: {e.args}\n"
|
|
51
|
+
f"Full trace: {error_trace}"
|
|
52
|
+
)
|
|
53
|
+
api.log("Fatal error occurred", RunStagesEnum.INITIALIZING_SOURCE_STAGE.value if is_source else RunStagesEnum.INITIALIZING_DESTINATION_STAGE.value, TASK, message, True)
|
|
54
|
+
api.log("An unrecognized issue has occurred on our side. Our team will be in contact within 24-48 hours, or try emailing support@dataconnector.com.", RunStagesEnum.INITIALIZING_SOURCE_STAGE.value if is_source else RunStagesEnum.INITIALIZING_DESTINATION_STAGE.value, TASK, e)
|
|
55
|
+
sys.exit(1)
|
|
56
|
+
|
|
57
|
+
fail = False
|
|
58
|
+
unhandled = False
|
|
59
|
+
|
|
60
|
+
# If job was initialized correctly, start actions
|
|
61
|
+
try:
|
|
62
|
+
pipeline_conductor.pipeline_details.increment_stage()
|
|
63
|
+
if TASK == TasksEnum.SOURCE.value:
|
|
64
|
+
pipeline_conductor.authenticate_source()
|
|
65
|
+
pipeline_conductor.pipeline_details.increment_stage()
|
|
66
|
+
pipeline_conductor.get_data()
|
|
67
|
+
|
|
68
|
+
pipeline_conductor.pipeline_details.increment_stage()
|
|
69
|
+
|
|
70
|
+
next_job_id = None
|
|
71
|
+
|
|
72
|
+
if APP_ENV != 'local':
|
|
73
|
+
next_job_id = pipeline_conductor.start_next_connector()
|
|
74
|
+
else:
|
|
75
|
+
raise Error("Next connector not started because connector was ran locally.", "NextNotInvokedError - ", False)
|
|
76
|
+
if PipelineEnvironment.platform == "aws":
|
|
77
|
+
destination_start = {
|
|
78
|
+
"DestinationECSInstanceID": next_job_id,
|
|
79
|
+
"updateAction": UpdateRunHistoryActionEnum.DESTINATION_PIPELINE_START.value
|
|
80
|
+
}
|
|
81
|
+
elif PipelineEnvironment.platform == "azure":
|
|
82
|
+
destination_start = {
|
|
83
|
+
"DestinationAzureExecutionNM": next_job_id,
|
|
84
|
+
"updateAction": UpdateRunHistoryActionEnum.DESTINATION_PIPELINE_START.value
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
pipeline_conductor.update_history(destination_start)
|
|
88
|
+
|
|
89
|
+
else:
|
|
90
|
+
pipeline_conductor.authenticate_destination()
|
|
91
|
+
pipeline_conductor.pipeline_details.increment_stage()
|
|
92
|
+
pipeline_conductor.load_data(PipelineEnvironment.batch_start)
|
|
93
|
+
|
|
94
|
+
except (SDKError, Error) as e:
|
|
95
|
+
print(traceback.format_exc())
|
|
96
|
+
exc_type, exc_obj, exc_tb = sys.exc_info()
|
|
97
|
+
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
|
|
98
|
+
print(exc_type, fname, exc_tb.tb_lineno)
|
|
99
|
+
fail = True
|
|
100
|
+
if e.internal:
|
|
101
|
+
pipeline_conductor.internal_error(pipeline_conductor.log_templates.INTERNAL_ERROR_MESSAGE.format(type(e).__name__, e.message), e)
|
|
102
|
+
else:
|
|
103
|
+
pipeline_conductor.error(pipeline_conductor.log_templates.ERROR_MESSAGE.format(e.error_name, e.message), e)
|
|
104
|
+
except Exception as ex:
|
|
105
|
+
fail = True
|
|
106
|
+
error_trace = traceback.format_exc()
|
|
107
|
+
|
|
108
|
+
# Print detailed error information
|
|
109
|
+
print("=== Exception Details ===")
|
|
110
|
+
print(f"Error Type: {type(ex).__name__}")
|
|
111
|
+
print(f"Error Message: {str(ex)}")
|
|
112
|
+
print("\n=== Full Stack Trace ===")
|
|
113
|
+
print(error_trace)
|
|
114
|
+
|
|
115
|
+
# Get detailed system info
|
|
116
|
+
exc_type, exc_obj, exc_tb = sys.exc_info()
|
|
117
|
+
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
|
|
118
|
+
line_no = exc_tb.tb_lineno
|
|
119
|
+
|
|
120
|
+
# Create a more detailed error message
|
|
121
|
+
message = (
|
|
122
|
+
f"An unhandled exception occurred:\n"
|
|
123
|
+
f"Type: {type(ex).__name__}\n"
|
|
124
|
+
f"Location: {fname}, line {line_no}\n"
|
|
125
|
+
f"Details: {str(ex)}\n"
|
|
126
|
+
f"Arguments: {ex.args}\n"
|
|
127
|
+
f"Full trace: {error_trace}"
|
|
128
|
+
)
|
|
129
|
+
|
|
130
|
+
pipeline_conductor.internal_error(message, error_trace, unhandled=True)
|
|
131
|
+
unhandled = True
|
|
132
|
+
|
|
133
|
+
if is_source:
|
|
134
|
+
end_payload = {
|
|
135
|
+
"StatusCD": 1 if not fail else 3,
|
|
136
|
+
"StatusDSC": "Running" if not fail else "Failed",
|
|
137
|
+
"BytesTransferredNBR": pipeline_conductor.bytes_transferred,
|
|
138
|
+
"RowsRetrievedNBR": pipeline_conductor.row_count,
|
|
139
|
+
"updateAction": UpdateRunHistoryActionEnum.SOURCE_PIPELINE_END.value,
|
|
140
|
+
"internal_error_flg": 1 if unhandled else 0,
|
|
141
|
+
"credentials": json.dumps(pipeline_conductor.connector.credentials)
|
|
142
|
+
}
|
|
143
|
+
else:
|
|
144
|
+
end_payload = {
|
|
145
|
+
"StatusCD": 2 if not fail else 3,
|
|
146
|
+
"StatusDSC": "Finished" if not fail else "Failed",
|
|
147
|
+
"RowsInsertedNBR": pipeline_conductor.row_count,
|
|
148
|
+
"updateAction": UpdateRunHistoryActionEnum.DESTINATION_PIPELINE_END.value,
|
|
149
|
+
"internal_error_flg": 1 if unhandled else 0,
|
|
150
|
+
"credentials": json.dumps(pipeline_conductor.connector.credentials)
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
pipeline_conductor.update_history(end_payload)
|
|
154
|
+
|
|
155
|
+
if PipelineEnvironment.connected_container and fail:
|
|
156
|
+
sys.exit(1)
|
|
157
|
+
|
|
158
|
+
def main():
|
|
159
|
+
# TODO: Create new history if environment=local
|
|
160
|
+
try:
|
|
161
|
+
run_pipeline()
|
|
162
|
+
except Exception as e:
|
|
163
|
+
error_trace = traceback.format_exc()
|
|
164
|
+
|
|
165
|
+
# Print detailed error information
|
|
166
|
+
print("=== Exception Details ===")
|
|
167
|
+
print(f"Error Type: {type(e).__name__}")
|
|
168
|
+
print(f"Error Message: {str(e)}")
|
|
169
|
+
print("\n=== Full Stack Trace ===")
|
|
170
|
+
print(error_trace)
|
|
171
|
+
|
|
172
|
+
# Get detailed system info
|
|
173
|
+
exc_type, exc_obj, exc_tb = sys.exc_info()
|
|
174
|
+
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
|
|
175
|
+
line_no = exc_tb.tb_lineno
|
|
176
|
+
|
|
177
|
+
# Create a more detailed error message
|
|
178
|
+
message = (
|
|
179
|
+
f"An unhandled exception occurred:\n"
|
|
180
|
+
f"Type: {type(e).__name__}\n"
|
|
181
|
+
f"Location: {fname}, line {line_no}\n"
|
|
182
|
+
f"Details: {str(e)}\n"
|
|
183
|
+
f"Arguments: {e.args}\n"
|
|
184
|
+
f"Full trace: {error_trace}"
|
|
185
|
+
)
|
|
186
|
+
print(message)
|
|
187
|
+
sys.exit(1)
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
from enum import Enum
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class RunStagesEnum(Enum):
|
|
5
|
+
INITIALIZING_SOURCE_STAGE = 1
|
|
6
|
+
AUTHENTICATION_SOURCE_STAGE = 2
|
|
7
|
+
RETRIEVING_DATA_STAGE = 3
|
|
8
|
+
FINISHING_SOURCE_STAGE = 4
|
|
9
|
+
INITIALIZING_DESTINATION_STAGE = 5
|
|
10
|
+
AUTHENTICATION_DESTINATION_STAGE = 6
|
|
11
|
+
LOAD_DATA_STAGE = 7
|
|
12
|
+
DONE = 8
|
|
13
|
+
|
|
14
|
+
class TasksEnum(Enum):
|
|
15
|
+
SOURCE = "SOURCE"
|
|
16
|
+
DESTINATION = "DESTINATION"
|
|
17
|
+
|
|
18
|
+
class UpdateRunHistoryActionEnum(Enum):
|
|
19
|
+
SOURCE_PIPELINE_END = "SOURCE_PIPELINE_END"
|
|
20
|
+
DESTINATION_PIPELINE_START = "DESTINATION_PIPELINE_START"
|
|
21
|
+
DESTINATION_PIPELINE_END = "DESTINATION_PIPELINE_END"
|
|
22
|
+
ROWS_RETRIEVED = "ROWS_RETRIEVED"
|
|
23
|
+
ROWS_INSERTED = "ROWS_INSERTED"
|
|
24
|
+
|
|
25
|
+
class EnvironmentVariablesEnum(Enum):
|
|
26
|
+
TASK = "TASK"
|
|
27
|
+
PIPELINE_ID = "PIPELINE_ID"
|
|
28
|
+
PIPELINE_RUN_HISTORY_ID = "PIPELINE_RUN_HISTORY_ID"
|
|
29
|
+
SERVER_ENDPOINT = "SERVER_ENDPOINT"
|
|
30
|
+
SERVER_API_KEY = "SERVER_API_KEY"
|
|
31
|
+
APP_ENV = "APP_ENV"
|
|
32
|
+
MASTER_HASH = "MASTER_HASH"
|
|
33
|
+
|
|
34
|
+
class ServerEndpointsEnum(Enum):
|
|
35
|
+
pass
|
|
@@ -0,0 +1,323 @@
|
|
|
1
|
+
import inspect
|
|
2
|
+
import sys
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class Error(Exception):
|
|
6
|
+
def __init__(self, message, error, internal=True):
|
|
7
|
+
self.message = message
|
|
8
|
+
self.error_name = error
|
|
9
|
+
self.internal = internal
|
|
10
|
+
|
|
11
|
+
def __str__(self):
|
|
12
|
+
return self.error_name + self.message
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class AuthenticationError(Error):
|
|
16
|
+
error_name = "Authentication Error - "
|
|
17
|
+
internal = False
|
|
18
|
+
|
|
19
|
+
def __init__(self, message=None):
|
|
20
|
+
"""
|
|
21
|
+
Data Connector error class raised when a connector could not authenticate or other related authentication
|
|
22
|
+
errors. Raising this exception will result in a regular handled error. This error will also be visible in
|
|
23
|
+
the log output.
|
|
24
|
+
|
|
25
|
+
:param message: An error message - WILL BE VISIBLE TO CLIENT
|
|
26
|
+
"""
|
|
27
|
+
self.message = message
|
|
28
|
+
self.error_name = "Authentication Error - "
|
|
29
|
+
self.internal = False
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class WhitelistError(Error):
|
|
33
|
+
"""DataConnector error class raised when a connector cannot connect because the user did not whitelist our
|
|
34
|
+
database """
|
|
35
|
+
error_name = "Whitelist Error - "
|
|
36
|
+
internal = False
|
|
37
|
+
|
|
38
|
+
def __init__(self, message=None):
|
|
39
|
+
self.message = message
|
|
40
|
+
self.error_name = "Whitelist Error - "
|
|
41
|
+
self.internal = False
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class NoObjectsFoundError(Error):
|
|
45
|
+
"""DataConnector error class raised when the connector returns no objects associated with the user's account"""
|
|
46
|
+
error_name = "No Objects Found Error - "
|
|
47
|
+
internal = False
|
|
48
|
+
|
|
49
|
+
def __init__(self, message=None):
|
|
50
|
+
self.message = message
|
|
51
|
+
self.error_name = "No Objects Found Error - "
|
|
52
|
+
self.internal = False
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class GetObjectsError(Error):
|
|
56
|
+
"""DataConnector error class raised when the connector cannot pull the objects associated with the user's
|
|
57
|
+
account """
|
|
58
|
+
error_name = "Get Objects Error - "
|
|
59
|
+
internal = True
|
|
60
|
+
|
|
61
|
+
def __init__(self, message=None):
|
|
62
|
+
self.message = message
|
|
63
|
+
self.error_name = "Get Objects Error - "
|
|
64
|
+
self.internal = True
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
class NoFieldsFoundError(Error):
|
|
68
|
+
"""DataConnector error class raised when the connector does not return any fields associated with the object_id"""
|
|
69
|
+
error_name = "No Fields Found Error - "
|
|
70
|
+
internal = False
|
|
71
|
+
|
|
72
|
+
def __init__(self, object_id, message=None):
|
|
73
|
+
self.object_id = object_id
|
|
74
|
+
self.message = message
|
|
75
|
+
self.error_name = "No Fields Found Error - "
|
|
76
|
+
self.internal = False
|
|
77
|
+
|
|
78
|
+
# def __str__(self):
|
|
79
|
+
# if self.object_id:
|
|
80
|
+
# return "The process was terminated because the object {0} did not contain any fields." \
|
|
81
|
+
# .format(self.object_id)
|
|
82
|
+
# else:
|
|
83
|
+
# return "The process was terminated because an error occurred:\n\t" + self.message
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
class GetFieldsError(Error):
|
|
87
|
+
"""DataConnector error class raised when the connector cannot pull the fields associated with the given object_id
|
|
88
|
+
on the user's account """
|
|
89
|
+
error_name = "Get Fields Error - "
|
|
90
|
+
internal = True
|
|
91
|
+
|
|
92
|
+
def __init__(self, object_id, message=None):
|
|
93
|
+
self.object_id = object_id
|
|
94
|
+
self.message = message
|
|
95
|
+
self.error_name = "Get Fields Error - "
|
|
96
|
+
self.internal = True
|
|
97
|
+
|
|
98
|
+
# def __str__(self):
|
|
99
|
+
# if self.object_id:
|
|
100
|
+
# return "The process was terminated because the connector failed to pull the fields in the object " \
|
|
101
|
+
# "{0}.\n\t:{1}".format(self.object_id, self.message)
|
|
102
|
+
# else:
|
|
103
|
+
# return "The process was terminated because an error occurred:\n\t" + self.message
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
class BadFieldIDError(Error):
|
|
107
|
+
"""DataConnector error class raised when the field_id does not belong to the given object_id"""
|
|
108
|
+
error_name = "Bad Field ID Error - "
|
|
109
|
+
internal = True
|
|
110
|
+
|
|
111
|
+
def __init__(self, field_id=None, object_id=None, message=None):
|
|
112
|
+
self.field_id = field_id
|
|
113
|
+
self.object_id = object_id
|
|
114
|
+
self.message = message
|
|
115
|
+
self.error_name = "Bad Field ID Error - "
|
|
116
|
+
self.internal = True
|
|
117
|
+
|
|
118
|
+
# def __str__(self):
|
|
119
|
+
# if self.field_id and self.object_id:
|
|
120
|
+
# return "The process was terminated because the field_id ({0}) was not found in the object {1}." \
|
|
121
|
+
# .format(self.field_id, self.object_id)
|
|
122
|
+
# else:
|
|
123
|
+
# return "The process was terminated because an error occurred:\n\t" + self.message
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
class FilterDataTypeError(Error):
|
|
127
|
+
"""DataConnector error class raised when datatype of the column that is supposed to be filtered is not date or
|
|
128
|
+
datetime """
|
|
129
|
+
error_name = "Filter Data Type Error - "
|
|
130
|
+
internal = True
|
|
131
|
+
|
|
132
|
+
def __init__(self, datatype=None, field_to_filter=None, message=None):
|
|
133
|
+
self.datatype = datatype
|
|
134
|
+
self.field = field_to_filter
|
|
135
|
+
self.message = message
|
|
136
|
+
self.error_name = "Filter Data Type Error - "
|
|
137
|
+
self.internal = False
|
|
138
|
+
|
|
139
|
+
# def __str__(self):
|
|
140
|
+
# if self.datatype and self.field:
|
|
141
|
+
# return "The process was terminated because the datatype of {0} was invalid for filtering. Datatype: {1}" \
|
|
142
|
+
# .format(self.field, self.datatype)
|
|
143
|
+
# else:
|
|
144
|
+
# return "The process was terminated because an error occurred:\n\t" + self.message
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
class BadObjectIDError(Error):
|
|
148
|
+
"""DataConnector error class raised when the object_id is not associated with the user"""
|
|
149
|
+
error_name = "Bad Object ID Error - "
|
|
150
|
+
internal = True
|
|
151
|
+
|
|
152
|
+
def __init__(self, object_id=None, message=None):
|
|
153
|
+
self.object_id = object_id
|
|
154
|
+
self.message = message
|
|
155
|
+
self.error_name = "Bad Object ID Error - "
|
|
156
|
+
self.internal = True
|
|
157
|
+
|
|
158
|
+
# def __str__(self):
|
|
159
|
+
# if self.object_id:
|
|
160
|
+
# return "The process was terminated because the object_id ({0}) could not be found." \
|
|
161
|
+
# .format(self.object_id)
|
|
162
|
+
# else:
|
|
163
|
+
# return "The process was terminated because an error occurred:\n\t" + self.message
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
class UpdateMethodNotSupportedError(Error):
|
|
167
|
+
"""DataConnector error class raised when the chosen update method is invalid for the chosen connector"""
|
|
168
|
+
error_name = "Update Method Not Supported Error - "
|
|
169
|
+
internal = True
|
|
170
|
+
|
|
171
|
+
def __init__(self, update_method=None, connector=None, message=None):
|
|
172
|
+
self.update_method = update_method
|
|
173
|
+
self.connector = connector
|
|
174
|
+
self.message = message
|
|
175
|
+
self.error_name = "Update Method Not Supported Error - "
|
|
176
|
+
self.internal = True
|
|
177
|
+
|
|
178
|
+
# def __str__(self):
|
|
179
|
+
# if self.update_method and self.connector:
|
|
180
|
+
# return "The process was terminated because the update method {0} is not supported by the {1} connector." \
|
|
181
|
+
# .format(self.update_method, self.connector)
|
|
182
|
+
# else:
|
|
183
|
+
# return "The process was terminated because an error occurred:\n\t" + self.message
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
class MappingError(Error):
|
|
187
|
+
"""DataConnector error class raised when the data cannot be mapped to the given columns"""
|
|
188
|
+
error_name = "Mapping Error - "
|
|
189
|
+
internal = True
|
|
190
|
+
|
|
191
|
+
def __init__(self, message):
|
|
192
|
+
self.message = message
|
|
193
|
+
self.error_name = "Mapping Error - "
|
|
194
|
+
self.internal = False
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
class DataError(Error):
|
|
198
|
+
"""DataConnector error class raised when the data from the object was unable to be pulled."""
|
|
199
|
+
error_name = "Data Error - "
|
|
200
|
+
internal = True
|
|
201
|
+
|
|
202
|
+
def __init__(self, message):
|
|
203
|
+
self.message = message
|
|
204
|
+
self.error_name = "Data Error - "
|
|
205
|
+
self.internal = False
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
class APIRequestError(Error):
|
|
209
|
+
"""DataConnector error class raised when the connector gets an error code from the API"""
|
|
210
|
+
error_name = "API Request Error - "
|
|
211
|
+
internal = True
|
|
212
|
+
|
|
213
|
+
def __init__(self, error_code_returned=None, message=None):
|
|
214
|
+
self.error_code = error_code_returned
|
|
215
|
+
self.message = message
|
|
216
|
+
self.error_name = "API Request Error - "
|
|
217
|
+
self.internal = False
|
|
218
|
+
|
|
219
|
+
# def __str__(self):
|
|
220
|
+
# if self.error_code:
|
|
221
|
+
# return "The process was terminated because the connector's API returned a {0} error".format(self.error_code)
|
|
222
|
+
# else:
|
|
223
|
+
# return "The process was terminated because an error occurred:\n\t" + self.message
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
class APITimeoutError(Error):
|
|
227
|
+
"""DataConnector error class raised when the API takes times out"""
|
|
228
|
+
error_name = "API Timeout Error - "
|
|
229
|
+
internal = True
|
|
230
|
+
|
|
231
|
+
def __init__(self, message=None):
|
|
232
|
+
self.message = message
|
|
233
|
+
self.error_name = "API Timeout Error - "
|
|
234
|
+
self.internal = False
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
class FieldDataTypeError(Error):
|
|
238
|
+
"""DataConnector error class raised when the datatype of a field is not supported"""
|
|
239
|
+
error_name = "Field Data Type Error - "
|
|
240
|
+
internal = True
|
|
241
|
+
|
|
242
|
+
def __init__(self, datatype=None, field=None, message=None):
|
|
243
|
+
self.datatype = datatype
|
|
244
|
+
self.field = field
|
|
245
|
+
self.message = message
|
|
246
|
+
self.error_name = "Field Data Type Error - "
|
|
247
|
+
self.internal = False
|
|
248
|
+
|
|
249
|
+
# def __str__(self):
|
|
250
|
+
# if self.datatype and self.field:
|
|
251
|
+
# return "The process was terminated because the datatype of {0} was invalid. Datatype: {1}" \
|
|
252
|
+
# .format(self.field, self.datatype)
|
|
253
|
+
# else:
|
|
254
|
+
# return "The process was terminated because an error occurred:\n\t" + self.message
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
class APIPermissionError(Error):
|
|
258
|
+
"""DataConnector error class raised when the connector cannot finish a process it lacks API permissions"""
|
|
259
|
+
error_name = "API Permission Error - "
|
|
260
|
+
internal = True
|
|
261
|
+
|
|
262
|
+
def __init__(self, message=None):
|
|
263
|
+
self.message = message
|
|
264
|
+
self.error_name = "API Permission Error - "
|
|
265
|
+
self.internal = False
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
class LoadDataError(Error):
|
|
269
|
+
"""DataConnector error class raised when the connector cannot finish loading data"""
|
|
270
|
+
error_name = "Load Data Error - "
|
|
271
|
+
internal = True
|
|
272
|
+
|
|
273
|
+
def __init__(self, message=None):
|
|
274
|
+
self.message = message
|
|
275
|
+
self.error_name = "Load Data Error - "
|
|
276
|
+
self.internal = False
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
class NotADestinationError(Error):
|
|
280
|
+
"""DataConnector error class raised when the load_data function is implemented but the connector is not a destination"""
|
|
281
|
+
error_name = "Not A Destination Error - "
|
|
282
|
+
internal = True
|
|
283
|
+
|
|
284
|
+
def __init__(self, message=None):
|
|
285
|
+
self.message = message
|
|
286
|
+
self.error_name = "Not A Destination Error - "
|
|
287
|
+
self.internal = True
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
class NotImplementedError(Error):
|
|
291
|
+
"""DataConnector error class raised when the load_data function is implemented but the connector is not a
|
|
292
|
+
destination """
|
|
293
|
+
error_name = "Not Implemented Error - "
|
|
294
|
+
internal = True
|
|
295
|
+
|
|
296
|
+
def __init__(self, message=None):
|
|
297
|
+
self.message = message
|
|
298
|
+
self.error_name = "Not Implemented Error - "
|
|
299
|
+
self.internal = True
|
|
300
|
+
|
|
301
|
+
|
|
302
|
+
class NoRowsFoundError(Error):
|
|
303
|
+
"""DataConnector error class raised when the database returns no rows"""
|
|
304
|
+
error_name = "No Rows Found Error - "
|
|
305
|
+
internal = True
|
|
306
|
+
|
|
307
|
+
def __init__(self, message=None):
|
|
308
|
+
self.message = message
|
|
309
|
+
self.error_name = "No Rows Found Error - "
|
|
310
|
+
self.internal = True
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
def external_errors():
|
|
314
|
+
error_list = []
|
|
315
|
+
classes = [cls[1] for cls in inspect.getmembers(sys.modules[__name__],
|
|
316
|
+
lambda member: inspect.isclass(member)
|
|
317
|
+
and member.__module__ == __name__)]
|
|
318
|
+
for error in classes:
|
|
319
|
+
if error.__name__ == "Error":
|
|
320
|
+
continue
|
|
321
|
+
if not error.internal:
|
|
322
|
+
error_list.append(error.__name__)
|
|
323
|
+
return error_list
|