fivetran-connector-sdk 0.7.24.3__py3-none-any.whl → 0.8.19.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fivetran_connector_sdk/__init__.py +482 -96
- {fivetran_connector_sdk-0.7.24.3.dist-info → fivetran_connector_sdk-0.8.19.1.dist-info}/METADATA +2 -2
- {fivetran_connector_sdk-0.7.24.3.dist-info → fivetran_connector_sdk-0.8.19.1.dist-info}/RECORD +6 -7
- {fivetran_connector_sdk-0.7.24.3.dist-info → fivetran_connector_sdk-0.8.19.1.dist-info}/WHEEL +1 -1
- fivetran_connector_sdk-0.7.24.3.dist-info/LICENSE +0 -21
- {fivetran_connector_sdk-0.7.24.3.dist-info → fivetran_connector_sdk-0.8.19.1.dist-info}/entry_points.txt +0 -0
- {fivetran_connector_sdk-0.7.24.3.dist-info → fivetran_connector_sdk-0.8.19.1.dist-info}/top_level.txt +0 -0
fivetran_connector_sdk/__init__.py

@@ -11,6 +11,7 @@ import subprocess
 import sys
 import time
 import traceback
+import re

 from concurrent import futures
 from datetime import datetime

@@ -22,14 +23,14 @@ from fivetran_connector_sdk.protos import common_pb2
 from fivetran_connector_sdk.protos import connector_sdk_pb2
 from fivetran_connector_sdk.protos import connector_sdk_pb2_grpc

-__version__ = "0.7.24.3"
+__version__ = "0.8.19.1"

 MAC_OS = "mac"
 WIN_OS = "windows"
 LINUX_OS = "linux"

-TESTER_VERSION = "0.24.
-TESTER_FILENAME = "
+TESTER_VERSION = "0.24.0807.001"
+TESTER_FILENAME = "run_sdk_tester.jar"
 VERSION_FILENAME = "version.txt"
 UPLOAD_FILENAME = "code.zip"
 LAST_VERSION_CHECK_FILE = "_last_version_check"

@@ -38,6 +39,7 @@ OUTPUT_FILES_DIR = "files"
 ONE_DAY_IN_SEC = 24 * 60 * 60

 EXCLUDED_DIRS = ["__pycache__", "lib", "include", OUTPUT_FILES_DIR]
+EXCLUDED_PIPREQS_DIRS = ["bin,etc,include,lib,Lib,lib64,Scripts"]

 DEBUGGING = False
 TABLES = {}
@@ -54,28 +56,54 @@ class Logging:

     @staticmethod
     def __log(level: Level, message: str):
+        """Logs a message with the specified logging level.
+
+        Args:
+            level (Logging.Level): The logging level.
+            message (str): The message to log.
+        """
         if DEBUGGING:
             print(message)
         else:
-            print(f'{{"level":"{level}", "message": "{message}", "message-origin": "connector_sdk"}}')
+            print(f'{{"level":"{level.name}", "message": "{message}", "message-origin": "connector_sdk"}}')

     @staticmethod
     def fine(message: str):
+        """Logs a fine-level message.
+
+        Args:
+            message (str): The message to log.
+        """
         if DEBUGGING and Logging.LOG_LEVEL == Logging.Level.FINE:
             Logging.__log(Logging.Level.FINE, message)

     @staticmethod
     def info(message: str):
+        """Logs an info-level message.
+
+        Args:
+            message (str): The message to log.
+        """
         if Logging.LOG_LEVEL <= Logging.Level.INFO:
             Logging.__log(Logging.Level.INFO, message)

     @staticmethod
     def warning(message: str):
+        """Logs a warning-level message.
+
+        Args:
+            message (str): The message to log.
+        """
         if Logging.LOG_LEVEL <= Logging.Level.WARNING:
             Logging.__log(Logging.Level.WARNING, message)

     @staticmethod
     def severe(message: str):
+        """Logs a severe-level message.
+
+        Args:
+            message (str): The message to log.
+        """
         if Logging.LOG_LEVEL == Logging.Level.SEVERE:
             Logging.__log(Logging.Level.SEVERE, message)

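For reference, these gated helpers are what connector code calls for logging; a minimal sketch of typical usage, assuming the package-level imports shown in the diff (the message text is illustrative only):

    from fivetran_connector_sdk import Logging as log

    log.LOG_LEVEL = log.Level.INFO          # with INFO, fine() output is suppressed
    log.fine("per-row detail, only emitted when debugging at FINE level")
    log.info("starting incremental sync")   # printed as {"level":"INFO", ...}
    log.warning("rate limit approaching")
    log.severe("sync failed")               # printed only when LOG_LEVEL == SEVERE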
@@ -83,6 +111,15 @@ class Logging:
 class Operations:
     @staticmethod
     def upsert(table: str, data: dict) -> list[connector_sdk_pb2.UpdateResponse]:
+        """Performs an upsert operation on the specified table with the given data, deleting any existing value with the same primary key.
+
+        Args:
+            table (str): The name of the table.
+            data (dict): The data to upsert.
+
+        Returns:
+            list[connector_sdk_pb2.UpdateResponse]: A list of update responses.
+        """
         _yield_check(inspect.stack())

         responses = []
@@ -117,6 +154,15 @@ class Operations:

     @staticmethod
     def update(table: str, modified: dict) -> connector_sdk_pb2.UpdateResponse:
+        """Performs an update operation on the specified table with the given modified data.
+
+        Args:
+            table (str): The name of the table.
+            modified (dict): The modified data.
+
+        Returns:
+            connector_sdk_pb2.UpdateResponse: The update response.
+        """
         _yield_check(inspect.stack())

         columns = _get_columns(table)
@@ -133,6 +179,15 @@ class Operations:

     @staticmethod
     def delete(table: str, keys: dict) -> connector_sdk_pb2.UpdateResponse:
+        """Performs a soft delete operation on the specified table with the given keys.
+
+        Args:
+            table (str): The name of the table.
+            keys (dict): The keys to delete.
+
+        Returns:
+            connector_sdk_pb2.UpdateResponse: The delete response.
+        """
         _yield_check(inspect.stack())

         columns = _get_columns(table)
@@ -147,9 +202,16 @@ class Operations:
         return connector_sdk_pb2.UpdateResponse(
             operation=connector_sdk_pb2.Operation(record=record))

-
     @staticmethod
     def checkpoint(state: dict) -> connector_sdk_pb2.UpdateResponse:
+        """Tries to upload all rows to the data warehouse and save state.
+
+        Args:
+            state (dict): The state to checkpoint.
+
+        Returns:
+            connector_sdk_pb2.UpdateResponse: The checkpoint response.
+        """
         _yield_check(inspect.stack())
         return connector_sdk_pb2.UpdateResponse(
             operation=connector_sdk_pb2.Operation(checkpoint=connector_sdk_pb2.Checkpoint(
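These operations are designed to be yielded from a connector's update generator; the `_yield_check` call in each of them aborts the run if the caller forgets the `yield`. A hedged sketch of that usage (table, column names, and cursor value are illustrative):

    from fivetran_connector_sdk import Operations as op

    def update(configuration: dict, state: dict):
        yield op.upsert(table="orders", data={"id": 1, "amount": 42.5})
        yield op.update(table="orders", modified={"id": 1, "amount": 43.0})
        yield op.delete(table="orders", keys={"id": 1})
        # Flush buffered rows and persist the cursor for the next sync.
        yield op.checkpoint(state={"cursor": "2024-08-01T00:00:00Z"})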
@@ -157,6 +219,7 @@ class Operations:


 def check_newer_version():
+    """Periodically checks for a newer version of the SDK and notifies the user if one is available."""
     tester_root_dir = _tester_root_dir()
     last_check_file_path = os.path.join(tester_root_dir, LAST_VERSION_CHECK_FILE)
     if not os.path.isdir(tester_root_dir):
@@ -174,18 +237,27 @@ def check_newer_version():
     obtainer = GetPyPiLatestVersion()
     latest_version = obtainer('fivetran_connector_sdk')
     if __version__ < latest_version:
-        print(f"[notice] A new release of 'fivetran-connector-sdk' available: {latest_version}\n" +
+        print(f"[notice] A new release of 'fivetran-connector-sdk' is available: {latest_version}\n" +
              f"[notice] To update, run: pip install --upgrade fivetran-connector-sdk\n")

     with open(last_check_file_path, 'w') as f_out:
         f_out.write(f"{int(time.time())}")


-def _tester_root_dir():
+def _tester_root_dir() -> str:
+    """Returns the root directory for the tester."""
     return os.path.join(os.path.expanduser("~"), ROOT_LOCATION)


 def _get_columns(table: str) -> dict:
+    """Retrieves the columns for the specified table.
+
+    Args:
+        table (str): The name of the table.
+
+    Returns:
+        dict: The columns for the table.
+    """
     columns = {}
     if table in TABLES:
         for column in TABLES[table].columns:
@@ -195,39 +267,46 @@ def _get_columns(table: str) -> dict:


 def _map_data_to_columns(data: dict, columns: dict) -> dict:
+    """Maps data to the specified columns.
+
+    Args:
+        data (dict): The data to map.
+        columns (dict): The columns to map the data to.
+
+    Returns:
+        dict: The mapped data.
+    """
     mapped_data = {}
     for k, v in data.items():
         if v is None:
             mapped_data[k] = common_pb2.ValueType(null=True)
-        elif isinstance(v, list):
-            raise ValueError("Value type cannot be list")
         elif (k in columns) and columns[k].type != common_pb2.DataType.UNSPECIFIED:
             if columns[k].type == common_pb2.DataType.BOOLEAN:
                 mapped_data[k] = common_pb2.ValueType(bool=v)
             elif columns[k].type == common_pb2.DataType.SHORT:
                 mapped_data[k] = common_pb2.ValueType(short=v)
-            elif columns[k].type ==
+            elif columns[k].type == common_pb2.DataType.INT:
                 mapped_data[k] = common_pb2.ValueType(int=v)
-            elif columns[k].type ==
+            elif columns[k].type == common_pb2.DataType.LONG:
                 mapped_data[k] = common_pb2.ValueType(long=v)
-            elif columns[k].type ==
+            elif columns[k].type == common_pb2.DataType.DECIMAL:
                 mapped_data[k] = common_pb2.ValueType(decimal=v)
-            elif columns[k].type ==
+            elif columns[k].type == common_pb2.DataType.FLOAT:
                 mapped_data[k] = common_pb2.ValueType(float=v)
-            elif columns[k].type ==
+            elif columns[k].type == common_pb2.DataType.DOUBLE:
                 mapped_data[k] = common_pb2.ValueType(double=v)
-            elif columns[k].type ==
+            elif columns[k].type == common_pb2.DataType.NAIVE_DATE:
                 timestamp = timestamp_pb2.Timestamp()
                 dt = datetime.strptime(v, "%Y-%m-%d")
                 timestamp.FromDatetime(dt)
                 mapped_data[k] = common_pb2.ValueType(naive_date=timestamp)
-            elif columns[k].type ==
+            elif columns[k].type == common_pb2.DataType.NAIVE_DATETIME:
                 if '.' not in v: v = v + ".0"
                 timestamp = timestamp_pb2.Timestamp()
                 dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f")
                 timestamp.FromDatetime(dt)
                 mapped_data[k] = common_pb2.ValueType(naive_datetime=timestamp)
-            elif columns[k].type ==
+            elif columns[k].type == common_pb2.DataType.UTC_DATETIME:
                 timestamp = timestamp_pb2.Timestamp()
                 if '.' in v:
                     dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f%z")
@@ -235,17 +314,17 @@ def _map_data_to_columns(data: dict, columns: dict) -> dict:
                     dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S%z")
                 timestamp.FromDatetime(dt)
                 mapped_data[k] = common_pb2.ValueType(utc_datetime=timestamp)
-            elif columns[k].type ==
+            elif columns[k].type == common_pb2.DataType.BINARY:
                 mapped_data[k] = common_pb2.ValueType(binary=v)
-            elif columns[k].type ==
+            elif columns[k].type == common_pb2.DataType.XML:
                 mapped_data[k] = common_pb2.ValueType(xml=v)
-            elif columns[k].type ==
+            elif columns[k].type == common_pb2.DataType.STRING:
                 incoming = v if isinstance(v, str) else str(v)
                 mapped_data[k] = common_pb2.ValueType(string=incoming)
-            elif columns[k].type ==
+            elif columns[k].type == common_pb2.DataType.JSON:
                 mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
             else:
-                raise ValueError(f"
+                raise ValueError(f"Unsupported data type encountered: {columns[k].type}. Please use valid data types.")
         else:
             # We can infer type from the value
             if isinstance(v, int):
@@ -260,7 +339,7 @@ def _map_data_to_columns(data: dict, columns: dict) -> dict:
             elif isinstance(v, bytes):
                 mapped_data[k] = common_pb2.ValueType(binary=v)
             elif isinstance(v, list):
-                raise ValueError("
+                raise ValueError("Values for the columns cannot be of type 'list'. Please ensure that all values are of a supported type. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#supporteddatatypes")
             elif isinstance(v, dict):
                 mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
             elif isinstance(v, str):
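The mapping above expects ISO-formatted strings for declared date/time columns and falls back to type inference when a column has no declared type; a hedged illustration of values that would and would not map (the keys and column declarations are made up):

    data = {
        "order_date": "2024-08-01",                     # NAIVE_DATE, parsed with "%Y-%m-%d"
        "created_at": "2024-08-01T12:30:00.123",        # NAIVE_DATETIME, ".0" appended if no fraction
        "synced_at":  "2024-08-01T12:30:00.123+00:00",  # UTC_DATETIME, offset-aware format
        "details":    {"sku": "A-1"},                   # undeclared dict -> serialized via json.dumps
        "tags":       ["a", "b"],                       # list -> ValueError (unsupported)
    }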
@@ -273,6 +352,11 @@ def _map_data_to_columns(data: dict, columns: dict) -> dict:


 def _yield_check(stack):
+    """Checks for the presence of 'yield' in the calling code.
+    Args:
+        stack: The stack frame to check.
+    """
+
     # Known issue with inspect.getmodule() and yield behavior in a frozen application.
     # When using inspect.getmodule() on stack frames obtained by inspect.stack(), it fails
     # to resolve the modules in a frozen application due to incompatible assumptions about
@@ -286,24 +370,33 @@ def _yield_check(stack):
     calling_code = stack[1].code_context[0]
     if f"{called_method}(" in calling_code:
         if 'yield' not in calling_code:
-            print(f"
+            print(f"SEVERE: Please add 'yield' to '{called_method}' operation on line {stack[1].lineno} in file '{stack[1].filename}'")
             os._exit(1)
     else:
         # This should never happen
-        raise RuntimeError(f"
+        raise RuntimeError(f"The '{called_method}' function is missing in the connector. Please ensure that the '{called_method}' function is properly defined in your code to proceed. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsmethods")
+

+def _check_dict(incoming: dict, string_only: bool = False) -> dict:
+    """Validates the incoming dictionary.
+    Args:
+        incoming (dict): The dictionary to validate.
+        string_only (bool): Whether to allow only string values.
+
+    Returns:
+        dict: The validated dictionary.
+    """

-def _check_dict(incoming: dict, string_only: bool = False):
     if not incoming:
         return {}

     if not isinstance(incoming, dict):
-        raise ValueError("Configuration
+        raise ValueError("Configuration must be provided as a JSON dictionary. Please check your input. Reference: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")

     if string_only:
         for k, v in incoming.items():
             if not isinstance(v, str):
-                print("
+                print("SEVERE: All values in the configuration must be STRING. Please check your configuration and ensure that every value is a STRING.")
                 os._exit(1)

     return incoming
@@ -311,38 +404,134 @@ def _check_dict(incoming: dict, string_only: bool = False):

 class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
     def __init__(self, update, schema=None):
+        """Initializes the Connector instance.
+        Args:
+            update: The update method.
+            schema: The schema method.
+        """
+
         self.schema_method = schema
         self.update_method = update

         self.configuration = None
         self.state = None

-    # Call this method to unpause and start a connector
-    def start(self, deploy_key: str, group: str, connection: str):
-        if not deploy_key: print("ERROR: Missing deploy key"); os._exit(1)
-        if not connection: print("ERROR: Missing connection name"); os._exit(1)
-
-        group_id, group_name = self.__get_group_info(group, deploy_key)
-        connection_id = self.__get_connection_id(connection, group, group_id, deploy_key)
-        if not self.__unpause_connection():
-            print(f"WARNING: Unable to unpause connection '{connection}'")
-            os._exit(1)
-
-        if not self.__force_sync(connection_id, connection, deploy_key):
-            print(f"WARNING: Unable to start sync on connection '{connection}'")
-            os._exit(1)
-
     @staticmethod
     def __unpause_connection(id: str, deploy_key: str) -> bool:
+        """Unpauses the connection with the given ID and deployment key.
+
+        Args:
+            id (str): The connection ID.
+            deploy_key (str): The deployment key.
+
+        Returns:
+            bool: True if the connection was successfully unpaused, False otherwise.
+        """
         resp = rq.patch(f"https://api.fivetran.com/v1/connectors/{id}",
                         headers={"Authorization": f"Basic {deploy_key}"},
                         json={"force": True})
         return resp.ok

+    @staticmethod
+    def fetch_requirements_from_file(file_path: str) -> list[str]:
+        """Reads a requirements file and returns a list of dependencies.
+
+        Args:
+            file_path (str): The path to the requirements file.
+
+        Returns:
+            list[str]: A list of dependencies as strings.
+        """
+        with open(file_path, 'r') as f:
+            return f.read().splitlines()
+
+    @staticmethod
+    def fetch_requirements_as_dict(self, file_path: str) -> dict:
+        """Converts a list of dependencies from the requirements file into a dictionary.
+
+        Args:
+            file_path (str): The path to the requirements file.
+
+        Returns:
+            dict: A dictionary where keys are package names (lowercased) and
+            values are the full dependency strings.
+        """
+        return {item.split("==")[0].lower(): item.lower() for item in
+                self.fetch_requirements_from_file(file_path)}
+
+    def validate_requirements_file(self, project_path: str, is_deploy: bool):
+        """Validates the `requirements.txt` file against the project's actual dependencies.
+
+        This method generates a temporary requirements file using `pipreqs`, compares
+        it with the existing `requirements.txt`, and checks for version mismatches,
+        missing dependencies, and unused dependencies. It will issue warnings, errors,
+        or even terminate the process depending on whether it's being run for deployment.
+
+        Args:
+            project_path (str): The path to the project directory containing the `requirements.txt`.
+            is_deploy (bool): If `True`, the method will exit the process on critical errors.
+
+        """
+        subprocess.run(["pipreqs", "--savepath", "tmp_requirements.txt", "--ignore"] + EXCLUDED_PIPREQS_DIRS, text=True, check=True)
+        tmp_requirements_file_path = os.path.join(project_path, 'tmp_requirements.txt')
+
+        tmp_requirements = self.fetch_requirements_as_dict(self, tmp_requirements_file_path)
+        tmp_requirements.pop("fivetran_connector_sdk")
+        os.remove(tmp_requirements_file_path)
+
+        if len(tmp_requirements) > 0:
+            if os.path.exists("requirements.txt"):
+                requirements = self.fetch_requirements_as_dict(self, os.path.join(project_path, 'requirements.txt'))
+            else:
+                with open("requirements.txt", 'w'):
+                    pass
+                requirements = {}
+                print("WARNING: Adding `requirements.txt` file to your project folder.")
+
+            version_mismatch_deps = {key: tmp_requirements[key] for key in
+                                     (requirements.keys() & tmp_requirements.keys())
+                                     if requirements[key] != tmp_requirements[key]}
+            if version_mismatch_deps:
+                print("WARNING: We recommend using the current stable version for the following:")
+                print(version_mismatch_deps)
+
+            missing_deps = {key: tmp_requirements[key] for key in (tmp_requirements.keys() - requirements.keys())}
+            if missing_deps:
+                log_level = "ERROR" if is_deploy else "WARNING"
+                print(log_level +
+                      ": Please include the following dependency libraries in requirements.txt, to be used by "
+                      "Fivetran production. "
+                      "For more information, please visit: "
+                      "https://fivetran.com/docs/connectors/connector-sdk/detailed-guide"
+                      "#workingwithrequirementstxtfile")
+                print(*list(missing_deps.values()))
+                if is_deploy:
+                    os._exit(1)
+
+            unused_deps = list(requirements.keys() - tmp_requirements.keys())
+            if unused_deps:
+                print("INFO: The following dependencies are not used in connector.py:")
+                print(*unused_deps)
+        else:
+            if os.path.exists("requirements.txt"):
+                print("WARNING: `requirements.txt` is not required as no additional "
+                      "Python libraries are required for your code.")
+
+        if is_deploy: print("Successful validation of requirements.txt")
+
     # Call this method to deploy the connector to Fivetran platform
     def deploy(self, project_path: str, deploy_key: str, group: str, connection: str, configuration: dict = None):
-
-
+        """Deploys the connector to the Fivetran platform.
+
+        Args:
+            project_path (str): The path to the connector project.
+            deploy_key (str): The deployment key.
+            group (str): The group name.
+            connection (str): The connection name.
+            configuration (dict): The configuration dictionary.
+        """
+        if not deploy_key: print("SEVERE: The Fivetran API key is missing. Please provide a valid Fivetran API key to create the connector."); os._exit(1)
+        if not connection: print("SEVERE: The connection name is missing. Please provide a valid connection name to create the connector."); os._exit(1)
         _check_dict(configuration)

         secrets_list = []
@@ -356,28 +545,40 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
             "sync_method": "DIRECT",
             "custom_payloads": [],
         }
+
+        self.validate_requirements_file(project_path, True)
+
         group_id, group_name = self.__get_group_info(group, deploy_key)
-        print(f"Deploying '{project_path}' to '{
+        print(f"INFO: Deploying '{project_path}' to connector '{connection}' in destination '{group_name}'.\n")
         upload_file_path = self.__create_upload_file(project_path)
-        upload_result = self.__upload(upload_file_path, deploy_key,group_id,connection)
+        upload_result = self.__upload(upload_file_path, deploy_key, group_id, connection)
         os.remove(upload_file_path)
         if not upload_result:
             os._exit(1)
         connection_id = self.__get_connection_id(connection, group, group_id, deploy_key)
         if connection_id:
-            print(f"
+            print(f"INFO: The connection '{connection}' already exists in destination '{group}', updating the existing connector... ", end="", flush=True)
             self.__update_connection(connection_id, connection, group_name, connection_config, deploy_key)
             print("✓")
         else:
             response = self.__create_connection(deploy_key, group_id, connection_config)
             if response.ok:
-                print(f"
+                print(f"INFO: Connection named '{connection}' has been created successfully.\n")
             else:
-                print(f"
+                print(f"SEVERE: Unable to create a new Connection, failed with error: {response.json()['message']}")
                 os._exit(1)

     @staticmethod
     def __force_sync(id: str, deploy_key: str) -> bool:
+        """Forces a sync operation on the connection with the given ID and deployment key.
+
+        Args:
+            id (str): The connection ID.
+            deploy_key (str): The deployment key.
+
+        Returns:
+            bool: True if the sync was successfully started, False otherwise.
+        """
         resp = rq.post(f"https://api.fivetran.com/v1/connectors/{id}/sync",
                        headers={"Authorization": f"Basic {deploy_key}"},
                        json={"force": True})
@@ -385,6 +586,15 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):

     @staticmethod
     def __update_connection(id: str, name: str, group: str, config: dict, deploy_key: str):
+        """Updates the connection with the given ID, name, group, configuration, and deployment key.
+
+        Args:
+            id (str): The connection ID.
+            name (str): The connection name.
+            group (str): The group name.
+            config (dict): The configuration dictionary.
+            deploy_key (str): The deployment key.
+        """
         if not config["secrets_list"]:
             del config["secrets_list"]

@@ -396,16 +606,27 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
                        })

         if not resp.ok:
-            print(f"
+            print(f"SEVERE: Unable to update Connection '{name}' in destination '{group}', failed with error: '{response.json()['message']}'.")
             os._exit(1)

     @staticmethod
-    def __get_connection_id(name: str, group: str, group_id: str, deploy_key: str):
+    def __get_connection_id(name: str, group: str, group_id: str, deploy_key: str) -> str | None:
+        """Retrieves the connection ID for the specified connection schema name, group, and deployment key.
+
+        Args:
+            name (str): The connection name.
+            group (str): The group name.
+            group_id (str): The group ID.
+            deploy_key (str): The deployment key.
+
+        Returns:
+            str: The connection ID, or None
+        """
         resp = rq.get(f"https://api.fivetran.com/v1/groups/{group_id}/connectors",
                       headers={"Authorization": f"Basic {deploy_key}"},
                       params={"schema": name})
         if not resp.ok:
-            print(f"
+            print(f"SEVERE: Unable to fetch connection list in destination '{group}'")
             os._exit(1)

         if resp.json()['data']['items']:
@@ -414,7 +635,17 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         return None

     @staticmethod
-    def __create_connection(deploy_key: str, group_id: str, config: dict):
+    def __create_connection(deploy_key: str, group_id: str, config: dict) -> rq.Response:
+        """Creates a new connection with the given deployment key, group ID, and configuration.
+
+        Args:
+            deploy_key (str): The deployment key.
+            group_id (str): The group ID.
+            config (dict): The configuration dictionary.
+
+        Returns:
+            rq.Response: The response object.
+        """
         response = rq.post(f"https://api.fivetran.com/v1/connectors",
                            headers={"Authorization": f"Basic {deploy_key}"},
                            json={
@@ -428,24 +659,54 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         return response

     def __create_upload_file(self, project_path: str) -> str:
-
+        """Creates an upload file for the given project path.
+
+        Args:
+            project_path (str): The path to the project.
+
+        Returns:
+            str: The path to the upload file.
+        """
+        print("INFO: Packaging your project for upload...")
         zip_file_path = self.__zip_folder(project_path)
         print("✓")
         return zip_file_path

     def __zip_folder(self, project_path: str) -> str:
+        """Zips the folder at the given project path.
+
+        Args:
+            project_path (str): The path to the project.
+
+        Returns:
+            str: The path to the zip file.
+        """
         upload_filepath = os.path.join(project_path, UPLOAD_FILENAME)
+        connector_file_exists = False

         with ZipFile(upload_filepath, 'w', ZIP_DEFLATED) as zipf:
             for root, files in self.__dir_walker(project_path):
                 for file in files:
+                    if file == "connector.py":
+                        connector_file_exists = True
                     file_path = os.path.join(root, file)
                     arcname = os.path.relpath(file_path, project_path)
                     zipf.write(file_path, arcname)

+        if not connector_file_exists:
+            print("SEVERE: The 'connector.py' file is missing. Please ensure that 'connector.py' is present in your project directory, and that the file name is in lowercase letters. All custom connectors require this file because Fivetran calls it to start a sync.")
+            os._exit(1)
         return upload_filepath

     def __dir_walker(self, top):
+        """Walks the directory tree starting at the given top directory.
+
+        Args:
+            top (str): The top directory to start the walk.
+
+        Yields:
+            tuple: A tuple containing the current directory path and a list of files.
+        """
         dirs, files = [], []
         for name in os.listdir(top):
             path = os.path.join(top, name)
@@ -464,7 +725,18 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):

     @staticmethod
     def __upload(local_path: str, deploy_key: str, group_id: str, connection: str) -> bool:
-
+        """Uploads the local code file for the specified group and connection.
+
+        Args:
+            local_path (str): The local file path.
+            deploy_key (str): The deployment key.
+            group_id (str): The group ID.
+            connection (str): The connection name.
+
+        Returns:
+            bool: True if the upload was successful, False otherwise.
+        """
+        print("INFO: Uploading your project...", end="", flush=True)
         response = rq.post(f"https://api.fivetran.com/v2/deploy/{group_id}/{connection}",
                            files={'file': open(local_path, 'rb')},
                            headers={"Authorization": f"Basic {deploy_key}"})
@@ -472,11 +744,16 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
             print("✓")
             return True

-        print("
+        print("SEVERE: Unable to upload the project, failed with error: ", response.reason)
         return False

     @staticmethod
     def __get_os_name() -> str:
+        """Returns the name of the operating system.
+
+        Returns:
+            str: The name of the operating system.
+        """
         os_sysname = platform.system().lower()
         if os_sysname.startswith("darwin"):
             return MAC_OS
@@ -488,31 +765,40 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):

     @staticmethod
     def __get_group_info(group: str, deploy_key: str) -> tuple[str, str]:
+        """Retrieves the group information for the specified group and deployment key.
+
+        Args:
+            group (str): The group name.
+            deploy_key (str): The deployment key.
+
+        Returns:
+            tuple[str, str]: A tuple containing the group ID and group name.
+        """
         resp = rq.get("https://api.fivetran.com/v1/groups",
                       headers={"Authorization": f"Basic {deploy_key}"})

         if not resp.ok:
-            print(f"
+            print(f"SEVERE: Unable to fetch list of destination names, status code = {resp.status_code}")
             os._exit(1)

         # TODO: Do we need to implement pagination?
         groups = resp.json()['data']['items']
         if not groups:
-            print("
+            print("SEVERE: No destinations defined in the account")
             os._exit(1)

         if len(groups) == 1:
             return groups[0]['id'], groups[0]['name']
         else:
             if not group:
-                print("
+                print("SEVERE: Destination name is required when there are multiple destinations in the account")
                 os._exit(1)

             for grp in groups:
                 if grp['name'] == group:
                     return grp['id'], grp['name']

-            print(f"
+            print(f"SEVERE: Specified destination was not found in the account: {group}")
             os._exit(1)

     # Call this method to run the connector in production
@@ -521,6 +807,17 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
             configuration: dict = None,
             state: dict = None,
             log_level: Logging.Level = Logging.Level.INFO) -> grpc.Server:
+        """Runs the connector server.
+
+        Args:
+            port (int): The port number to listen for incoming requests.
+            configuration (dict): The configuration dictionary.
+            state (dict): The state dictionary.
+            log_level (Logging.Level): The logging level.
+
+        Returns:
+            grpc.Server: The gRPC server instance.
+        """
         self.configuration = _check_dict(configuration, True)
         self.state = _check_dict(state)
         Logging.LOG_LEVEL = log_level

@@ -529,7 +826,6 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         connector_sdk_pb2_grpc.add_ConnectorServicer_to_server(self, server)
         server.add_insecure_port("[::]:" + str(port))
         server.start()
-        print("Connector started, listening on " + str(port))
         if DEBUGGING:
             return server
         server.wait_for_termination()
@@ -541,6 +837,18 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
               configuration: dict = None,
               state: dict = None,
               log_level: Logging.Level = Logging.Level.FINE) -> bool:
+        """Tests the connector code by running it with the connector tester.
+
+        Args:
+            project_path (str): The path to the project.
+            port (int): The port number to listen for incoming requests.
+            configuration (dict): The configuration dictionary.
+            state (dict): The state dictionary.
+            log_level (Logging.Level): The logging level.
+
+        Returns:
+            bool: True if there was an error, False otherwise.
+        """
         global DEBUGGING
         DEBUGGING = True

@@ -568,17 +876,17 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         download_filename = f"sdk-connector-tester-{os_name}-{TESTER_VERSION}.zip"
         download_filepath = os.path.join(tester_root_dir, download_filename)
         try:
-            print(f"Downloading connector tester version {TESTER_VERSION}
+            print(f"INFO: Downloading connector tester version: {TESTER_VERSION} ", end="", flush=True)
             download_url = f"https://github.com/fivetran/fivetran_sdk_tools/releases/download/{TESTER_VERSION}/{download_filename}"
             r = rq.get(download_url)
             if r.ok:
                 with open(download_filepath, 'wb') as fo:
                     fo.write(r.content)
             else:
-                print(f"\
+                print(f"\nSEVERE: Failed to download the connector tester. Please check your access permissions or try again later ( status code: {r.status_code}), url: {download_url}")
                 os._exit(1)
         except:
-            print(f"\
+            print(f"\nSEVERE: Failed to download the connector tester. Error details: {traceback.format_exc()}")
             os._exit(1)

         try:

@@ -593,21 +901,22 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
             os.chmod(java_exe, st.st_mode | stat.S_IEXEC)
             print("✓")
         except:
-            print(f"\
+            print(f"\nSEVERE: Failed to install the connector tester. Error details: ", traceback.format_exc())
             shutil.rmtree(tester_root_dir)
             os._exit(1)

         project_path = os.getcwd() if project_path is None else project_path
-
+        self.validate_requirements_file(project_path, False)
+        print(f"INFO: Debugging connector at: {project_path}")
         server = self.run(port, configuration, state, log_level=log_level)

         # Uncomment this to run the tester manually
-        #server.wait_for_termination()
+        # server.wait_for_termination()

         error = False
         try:
-            print(f"
-            for log_msg in self.__run_tester(java_exe, tester_root_dir, project_path):
+            print(f"INFO: Running connector tester...")
+            for log_msg in self.__run_tester(java_exe, tester_root_dir, project_path, port):
                 print(log_msg, end="")
         except:
             print(traceback.format_exc())
@@ -618,12 +927,51 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         return error

     @staticmethod
-    def __java_exe(location: str, os_name: str):
+    def __java_exe(location: str, os_name: str) -> str:
+        """Returns the path to the Java executable.
+
+        Args:
+            location (str): The location of the Java executable.
+            os_name (str): The name of the operating system.
+
+        Returns:
+            str: The path to the Java executable.
+        """
         java_exe_base = os.path.join(location, "bin", "java")
         return f"{java_exe_base}.exe" if os_name == WIN_OS else java_exe_base

     @staticmethod
-    def
+    def process_stream(stream):
+        """Processes a stream of text lines, replacing occurrences of a specified pattern.
+
+        This method reads each line from the provided stream, searches for occurrences of
+        a predefined pattern, and replaces them with a specified replacement string.
+
+        Args:
+            stream (iterable): An iterable stream of text lines, typically from a file or another input source.
+
+        Yields:
+            str: Each line from the stream after replacing the matched pattern with the replacement string.
+        """
+        pattern = re.compile(r'com\.fivetran\.fivetran_sdk\.tools\.testers\.\S+')
+        replacement = 'Fivetran SDK Tester'
+
+        for line in iter(stream.readline, ""):
+            modified_line = pattern.sub(replacement, line)
+            yield modified_line
+
+    @staticmethod
+    def __run_tester(java_exe: str, root_dir: str, project_path: str, port: int = 50051):
+        """Runs the connector tester.
+
+        Args:
+            java_exe (str): The path to the Java executable.
+            root_dir (str): The root directory.
+            project_path (str): The path to the project.
+
+        Yields:
+            str: The log messages from the tester.
+        """
         working_dir = os.path.join(project_path, OUTPUT_FILES_DIR)
         try:
             os.mkdir(working_dir)
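A quick illustration of what `process_stream` does to the tester's log lines; the input line here is made up, and the stream is faked with StringIO:

    from io import StringIO
    from fivetran_connector_sdk import Connector

    raw = StringIO("SEVERE com.fivetran.fivetran_sdk.tools.testers.OperationTester failed\n")
    for line in Connector.process_stream(raw):
        print(line, end="")
    # -> "SEVERE Fivetran SDK Tester failed"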
@@ -634,11 +982,16 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
                "-jar",
                os.path.join(root_dir, TESTER_FILENAME),
                "--connector-sdk=true",
-               f"--
+               f"--port={port}",
+               f"--working-dir={working_dir}",
+               "--tester-type=source"]
+
+        popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
+        for line in Connector.process_stream(popen.stderr):
+            yield line

-
-
-            yield stdout_line
+        for line in Connector.process_stream(popen.stdout):
+            yield line
         popen.stdout.close()
         return_code = popen.wait()
         if return_code:
@@ -646,6 +999,15 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):

     # -- Methods below override ConnectorServicer methods
     def ConfigurationForm(self, request, context):
+        """Overrides the ConfigurationForm method from ConnectorServicer.
+
+        Args:
+            request: The gRPC request.
+            context: The gRPC context.
+
+        Returns:
+            common_pb2.ConfigurationFormResponse: An empty configuration form response.
+        """
         if not self.configuration:
             self.configuration = {}

@@ -653,9 +1015,27 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         return common_pb2.ConfigurationFormResponse()

     def Test(self, request, context):
+        """Overrides the Test method from ConnectorServicer.
+
+        Args:
+            request: The gRPC request.
+            context: The gRPC context.
+
+        Returns:
+            None: As this method is not implemented.
+        """
         return None

     def Schema(self, request, context):
+        """Overrides the Schema method from ConnectorServicer.
+
+        Args:
+            request: The gRPC request.
+            context: The gRPC context.
+
+        Returns:
+            connector_sdk_pb2.SchemaResponse: The schema response.
+        """
         global TABLES

         if not self.schema_method:
@@ -718,7 +1098,7 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
                     elif type.upper() == "JSON":
                         column.type = common_pb2.DataType.JSON
                     else:
-                        raise ValueError("Unrecognized column type
+                        raise ValueError("Unrecognized column type encountered:: ", str(type))

                 elif isinstance(type, dict):
                     if type['type'].upper() != "DECIMAL":
@@ -741,6 +1121,15 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         return connector_sdk_pb2.SchemaResponse(without_schema=common_pb2.TableList(tables=TABLES.values()))

     def Update(self, request, context):
+        """Overrides the Update method from ConnectorServicer.
+
+        Args:
+            request: The gRPC request.
+            context: The gRPC context.
+
+        Yields:
+            connector_sdk_pb2.UpdateResponse: The update response.
+        """
         configuration = self.configuration if self.configuration else request.configuration
         state = self.state if self.state else json.loads(request.state_json)

@@ -757,7 +1146,15 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
             raise e


-def find_connector_object(project_path):
+def find_connector_object(project_path) -> Connector:
+    """Finds the connector object in the given project path.
+    Args:
+        project_path (str): The path to the project.
+
+    Returns:
+        object: The connector object.
+    """
+
     module_name = "connector_connector_code"
     connector_py = os.path.join(project_path, "connector.py")
     spec = importlib.util.spec_from_file_location(module_name, connector_py)

@@ -770,15 +1167,19 @@ def find_connector_object(project_path):
         if '<fivetran_connector_sdk.Connector object at' in str(obj_attr):
             return obj_attr

-    print("
+    print("SEVERE: The connector object is missing. Please ensure that you have defined a connector object using the correct syntax in your `connector.py` file. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsrequiredobjectconnector")
     sys.exit(1)


 def main():
+    """The main entry point for the script.
+    Parses command line arguments and passes them to connector object methods
+    """
+
     parser = argparse.ArgumentParser(allow_abbrev=False)

     # Positional
-    parser.add_argument("command", help="debug|
+    parser.add_argument("command", help="debug|deploy")
     parser.add_argument("project_path", nargs='?', default=os.getcwd(), help="Path to connector project directory")

     # Optional (Not all of these are valid with every mutually exclusive option below)
@@ -806,7 +1207,7 @@ def main():
             with open(json_filepath, 'r') as fi:
                 configuration = json.load(fi)
         else:
-            raise ValueError("Configuration
+            raise ValueError("Configuration must be provided as a JSON file. Please check your input. Reference: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")
     else:
         configuration = {}

@@ -827,25 +1228,10 @@ def main():
             print("WARNING: 'state' parameter is not used for 'deploy' command")
         connector_object.deploy(args.project_path, ft_deploy_key, ft_group, ft_connection, configuration)

-    elif args.command.lower() == "start":
-        if args.port:
-            print("WARNING: 'port' parameter is not used for 'deploy' command")
-        if args.state:
-            print("WARNING: 'state' parameter is not used for 'deploy' command")
-        connector_object.start(ft_deploy_key, ft_group, ft_connection)
-
     elif args.command.lower() == "debug":
        port = 50051 if not args.port else args.port
        connector_object.debug(args.project_path, port, configuration, state)

-    elif args.command.lower() == "run":
-        try:
-            port = 50051 if not args.port else args.port
-            connector_object.run(port, configuration, state)
-        except:
-            Logging.severe(traceback.format_exc())
-            os._exit(1)
-
     else:
         raise NotImplementedError("Invalid command: ", args.command)

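With `start` and `run` removed, the entry point only dispatches `debug` and `deploy`, and both expect a module-level Connector object in connector.py (that is what `find_connector_object` searches for). A minimal, hedged sketch of such a file, assuming the default arguments of `debug()` behave as the snippet above suggests (project_path falls back to the current directory):

    from fivetran_connector_sdk import Connector
    from fivetran_connector_sdk import Operations as op

    def update(configuration: dict, state: dict):
        yield op.upsert(table="hello", data={"message": "hello world"})
        yield op.checkpoint(state={})

    connector = Connector(update=update)

    if __name__ == "__main__":
        # Runs the local Java tester against this connector; deploy(...) would
        # instead upload it to a Fivetran destination.
        connector.debug()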
{fivetran_connector_sdk-0.7.24.3.dist-info → fivetran_connector_sdk-0.8.19.1.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fivetran_connector_sdk
-Version: 0.7.24.3
+Version: 0.8.19.1
 Summary: Build custom connectors on Fivetran platform
 Author-email: Fivetran <developers@fivetran.com>
 Project-URL: Homepage, https://fivetran.com/docs/connectors/connector-sdk

@@ -10,9 +10,9 @@ Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
-License-File: LICENSE
 Requires-Dist: grpcio ==1.60.1
 Requires-Dist: grpcio-tools ==1.60.1
 Requires-Dist: requests ==2.31.0
 Requires-Dist: get-pypi-latest-version ==0.0.12
+Requires-Dist: pipreqs ==0.5.0

{fivetran_connector_sdk-0.7.24.3.dist-info → fivetran_connector_sdk-0.8.19.1.dist-info}/RECORD
RENAMED
@@ -1,4 +1,4 @@
-fivetran_connector_sdk/__init__.py,sha256=
+fivetran_connector_sdk/__init__.py,sha256=DRdHyxqKeBGsvwYcqe2aCmM3AlVCiqVjbLQvRkfFVs0,49915
 fivetran_connector_sdk/protos/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fivetran_connector_sdk/protos/common_pb2.py,sha256=kUwVcyZHgLigNR-KnHZn7dHrlxaMnUXqzprsRx6T72M,6831
 fivetran_connector_sdk/protos/common_pb2.pyi,sha256=S0hdIzoXyyOKD5cjiGeDDLYpQ9J3LjAvu4rCj1JvJWE,9038

@@ -6,9 +6,8 @@ fivetran_connector_sdk/protos/common_pb2_grpc.py,sha256=1oboBPFxaTEXt9Aw7EAj8gXH
 fivetran_connector_sdk/protos/connector_sdk_pb2.py,sha256=9Ke_Ti1s0vAeXapfXT-EryrT2-TSGQb8mhs4gxTpUMk,7732
 fivetran_connector_sdk/protos/connector_sdk_pb2.pyi,sha256=FWYxRgshEF3QDYAE0TM_mv4N2gGvkxCH_uPpxnMc4oA,8406
 fivetran_connector_sdk/protos/connector_sdk_pb2_grpc.py,sha256=ZfJLp4DW7uP4pFOZ74s_wQ6tD3eIPi-08UfnLwe4tzo,7163
-fivetran_connector_sdk-0.
-fivetran_connector_sdk-0.
-fivetran_connector_sdk-0.
-fivetran_connector_sdk-0.
-fivetran_connector_sdk-0.
-fivetran_connector_sdk-0.7.24.3.dist-info/RECORD,,
+fivetran_connector_sdk-0.8.19.1.dist-info/METADATA,sha256=fgkWljNC3yuszXoF9mo6HGx2mw8KvKLAhf6G5uMuxHs,708
+fivetran_connector_sdk-0.8.19.1.dist-info/WHEEL,sha256=HiCZjzuy6Dw0hdX5R3LCFPDmFS4BWl8H-8W39XfmgX4,91
+fivetran_connector_sdk-0.8.19.1.dist-info/entry_points.txt,sha256=uQn0KPnFlQmXJfxlk0tifdNsSXWfVlnAFzNqjXZM_xM,57
+fivetran_connector_sdk-0.8.19.1.dist-info/top_level.txt,sha256=-_xk2MFY4psIh7jw1lJePMzFb5-vask8_ZtX-UzYWUI,23
+fivetran_connector_sdk-0.8.19.1.dist-info/RECORD,,
fivetran_connector_sdk-0.7.24.3.dist-info/LICENSE
DELETED
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2023 Fivetran
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.

{fivetran_connector_sdk-0.7.24.3.dist-info → fivetran_connector_sdk-0.8.19.1.dist-info}/entry_points.txt
File without changes

{fivetran_connector_sdk-0.7.24.3.dist-info → fivetran_connector_sdk-0.8.19.1.dist-info}/top_level.txt
File without changes