fivetran-connector-sdk 0.9.16.1__tar.gz → 0.10.7.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/PKG-INFO +1 -1
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/__init__.py +316 -280
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk.egg-info/PKG-INFO +1 -1
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/README.md +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/pyproject.toml +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/setup.cfg +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/__init__.py +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/common_pb2.py +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/common_pb2.pyi +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/common_pb2_grpc.py +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/connector_sdk_pb2.py +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/connector_sdk_pb2.pyi +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/connector_sdk_pb2_grpc.py +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk.egg-info/SOURCES.txt +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk.egg-info/dependency_links.txt +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk.egg-info/entry_points.txt +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk.egg-info/requires.txt +0 -0
- {fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk.egg-info/top_level.txt +0 -0
{fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fivetran_connector_sdk
-Version: 0.9.16.1
+Version: 0.10.7.1
 Summary: Build custom connectors on Fivetran platform
 Author-email: Fivetran <developers@fivetran.com>
 Project-URL: Homepage, https://fivetran.com/docs/connectors/connector-sdk

{fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/__init__.py

@@ -1,5 +1,5 @@
 import argparse
-from typing import Optional
+from typing import Optional, Tuple
 
 import grpc
 import importlib.util
@@ -25,7 +25,7 @@ from fivetran_connector_sdk.protos import common_pb2
 from fivetran_connector_sdk.protos import connector_sdk_pb2
 from fivetran_connector_sdk.protos import connector_sdk_pb2_grpc
 
-__version__ = "0.9.16.1"
+__version__ = "0.10.07.1"
 
 MAC_OS = "mac"
 WIN_OS = "windows"
@@ -38,6 +38,7 @@ UPLOAD_FILENAME = "code.zip"
 LAST_VERSION_CHECK_FILE = "_last_version_check"
 ROOT_LOCATION = ".ft_sdk_connector_tester"
 OUTPUT_FILES_DIR = "files"
+REQUIREMENTS_TXT = "requirements.txt"
 ONE_DAY_IN_SEC = 24 * 60 * 60
 
 EXCLUDED_DIRS = ["__pycache__", "lib", "include", OUTPUT_FILES_DIR]
@@ -121,7 +122,7 @@ class Logging:
 class Operations:
     @staticmethod
     def upsert(table: str, data: dict) -> list[connector_sdk_pb2.UpdateResponse]:
-        """
+        """Updates records with the same primary key if already present in the destination. Inserts new records if not already present in the destination.
 
         Args:
             table (str): The name of the table.
@@ -207,18 +208,29 @@ class Operations:
 
     @staticmethod
     def checkpoint(state: dict) -> connector_sdk_pb2.UpdateResponse:
-        """
+        """Checkpoint saves the connector's state. State is a dict which stores information to continue the
+        sync from where it left off in the previous sync. For example, you may choose to have a field called
+        "cursor" with a timestamp value to indicate up to when the data has been synced. This makes it possible
+        for the next sync to fetch data incrementally from that time forward. See below for a few example fields
+        which act as parameters for use by the connector code.\n
+        {
+            "initialSync": true,\n
+            "cursor": "1970-01-01T00:00:00.00Z",\n
+            "last_resync": "1970-01-01T00:00:00.00Z",\n
+            "thread_count": 5,\n
+            "api_quota_left": 5000000
+        }
 
         Args:
-            state (dict): The state to checkpoint.
+            state (dict): The state to checkpoint/save.
 
         Returns:
             connector_sdk_pb2.UpdateResponse: The checkpoint response.
         """
         _yield_check(inspect.stack())
         return connector_sdk_pb2.UpdateResponse(
-
-
+            operation=connector_sdk_pb2.Operation(checkpoint=connector_sdk_pb2.Checkpoint(
+                state_json=json.dumps(state))))
 
 
 def check_newer_version():
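The checkpoint pattern described in the new docstring is driven from a connector's `update` function. The following is an illustrative sketch only, not part of this diff: the table name, cursor field, and `fetch_rows` stub are hypothetical, while `Operations.upsert`, `Operations.checkpoint`, and `Connector(update=...)` are the SDK pieces shown in these hunks.

```python
# Illustrative sketch, not part of the diff. Table name, cursor field, and
# fetch_rows() are hypothetical; upsert/checkpoint/Connector come from the SDK.
from fivetran_connector_sdk import Connector, Operations as op


def fetch_rows(since: str):
    # Stand-in for a real source read; yields rows changed after `since`.
    yield {"id": 1, "updated_at": "2024-01-01T00:00:00.00Z"}


def update(configuration: dict, state: dict):
    cursor = state.get("cursor", "1970-01-01T00:00:00.00Z")
    for row in fetch_rows(since=cursor):
        # Insert-or-update by primary key in the destination table.
        yield op.upsert(table="orders", data=row)
        cursor = row["updated_at"]
    # Persist progress so the next sync resumes from this cursor.
    yield op.checkpoint(state={"cursor": cursor})


connector = Connector(update=update)
```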
@@ -284,76 +296,85 @@ def _map_data_to_columns(data: dict, columns: dict) -> dict:
         if v is None:
             mapped_data[k] = common_pb2.ValueType(null=True)
         elif (k in columns) and columns[k].type != common_pb2.DataType.UNSPECIFIED:
-
-                mapped_data[k] = common_pb2.ValueType(bool=v)
-            elif columns[k].type == common_pb2.DataType.SHORT:
-                mapped_data[k] = common_pb2.ValueType(short=v)
-            elif columns[k].type == common_pb2.DataType.INT:
-                mapped_data[k] = common_pb2.ValueType(int=v)
-            elif columns[k].type == common_pb2.DataType.LONG:
-                mapped_data[k] = common_pb2.ValueType(long=v)
-            elif columns[k].type == common_pb2.DataType.DECIMAL:
-                mapped_data[k] = common_pb2.ValueType(decimal=v)
-            elif columns[k].type == common_pb2.DataType.FLOAT:
-                mapped_data[k] = common_pb2.ValueType(float=v)
-            elif columns[k].type == common_pb2.DataType.DOUBLE:
-                mapped_data[k] = common_pb2.ValueType(double=v)
-            elif columns[k].type == common_pb2.DataType.NAIVE_DATE:
-                timestamp = timestamp_pb2.Timestamp()
-                dt = datetime.strptime(v, "%Y-%m-%d")
-                timestamp.FromDatetime(dt)
-                mapped_data[k] = common_pb2.ValueType(naive_date=timestamp)
-            elif columns[k].type == common_pb2.DataType.NAIVE_DATETIME:
-                if '.' not in v: v = v + ".0"
-                timestamp = timestamp_pb2.Timestamp()
-                dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f")
-                timestamp.FromDatetime(dt)
-                mapped_data[k] = common_pb2.ValueType(naive_datetime=timestamp)
-            elif columns[k].type == common_pb2.DataType.UTC_DATETIME:
-                timestamp = timestamp_pb2.Timestamp()
-                if '.' in v:
-                    dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f%z")
-                else:
-                    dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S%z")
-                timestamp.FromDatetime(dt)
-                mapped_data[k] = common_pb2.ValueType(utc_datetime=timestamp)
-            elif columns[k].type == common_pb2.DataType.BINARY:
-                mapped_data[k] = common_pb2.ValueType(binary=v)
-            elif columns[k].type == common_pb2.DataType.XML:
-                mapped_data[k] = common_pb2.ValueType(xml=v)
-            elif columns[k].type == common_pb2.DataType.STRING:
-                incoming = v if isinstance(v, str) else str(v)
-                mapped_data[k] = common_pb2.ValueType(string=incoming)
-            elif columns[k].type == common_pb2.DataType.JSON:
-                mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
-            else:
-                raise ValueError(f"Unsupported data type encountered: {columns[k].type}. Please use valid data types.")
+            map_defined_data_type(columns, k, mapped_data, v)
         else:
-
-            if isinstance(v, int):
-                if abs(v) > 2147483647:
-                    mapped_data[k] = common_pb2.ValueType(long=v)
-                else:
-                    mapped_data[k] = common_pb2.ValueType(int=v)
-            elif isinstance(v, float):
-                mapped_data[k] = common_pb2.ValueType(float=v)
-            elif isinstance(v, bool):
-                mapped_data[k] = common_pb2.ValueType(bool=v)
-            elif isinstance(v, bytes):
-                mapped_data[k] = common_pb2.ValueType(binary=v)
-            elif isinstance(v, list):
-                raise ValueError("Values for the columns cannot be of type 'list'. Please ensure that all values are of a supported type. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#supporteddatatypes")
-            elif isinstance(v, dict):
-                mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
-            elif isinstance(v, str):
-                mapped_data[k] = common_pb2.ValueType(string=v)
-            else:
-                # Convert arbitrary objects to string
-                mapped_data[k] = common_pb2.ValueType(string=str(v))
+            map_inferred_data_type(k, mapped_data, v)
 
     return mapped_data
 
 
+def map_inferred_data_type(k, mapped_data, v):
+    # We can infer type from the value
+    if isinstance(v, int):
+        if abs(v) > 2147483647:
+            mapped_data[k] = common_pb2.ValueType(long=v)
+        else:
+            mapped_data[k] = common_pb2.ValueType(int=v)
+    elif isinstance(v, float):
+        mapped_data[k] = common_pb2.ValueType(float=v)
+    elif isinstance(v, bool):
+        mapped_data[k] = common_pb2.ValueType(bool=v)
+    elif isinstance(v, bytes):
+        mapped_data[k] = common_pb2.ValueType(binary=v)
+    elif isinstance(v, list):
+        raise ValueError(
+            "Values for the columns cannot be of type 'list'. Please ensure that all values are of a supported type. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#supporteddatatypes")
+    elif isinstance(v, dict):
+        mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
+    elif isinstance(v, str):
+        mapped_data[k] = common_pb2.ValueType(string=v)
+    else:
+        # Convert arbitrary objects to string
+        mapped_data[k] = common_pb2.ValueType(string=str(v))
+
+
+def map_defined_data_type(columns, k, mapped_data, v):
+    if columns[k].type == common_pb2.DataType.BOOLEAN:
+        mapped_data[k] = common_pb2.ValueType(bool=v)
+    elif columns[k].type == common_pb2.DataType.SHORT:
+        mapped_data[k] = common_pb2.ValueType(short=v)
+    elif columns[k].type == common_pb2.DataType.INT:
+        mapped_data[k] = common_pb2.ValueType(int=v)
+    elif columns[k].type == common_pb2.DataType.LONG:
+        mapped_data[k] = common_pb2.ValueType(long=v)
+    elif columns[k].type == common_pb2.DataType.DECIMAL:
+        mapped_data[k] = common_pb2.ValueType(decimal=v)
+    elif columns[k].type == common_pb2.DataType.FLOAT:
+        mapped_data[k] = common_pb2.ValueType(float=v)
+    elif columns[k].type == common_pb2.DataType.DOUBLE:
+        mapped_data[k] = common_pb2.ValueType(double=v)
+    elif columns[k].type == common_pb2.DataType.NAIVE_DATE:
+        timestamp = timestamp_pb2.Timestamp()
+        dt = datetime.strptime(v, "%Y-%m-%d")
+        timestamp.FromDatetime(dt)
+        mapped_data[k] = common_pb2.ValueType(naive_date=timestamp)
+    elif columns[k].type == common_pb2.DataType.NAIVE_DATETIME:
+        if '.' not in v: v = v + ".0"
+        timestamp = timestamp_pb2.Timestamp()
+        dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f")
+        timestamp.FromDatetime(dt)
+        mapped_data[k] = common_pb2.ValueType(naive_datetime=timestamp)
+    elif columns[k].type == common_pb2.DataType.UTC_DATETIME:
+        timestamp = timestamp_pb2.Timestamp()
+        if '.' in v:
+            dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f%z")
+        else:
+            dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S%z")
+        timestamp.FromDatetime(dt)
+        mapped_data[k] = common_pb2.ValueType(utc_datetime=timestamp)
+    elif columns[k].type == common_pb2.DataType.BINARY:
+        mapped_data[k] = common_pb2.ValueType(binary=v)
+    elif columns[k].type == common_pb2.DataType.XML:
+        mapped_data[k] = common_pb2.ValueType(xml=v)
+    elif columns[k].type == common_pb2.DataType.STRING:
+        incoming = v if isinstance(v, str) else str(v)
+        mapped_data[k] = common_pb2.ValueType(string=incoming)
+    elif columns[k].type == common_pb2.DataType.JSON:
+        mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
+    else:
+        raise ValueError(f"Unsupported data type encountered: {columns[k].type}. Please use valid data types.")
+
+
 def _yield_check(stack):
     """Checks for the presence of 'yield' in the calling code.
     Args:
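For orientation, the newly extracted `map_inferred_data_type` helper reproduces the old inline fallback logic: when a column has no declared type, the protobuf `ValueType` is inferred from the Python value. A hedged sketch of that behavior, with arbitrary keys and values, assuming the helper is importable from the package as defined above:

```python
# Illustrative only; keys and values are arbitrary. Mirrors the fallback
# branch of _map_data_to_columns() shown in this hunk.
from fivetran_connector_sdk import map_inferred_data_type

mapped = {}
map_inferred_data_type("big_id", mapped, 3_000_000_000)  # |v| > 2147483647 -> long
map_inferred_data_type("small_id", mapped, 42)           # fits in 32 bits -> int
map_inferred_data_type("payload", mapped, {"a": 1})      # dict -> json (serialized)
map_inferred_data_type("note", mapped, "hello")          # str -> string
# A list value raises ValueError, since lists are not a supported column type.
```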
@@ -373,11 +394,13 @@ def _yield_check(stack):
     calling_code = stack[1].code_context[0]
     if f"{called_method}(" in calling_code:
         if 'yield' not in calling_code:
-            print(f"SEVERE: Please add 'yield' to '{called_method}' operation on line {stack[1].lineno} in file '{stack[1].filename}'")
+            print(
+                f"SEVERE: Please add 'yield' to '{called_method}' operation on line {stack[1].lineno} in file '{stack[1].filename}'")
             os._exit(1)
     else:
         # This should never happen
-        raise RuntimeError(f"The '{called_method}' function is missing in the connector. Please ensure that the '{called_method}' function is properly defined in your code to proceed. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsmethods")
+        raise RuntimeError(
+            f"The '{called_method}' function is missing in the connector. Please ensure that the '{called_method}' function is properly defined in your code to proceed. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsmethods")
 
 
 def _check_dict(incoming: dict, string_only: bool = False) -> dict:
@@ -394,12 +417,14 @@ def _check_dict(incoming: dict, string_only: bool = False) -> dict:
         return {}
 
     if not isinstance(incoming, dict):
-        raise ValueError("Configuration must be provided as a JSON dictionary. Please check your input. Reference: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")
+        raise ValueError(
+            "Configuration must be provided as a JSON dictionary. Please check your input. Reference: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")
 
     if string_only:
         for k, v in incoming.items():
             if not isinstance(v, str):
-                print("SEVERE: All values in the configuration must be STRING. Please check your configuration and ensure that every value is a STRING.")
+                print(
+                    "SEVERE: All values in the configuration must be STRING. Please check your configuration and ensure that every value is a STRING.")
                 os._exit(1)
 
     return incoming
@@ -425,6 +450,23 @@ def log_unused_deps_error(package_name: str, version: str):
     os._exit(1)
 
 
+def validate_deploy_parameters(connection, deploy_key):
+    if not deploy_key or not connection:
+        print("SEVERE: The deploy command needs the following parameters:"
+              "\n\tRequired:\n"
+              "\t\t--api-key <BASE64-ENCODED-FIVETRAN-API-KEY-FOR-DEPLOYMENT>\n"
+              "\t\t--connection <VALID-CONNECTOR-SCHEMA_NAME>\n"
+              "\t(Optional):\n"
+              "\t\t--destination <DESTINATION_NAME> (Becomes required if there are multiple destinations)\n"
+              "\t\t--configuration <CONFIGURATION_FILE> (Completely replaces the existing configuration)")
+        os._exit(1)
+    if not is_connection_name_valid(connection):
+        print(f"SEVERE: Connection name: {connection} is invalid!\n The connection name should start with an "
+              f"underscore or a lowercase letter (a-z), followed by any combination of underscores, lowercase "
+              f"letters, or digits (0-9). Uppercase characters are not allowed.")
+        os._exit(1)
+
+
 class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
     def __init__(self, update, schema=None):
         """Initializes the Connector instance.
@@ -439,22 +481,6 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         self.configuration = None
         self.state = None
 
-    @staticmethod
-    def __unpause_connection(id: str, deploy_key: str) -> bool:
-        """Unpauses the connection with the given ID and deployment key.
-
-        Args:
-            id (str): The connection ID.
-            deploy_key (str): The deployment key.
-
-        Returns:
-            bool: True if the connection was successfully unpaused, False otherwise.
-        """
-        resp = rq.patch(f"https://api.fivetran.com/v1/connectors/{id}",
-                        headers={"Authorization": f"Basic {deploy_key}"},
-                        json={"force": True})
-        return resp.ok
-
     @staticmethod
     def fetch_requirements_from_file(file_path: str) -> list[str]:
         """Reads a requirements file and returns a list of dependencies.
@@ -515,13 +541,7 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         os.remove(tmp_requirements_file_path)
 
         if len(tmp_requirements) > 0:
-
-                requirements = self.fetch_requirements_as_dict(self, os.path.join(project_path, 'requirements.txt'))
-            else:
-                with open("requirements.txt", 'w'):
-                    pass
-                requirements = {}
-                print("WARNING: Adding `requirements.txt` file to your project folder.")
+            requirements = self.load_or_add_requirements_file(project_path)
 
             version_mismatch_deps = {key: tmp_requirements[key] for key in
                                      (requirements.keys() & tmp_requirements.keys())
@@ -532,35 +552,51 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
 
         missing_deps = {key: tmp_requirements[key] for key in (tmp_requirements.keys() - requirements.keys())}
         if missing_deps:
-
-            print(log_level +
-                  ": Please include the following dependency libraries in requirements.txt, to be used by "
-                  "Fivetran production. "
-                  "For more information, please visit: "
-                  "https://fivetran.com/docs/connectors/connector-sdk/detailed-guide"
-                  "#workingwithrequirementstxtfile")
-            print(*list(missing_deps.values()))
-            if is_deploy:
-                os._exit(1)
+            self.handle_missing_deps(is_deploy, missing_deps)
 
         unused_deps = list(requirements.keys() - tmp_requirements.keys())
         if unused_deps:
-
-                log_unused_deps_error("fivetran_connector_sdk", __version__)
-            elif 'requests' in unused_deps:
-                log_unused_deps_error("requests", "2.32.3")
-            else:
-                print("INFO: The following dependencies are not needed, "
-                      "they are not used or already installed. Please remove them from requirements.txt:")
-                print(*unused_deps)
+            self.handle_unused_deps(unused_deps)
         else:
-            if os.path.exists(
+            if os.path.exists(REQUIREMENTS_TXT):
                 print("WARNING: `requirements.txt` is not required as no additional "
                       "Python libraries are required or all required libraries for "
                       "your code are pre-installed.")
 
         if is_deploy: print("Successful validation of requirements.txt")
 
+    def handle_unused_deps(self, unused_deps):
+        if 'fivetran_connector_sdk' in unused_deps:
+            log_unused_deps_error("fivetran_connector_sdk", __version__)
+        elif 'requests' in unused_deps:
+            log_unused_deps_error("requests", "2.32.3")
+        else:
+            print("INFO: The following dependencies are not needed, "
+                  "they are not used or already installed. Please remove them from requirements.txt:")
+            print(*unused_deps)
+
+    def handle_missing_deps(self, is_deploy, missing_deps):
+        log_level = "ERROR" if is_deploy else "WARNING"
+        print(log_level +
+              ": Please include the following dependency libraries in requirements.txt, to be used by "
+              "Fivetran production. "
+              "For more information, please visit: "
+              "https://fivetran.com/docs/connectors/connector-sdk/detailed-guide"
+              "#workingwithrequirementstxtfile")
+        print(*list(missing_deps.values()))
+        if is_deploy:
+            os._exit(1)
+
+    def load_or_add_requirements_file(self, project_path):
+        if os.path.exists(REQUIREMENTS_TXT):
+            requirements = self.fetch_requirements_as_dict(self, os.path.join(project_path, 'requirements.txt'))
+        else:
+            with open(REQUIREMENTS_TXT, 'w'):
+                pass
+            requirements = {}
+            print("WARNING: Adding `requirements.txt` file to your project folder.")
+        return requirements
+
     # Call this method to deploy the connector to Fivetran platform
     def deploy(self, project_path: str, deploy_key: str, group: str, connection: str, configuration: dict = None):
         """Deploys the connector to the Fivetran platform.
@@ -572,21 +608,7 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
             connection (str): The connection name.
             configuration (dict): The configuration dictionary.
         """
-
-            print("SEVERE: The deploy command needs the following parameters:"
-                  "\n\tRequired:\n"
-                  "\t\t--api-key <BASE64-ENCODED-FIVETRAN-API-KEY-FOR-DEPLOYMENT>\n"
-                  "\t\t--connection <VALID-CONNECTOR-SCHEMA_NAME>\n"
-                  "\t(Optional):\n"
-                  "\t\t--destination <DESTINATION_NAME> (Becomes required if there are multiple destinations)\n"
-                  "\t\t--configuration <CONFIGURATION_FILE> (Completely replaces the existing configuration)")
-            os._exit(1)
-
-        if not is_connection_name_valid(connection):
-            print(f"SEVERE: Connection name: {connection} is invalid!\n The connection name should start with an "
-                  f"underscore or a lowercase letter (a-z), followed by any combination of underscores, lowercase "
-                  f"letters, or digits (0-9). Uppercase characters are not allowed.")
-            os._exit(1)
+        validate_deploy_parameters(connection, deploy_key)
 
         _check_dict(configuration, True)
 
@@ -653,22 +675,6 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         if not upload_result:
             os._exit(1)
 
-    @staticmethod
-    def __force_sync(id: str, deploy_key: str) -> bool:
-        """Forces a sync operation on the connection with the given ID and deployment key.
-
-        Args:
-            id (str): The connection ID.
-            deploy_key (str): The deployment key.
-
-        Returns:
-            bool: True if the sync was successfully started, False otherwise.
-        """
-        resp = rq.post(f"https://api.fivetran.com/v1/connectors/{id}/sync",
-                       headers={"Authorization": f"Basic {deploy_key}"},
-                       json={"force": True})
-        return resp.ok
-
     @staticmethod
     def __update_connection(id: str, name: str, group: str, config: dict, deploy_key: str):
         """Updates the connection with the given ID, name, group, configuration, and deployment key.
@@ -686,16 +692,17 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         resp = rq.patch(f"https://api.fivetran.com/v1/connectors/{id}",
                         headers={"Authorization": f"Basic {deploy_key}"},
                         json={
-
-
+                            "config": config,
+                            "run_setup_tests": True
                         })
 
         if not resp.ok:
-            print(f"SEVERE: Unable to update Connection '{name}' in destination '{group}', failed with error: '{resp.json()['message']}'.")
+            print(
+                f"SEVERE: Unable to update Connection '{name}' in destination '{group}', failed with error: '{resp.json()['message']}'.")
             os._exit(1)
 
     @staticmethod
-    def __get_connection_id(name: str, group: str, group_id: str, deploy_key: str) -> Optional[str]:
+    def __get_connection_id(name: str, group: str, group_id: str, deploy_key: str) -> Optional[Tuple[str, str]]:
         """Retrieves the connection ID for the specified connection schema name, group, and deployment key.
 
         Args:
|
|
718
725
|
if resp.json()['data']['items']:
|
719
726
|
return resp.json()['data']['items'][0]['id'], resp.json()['data']['items'][0]['service']
|
720
727
|
|
721
|
-
return None, None
|
728
|
+
return Optional[None, None]
|
722
729
|
|
723
730
|
@staticmethod
|
724
731
|
def __create_connection(deploy_key: str, group_id: str, config: dict) -> rq.Response:
|
@@ -735,12 +742,12 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         response = rq.post(f"https://api.fivetran.com/v1/connectors",
                            headers={"Authorization": f"Basic {deploy_key}"},
                            json={
-
-
-
-
-
-
+                               "group_id": group_id,
+                               "service": "connector_sdk",
+                               "config": config,
+                               "paused": True,
+                               "run_setup_tests": True,
+                               "sync_frequency": "360",
                            })
         return response
 
@@ -780,7 +787,8 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
             zipf.write(file_path, arcname)
 
         if not connector_file_exists:
-            print("SEVERE: The 'connector.py' file is missing. Please ensure that 'connector.py' is present in your project directory, and that the file name is in lowercase letters. All custom connectors require this file because Fivetran calls it to start a sync.")
+            print(
+                "SEVERE: The 'connector.py' file is missing. Please ensure that 'connector.py' is present in your project directory, and that the file name is in lowercase letters. All custom connectors require this file because Fivetran calls it to start a sync.")
             os._exit(1)
         return upload_filepath
 
@@ -942,12 +950,14 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
               configuration: dict = None,
               state: dict = None,
               log_level: Logging.Level = Logging.Level.FINE) -> bool:
-        """Tests the connector code by running it with the connector tester
+        """Tests the connector code by running it with the connector tester.\n
+        state.json docs: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithstatejsonfile\n
+        configuration.json docs: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile
 
         Args:
             project_path (str): The path to the project.
-            configuration (dict): The configuration dictionary.
-            state (dict): The state dictionary.
+            configuration (dict): The configuration dictionary, same as configuration.json if present.
+            state (dict): The state dictionary, same as state.json if present.
             log_level (Logging.Level): The logging level.
 
         Returns:
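The reworked `debug` docstring above points at `configuration.json` and `state.json`; the same method can also be driven directly from a connector script. A minimal sketch, not part of the diff, assuming a `connector = Connector(update=...)` object as in `connector.py` (the update function below is a placeholder):

```python
# Minimal local-run sketch, not part of the diff. The update function is a
# placeholder; debug() is the method whose docstring changes in this hunk.
from fivetran_connector_sdk import Connector, Operations as op


def update(configuration: dict, state: dict):
    yield op.upsert(table="hello", data={"message": "hello world"})
    yield op.checkpoint(state={})


connector = Connector(update=update)

if __name__ == "__main__":
    # project_path=None falls back to os.getcwd() inside debug();
    # configuration values, if given, must all be strings (_check_dict).
    connector.debug(project_path=None, configuration={}, state={})
```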
@@ -987,11 +997,12 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
                 with open(download_filepath, 'wb') as fo:
                     fo.write(r.content)
             else:
-
-
-
-
-
+                raise RuntimeError(
+                    f"\nSEVERE: Failed to download the connector tester. Please check your access permissions or "
+                    f"try again later ( status code: {r.status_code}), url: {download_url}")
+        except RuntimeError:
+            raise RuntimeError(
+                f"SEVERE: Failed to download the connector tester. Error details: {traceback.format_exc()}")
 
         try:
             # unzip it
@@ -1005,9 +1016,9 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
             os.chmod(java_exe, st.st_mode | stat.S_IEXEC)
             print("✓")
         except:
-            print(f"\nSEVERE: Failed to install the connector tester. Error details: ", traceback.format_exc())
             shutil.rmtree(tester_root_dir)
-
+            raise RuntimeError(f"\nSEVERE: Failed to install the connector tester. Error details: ",
+                               traceback.format_exc())
 
         project_path = os.getcwd() if project_path is None else project_path
         self.validate_requirements_file(project_path, False)
@@ -1017,18 +1028,14 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         # Uncomment this to run the tester manually
         # server.wait_for_termination()
 
-        error = False
         try:
             print(f"INFO: Running connector tester...")
             for log_msg in self.__run_tester(java_exe, tester_root_dir, project_path, 50051):
                 print(log_msg, end="")
         except:
             print(traceback.format_exc())
-            error = True
-
         finally:
             server.stop(grace=2.0)
-        return error
 
     @staticmethod
     def __java_exe(location: str, os_name: str) -> str:
@@ -1147,80 +1154,90 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         else:
             configuration = self.configuration if self.configuration else request.configuration
             response = self.schema_method(configuration)
+            self.process_tables(response)
+            return connector_sdk_pb2.SchemaResponse(without_schema=common_pb2.TableList(tables=TABLES.values()))
 
-
-
-
-
-            table_name = entry['table']
-
-            if table_name in TABLES:
-                raise ValueError("Table already defined: " + table_name)
-
-            table = common_pb2.Table(name=table_name)
-            columns = {}
-
-            if "primary_key" in entry:
-                for pkey_name in entry["primary_key"]:
-                    column = columns[pkey_name] if pkey_name in columns else common_pb2.Column(name=pkey_name)
-                    column.primary_key = True
-                    columns[pkey_name] = column
-
-            if "columns" in entry:
-                for name, type in entry["columns"].items():
-                    column = columns[name] if name in columns else common_pb2.Column(name=name)
-
-                    if isinstance(type, str):
-                        if type.upper() == "BOOLEAN":
-                            column.type = common_pb2.DataType.BOOLEAN
-                        elif type.upper() == "SHORT":
-                            column.type = common_pb2.DataType.SHORT
-                        elif type.upper() == "INT":
-                            column.type = common_pb2.DataType.SHORT
-                        elif type.upper() == "LONG":
-                            column.type = common_pb2.DataType.LONG
-                        elif type.upper() == "DECIMAL":
-                            raise ValueError("DECIMAL data type missing precision and scale")
-                        elif type.upper() == "FLOAT":
-                            column.type = common_pb2.DataType.FLOAT
-                        elif type.upper() == "DOUBLE":
-                            column.type = common_pb2.DataType.DOUBLE
-                        elif type.upper() == "NAIVE_DATE":
-                            column.type = common_pb2.DataType.NAIVE_DATE
-                        elif type.upper() == "NAIVE_DATETIME":
-                            column.type = common_pb2.DataType.NAIVE_DATETIME
-                        elif type.upper() == "UTC_DATETIME":
-                            column.type = common_pb2.DataType.UTC_DATETIME
-                        elif type.upper() == "BINARY":
-                            column.type = common_pb2.DataType.BINARY
-                        elif type.upper() == "XML":
-                            column.type = common_pb2.DataType.XML
-                        elif type.upper() == "STRING":
-                            column.type = common_pb2.DataType.STRING
-                        elif type.upper() == "JSON":
-                            column.type = common_pb2.DataType.JSON
-                        else:
-                            raise ValueError("Unrecognized column type encountered:: ", str(type))
-
-                    elif isinstance(type, dict):
-                        if type['type'].upper() != "DECIMAL":
-                            raise ValueError("Expecting DECIMAL data type")
-                        column.type = common_pb2.DataType.DECIMAL
-                        column.decimal.precision = type['precision']
-                        column.decimal.scale = type['scale']
-
-                    else:
-                        raise ValueError("Unrecognized column type: ", str(type))
-
-                    if "primary_key" in entry and name in entry["primary_key"]:
-                        column.primary_key = True
-
-                    columns[name] = column
-
-            table.columns.extend(columns.values())
-            TABLES[table_name] = table
+    def process_tables(self, response):
+        for entry in response:
+            if 'table' not in entry:
+                raise ValueError("Entry missing table name: " + entry)
 
-
+            table_name = entry['table']
+
+            if table_name in TABLES:
+                raise ValueError("Table already defined: " + table_name)
+
+            table = common_pb2.Table(name=table_name)
+            columns = {}
+
+            if "primary_key" in entry:
+                self.process_primary_keys(columns, entry)
+
+            if "columns" in entry:
+                self.process_columns(columns, entry)
+
+            table.columns.extend(columns.values())
+            TABLES[table_name] = table
+
+    def process_primary_keys(self, columns, entry):
+        for pkey_name in entry["primary_key"]:
+            column = columns[pkey_name] if pkey_name in columns else common_pb2.Column(name=pkey_name)
+            column.primary_key = True
+            columns[pkey_name] = column
+
+    def process_columns(self, columns, entry):
+        for name, type in entry["columns"].items():
+            column = columns[name] if name in columns else common_pb2.Column(name=name)
+
+            if isinstance(type, str):
+                self.process_data_type(column, type)
+
+            elif isinstance(type, dict):
+                if type['type'].upper() != "DECIMAL":
+                    raise ValueError("Expecting DECIMAL data type")
+                column.type = common_pb2.DataType.DECIMAL
+                column.decimal.precision = type['precision']
+                column.decimal.scale = type['scale']
+
+            else:
+                raise ValueError("Unrecognized column type: ", str(type))
+
+            if "primary_key" in entry and name in entry["primary_key"]:
+                column.primary_key = True
+
+            columns[name] = column
+
+    def process_data_type(self, column, type):
+        if type.upper() == "BOOLEAN":
+            column.type = common_pb2.DataType.BOOLEAN
+        elif type.upper() == "SHORT":
+            column.type = common_pb2.DataType.SHORT
+        elif type.upper() == "INT":
+            column.type = common_pb2.DataType.SHORT
+        elif type.upper() == "LONG":
+            column.type = common_pb2.DataType.LONG
+        elif type.upper() == "DECIMAL":
+            raise ValueError("DECIMAL data type missing precision and scale")
+        elif type.upper() == "FLOAT":
+            column.type = common_pb2.DataType.FLOAT
+        elif type.upper() == "DOUBLE":
+            column.type = common_pb2.DataType.DOUBLE
+        elif type.upper() == "NAIVE_DATE":
+            column.type = common_pb2.DataType.NAIVE_DATE
+        elif type.upper() == "NAIVE_DATETIME":
+            column.type = common_pb2.DataType.NAIVE_DATETIME
+        elif type.upper() == "UTC_DATETIME":
+            column.type = common_pb2.DataType.UTC_DATETIME
+        elif type.upper() == "BINARY":
+            column.type = common_pb2.DataType.BINARY
+        elif type.upper() == "XML":
+            column.type = common_pb2.DataType.XML
+        elif type.upper() == "STRING":
+            column.type = common_pb2.DataType.STRING
+        elif type.upper() == "JSON":
+            column.type = common_pb2.DataType.JSON
+        else:
+            raise ValueError("Unrecognized column type encountered:: ", str(type))
 
     def Update(self, request, context):
         """Overrides the Update method from ConnectorServicer.
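The schema handling split out above still consumes the same entry structure as before: a list of dicts with `table`, optional `primary_key`, and optional `columns`, where a column type is either a type-name string routed through `process_data_type` or a dict declaring DECIMAL precision and scale. A hedged sketch of a schema function that exercises those paths (table and column names are invented):

```python
# Illustrative sketch, not part of the diff; table and column names are invented.
# String types go through process_data_type(); the dict form declares DECIMAL
# with explicit precision and scale.
def schema(configuration: dict):
    return [
        {
            "table": "orders",
            "primary_key": ["order_id"],
            "columns": {
                "order_id": "STRING",
                "ordered_at": "UTC_DATETIME",
                "is_paid": "BOOLEAN",
                "total": {"type": "DECIMAL", "precision": 12, "scale": 2},
            },
        }
    ]
```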
@@ -1269,14 +1286,16 @@ def find_connector_object(project_path) -> Connector:
         if '<fivetran_connector_sdk.Connector object at' in str(obj_attr):
             return obj_attr
 
-    print("SEVERE: The connector object is missing. Please ensure that you have defined a connector object using the correct syntax in your `connector.py` file. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsrequiredobjectconnector")
+    print(
+        "SEVERE: The connector object is missing. Please ensure that you have defined a connector object using the correct syntax in your `connector.py` file. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsrequiredobjectconnector")
    sys.exit(1)
 
 
 def suggest_correct_command(input_command: str) -> bool:
     # for typos
     # calculate the edit distance of the input command (lowercased) with each of the valid commands
-    edit_distances_of_commands = sorted([(command, edit_distance(command, input_command.lower())) for command in VALID_COMMANDS], key=lambda x: x[1])
+    edit_distances_of_commands = sorted(
+        [(command, edit_distance(command, input_command.lower())) for command in VALID_COMMANDS], key=lambda x: x[1])
 
     if edit_distances_of_commands[0][1] <= MAX_ALLOWED_EDIT_DISTANCE_FROM_VALID_COMMAND:
         # if the closest command is within the max allowed edit distance, we suggest that command
@@ -1304,15 +1323,15 @@ def edit_distance(first_string: str, second_string: str) -> int:
     first_string_length: int = len(first_string)
     second_string_length: int = len(second_string)
 
-    # Initialize the previous row of distances (for the base case of an empty first string)
-    #
+    # Initialize the previous row of distances (for the base case of an empty first string) 'previous_row[j]' holds
+    # the edit distance between an empty prefix of 'first_string' and the first 'j' characters of 'second_string'.
     # The first row is filled with values [0, 1, 2, ..., second_string_length]
     previous_row: list[int] = list(range(second_string_length + 1))
 
     # Rest of the rows
     for first_string_index in range(1, first_string_length + 1):
         # Start the current row with the distance for an empty second string
-        current_row: list[int] = [first_string_index]
+        current_row: list[int] = [first_string_index]
 
         # Iterate over each character in the second string
         for second_string_index in range(1, second_string_length + 1):
@@ -1321,7 +1340,8 @@ def edit_distance(first_string: str, second_string: str) -> int:
             current_row.append(previous_row[second_string_index - 1])
         else:
             # Minimum cost of insertion, deletion, or substitution
-            current_row.append(1 + min(current_row[-1], previous_row[second_string_index], previous_row[second_string_index - 1]))
+            current_row.append(
+                1 + min(current_row[-1], previous_row[second_string_index], previous_row[second_string_index - 1]))
 
         # Move to the next row
         previous_row = current_row
@@ -1350,6 +1370,10 @@ def main():
 
     args = parser.parse_args()
 
+    if args.command.lower() == "version":
+        print("fivetran_connector_sdk " + __version__)
+        return
+
     connector_object = find_connector_object(args.project_path)
 
     # Process optional args
@@ -1359,16 +1383,40 @@ def main():
     configuration = args.configuration if args.configuration else None
     state = args.state if args.state else os.getenv('FIVETRAN_STATE', None)
 
+    configuration = validate_and_load_configuration(args, configuration)
+    state = validate_and_load_state(args, state)
+
+    if args.command.lower() == "deploy":
+        if args.state:
+            print("WARNING: 'state' parameter is not used for 'deploy' command")
+        connector_object.deploy(args.project_path, ft_deploy_key, ft_group, ft_connection, configuration)
+
+    elif args.command.lower() == "debug":
+        connector_object.debug(args.project_path, configuration, state)
+
+    elif args.command.lower() == "reset":
+        reset_local_file_directory(args)
+    else:
+        if not suggest_correct_command(args.command):
+            raise NotImplementedError(f"Invalid command: {args.command}, see `fivetran --help`")
+
+
+def validate_and_load_configuration(args, configuration):
     if configuration:
         json_filepath = os.path.join(args.project_path, args.configuration)
         if os.path.isfile(json_filepath):
            with open(json_filepath, 'r') as fi:
                configuration = json.load(fi)
        else:
-            raise ValueError("Configuration must be provided as a JSON file. Please check your input. Reference: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")
+            raise ValueError(
+                "Configuration must be provided as a JSON file. Please check your input. Reference: "
+                "https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")
    else:
        configuration = {}
+    return configuration
+
 
+def validate_and_load_state(args, state):
    if state:
        json_filepath = os.path.join(args.project_path, args.state)
        if os.path.isfile(json_filepath):
@@ -1378,35 +1426,23 @@ def main():
             state = json.loads(state)
     else:
         state = {}
+    return state
 
-    if args.command.lower() == "deploy":
-        if args.state:
-            print("WARNING: 'state' parameter is not used for 'deploy' command")
-        connector_object.deploy(args.project_path, ft_deploy_key, ft_group, ft_connection, configuration)
-
-    elif args.command.lower() == "debug":
-        connector_object.debug(args.project_path, configuration, state)
 
-
-
-
-
-
-
-
-        try:
-            if os.path.exists(files_path) and os.path.isdir(files_path):
-                shutil.rmtree(files_path)
-            print("INFO: Reset Successful")
-        except Exception as e:
-            print("ERROR: Reset Failed")
-            raise e
-
-    elif args.command.lower() == "version":
-        print("fivetran_connector_sdk " + __version__)
+def reset_local_file_directory(args):
+    files_path = os.path.join(args.project_path, OUTPUT_FILES_DIR)
+    confirm = input(
+        "This will delete your current state and `warehouse.db` files. Do you want to continue? (Y/N): ")
+    if confirm.lower() != "y":
+        print("INFO: Reset canceled")
     else:
-
-
+        try:
+            if os.path.exists(files_path) and os.path.isdir(files_path):
+                shutil.rmtree(files_path)
+            print("INFO: Reset Successful")
+        except Exception as e:
+            print("ERROR: Reset Failed")
+            raise e
 
 
 if __name__ == "__main__":

{fivetran_connector_sdk-0.9.16.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fivetran_connector_sdk
-Version: 0.9.16.1
+Version: 0.10.7.1
 Summary: Build custom connectors on Fivetran platform
 Author-email: Fivetran <developers@fivetran.com>
 Project-URL: Homepage, https://fivetran.com/docs/connectors/connector-sdk