fivetran-connector-sdk 0.9.30.1__tar.gz → 0.10.7.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (18) hide show
  1. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/PKG-INFO +1 -1
  2. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/__init__.py +299 -274
  3. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk.egg-info/PKG-INFO +1 -1
  4. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/README.md +0 -0
  5. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/pyproject.toml +0 -0
  6. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/setup.cfg +0 -0
  7. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/__init__.py +0 -0
  8. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/common_pb2.py +0 -0
  9. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/common_pb2.pyi +0 -0
  10. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/common_pb2_grpc.py +0 -0
  11. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/connector_sdk_pb2.py +0 -0
  12. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/connector_sdk_pb2.pyi +0 -0
  13. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk/protos/connector_sdk_pb2_grpc.py +0 -0
  14. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk.egg-info/SOURCES.txt +0 -0
  15. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk.egg-info/dependency_links.txt +0 -0
  16. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk.egg-info/entry_points.txt +0 -0
  17. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk.egg-info/requires.txt +0 -0
  18. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.7.1}/src/fivetran_connector_sdk.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: fivetran_connector_sdk
3
- Version: 0.9.30.1
3
+ Version: 0.10.7.1
4
4
  Summary: Build custom connectors on Fivetran platform
5
5
  Author-email: Fivetran <developers@fivetran.com>
6
6
  Project-URL: Homepage, https://fivetran.com/docs/connectors/connector-sdk
@@ -1,5 +1,5 @@
1
1
  import argparse
2
- from typing import Optional
2
+ from typing import Optional, Tuple
3
3
 
4
4
  import grpc
5
5
  import importlib.util
@@ -25,7 +25,7 @@ from fivetran_connector_sdk.protos import common_pb2
25
25
  from fivetran_connector_sdk.protos import connector_sdk_pb2
26
26
  from fivetran_connector_sdk.protos import connector_sdk_pb2_grpc
27
27
 
28
- __version__ = "0.9.30.1"
28
+ __version__ = "0.10.7.1"
29
29
 
30
30
  MAC_OS = "mac"
31
31
  WIN_OS = "windows"
@@ -38,6 +38,7 @@ UPLOAD_FILENAME = "code.zip"
38
38
  LAST_VERSION_CHECK_FILE = "_last_version_check"
39
39
  ROOT_LOCATION = ".ft_sdk_connector_tester"
40
40
  OUTPUT_FILES_DIR = "files"
41
+ REQUIREMENTS_TXT = "requirements.txt"
41
42
  ONE_DAY_IN_SEC = 24 * 60 * 60
42
43
 
43
44
  EXCLUDED_DIRS = ["__pycache__", "lib", "include", OUTPUT_FILES_DIR]
@@ -121,7 +122,7 @@ class Logging:
121
122
  class Operations:
122
123
  @staticmethod
123
124
  def upsert(table: str, data: dict) -> list[connector_sdk_pb2.UpdateResponse]:
124
- """Performs an upsert operation on the specified table with the given data, deleting any existing value with the same primary key.
125
+ """Updates records with the same primary key if already present in the destination. Inserts new records if not already present in the destination.
125
126
 
126
127
  Args:
127
128
  table (str): The name of the table.
@@ -228,8 +229,8 @@ class Operations:
228
229
  """
229
230
  _yield_check(inspect.stack())
230
231
  return connector_sdk_pb2.UpdateResponse(
231
- operation=connector_sdk_pb2.Operation(checkpoint=connector_sdk_pb2.Checkpoint(
232
- state_json=json.dumps(state))))
232
+ operation=connector_sdk_pb2.Operation(checkpoint=connector_sdk_pb2.Checkpoint(
233
+ state_json=json.dumps(state))))
233
234
 
234
235
 
235
236
  def check_newer_version():
@@ -295,76 +296,85 @@ def _map_data_to_columns(data: dict, columns: dict) -> dict:
295
296
  if v is None:
296
297
  mapped_data[k] = common_pb2.ValueType(null=True)
297
298
  elif (k in columns) and columns[k].type != common_pb2.DataType.UNSPECIFIED:
298
- if columns[k].type == common_pb2.DataType.BOOLEAN:
299
- mapped_data[k] = common_pb2.ValueType(bool=v)
300
- elif columns[k].type == common_pb2.DataType.SHORT:
301
- mapped_data[k] = common_pb2.ValueType(short=v)
302
- elif columns[k].type == common_pb2.DataType.INT:
303
- mapped_data[k] = common_pb2.ValueType(int=v)
304
- elif columns[k].type == common_pb2.DataType.LONG:
305
- mapped_data[k] = common_pb2.ValueType(long=v)
306
- elif columns[k].type == common_pb2.DataType.DECIMAL:
307
- mapped_data[k] = common_pb2.ValueType(decimal=v)
308
- elif columns[k].type == common_pb2.DataType.FLOAT:
309
- mapped_data[k] = common_pb2.ValueType(float=v)
310
- elif columns[k].type == common_pb2.DataType.DOUBLE:
311
- mapped_data[k] = common_pb2.ValueType(double=v)
312
- elif columns[k].type == common_pb2.DataType.NAIVE_DATE:
313
- timestamp = timestamp_pb2.Timestamp()
314
- dt = datetime.strptime(v, "%Y-%m-%d")
315
- timestamp.FromDatetime(dt)
316
- mapped_data[k] = common_pb2.ValueType(naive_date=timestamp)
317
- elif columns[k].type == common_pb2.DataType.NAIVE_DATETIME:
318
- if '.' not in v: v = v + ".0"
319
- timestamp = timestamp_pb2.Timestamp()
320
- dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f")
321
- timestamp.FromDatetime(dt)
322
- mapped_data[k] = common_pb2.ValueType(naive_datetime=timestamp)
323
- elif columns[k].type == common_pb2.DataType.UTC_DATETIME:
324
- timestamp = timestamp_pb2.Timestamp()
325
- if '.' in v:
326
- dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f%z")
327
- else:
328
- dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S%z")
329
- timestamp.FromDatetime(dt)
330
- mapped_data[k] = common_pb2.ValueType(utc_datetime=timestamp)
331
- elif columns[k].type == common_pb2.DataType.BINARY:
332
- mapped_data[k] = common_pb2.ValueType(binary=v)
333
- elif columns[k].type == common_pb2.DataType.XML:
334
- mapped_data[k] = common_pb2.ValueType(xml=v)
335
- elif columns[k].type == common_pb2.DataType.STRING:
336
- incoming = v if isinstance(v, str) else str(v)
337
- mapped_data[k] = common_pb2.ValueType(string=incoming)
338
- elif columns[k].type == common_pb2.DataType.JSON:
339
- mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
340
- else:
341
- raise ValueError(f"Unsupported data type encountered: {columns[k].type}. Please use valid data types.")
299
+ map_defined_data_type(columns, k, mapped_data, v)
342
300
  else:
343
- # We can infer type from the value
344
- if isinstance(v, int):
345
- if abs(v) > 2147483647:
346
- mapped_data[k] = common_pb2.ValueType(long=v)
347
- else:
348
- mapped_data[k] = common_pb2.ValueType(int=v)
349
- elif isinstance(v, float):
350
- mapped_data[k] = common_pb2.ValueType(float=v)
351
- elif isinstance(v, bool):
352
- mapped_data[k] = common_pb2.ValueType(bool=v)
353
- elif isinstance(v, bytes):
354
- mapped_data[k] = common_pb2.ValueType(binary=v)
355
- elif isinstance(v, list):
356
- raise ValueError("Values for the columns cannot be of type 'list'. Please ensure that all values are of a supported type. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#supporteddatatypes")
357
- elif isinstance(v, dict):
358
- mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
359
- elif isinstance(v, str):
360
- mapped_data[k] = common_pb2.ValueType(string=v)
361
- else:
362
- # Convert arbitrary objects to string
363
- mapped_data[k] = common_pb2.ValueType(string=str(v))
301
+ map_inferred_data_type(k, mapped_data, v)
364
302
 
365
303
  return mapped_data
366
304
 
367
305
 
306
+ def map_inferred_data_type(k, mapped_data, v):
307
+ # We can infer type from the value
308
+ if isinstance(v, int):
309
+ if abs(v) > 2147483647:
310
+ mapped_data[k] = common_pb2.ValueType(long=v)
311
+ else:
312
+ mapped_data[k] = common_pb2.ValueType(int=v)
313
+ elif isinstance(v, float):
314
+ mapped_data[k] = common_pb2.ValueType(float=v)
315
+ elif isinstance(v, bool):
316
+ mapped_data[k] = common_pb2.ValueType(bool=v)
317
+ elif isinstance(v, bytes):
318
+ mapped_data[k] = common_pb2.ValueType(binary=v)
319
+ elif isinstance(v, list):
320
+ raise ValueError(
321
+ "Values for the columns cannot be of type 'list'. Please ensure that all values are of a supported type. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#supporteddatatypes")
322
+ elif isinstance(v, dict):
323
+ mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
324
+ elif isinstance(v, str):
325
+ mapped_data[k] = common_pb2.ValueType(string=v)
326
+ else:
327
+ # Convert arbitrary objects to string
328
+ mapped_data[k] = common_pb2.ValueType(string=str(v))
329
+
330
+
331
+ def map_defined_data_type(columns, k, mapped_data, v):
332
+ if columns[k].type == common_pb2.DataType.BOOLEAN:
333
+ mapped_data[k] = common_pb2.ValueType(bool=v)
334
+ elif columns[k].type == common_pb2.DataType.SHORT:
335
+ mapped_data[k] = common_pb2.ValueType(short=v)
336
+ elif columns[k].type == common_pb2.DataType.INT:
337
+ mapped_data[k] = common_pb2.ValueType(int=v)
338
+ elif columns[k].type == common_pb2.DataType.LONG:
339
+ mapped_data[k] = common_pb2.ValueType(long=v)
340
+ elif columns[k].type == common_pb2.DataType.DECIMAL:
341
+ mapped_data[k] = common_pb2.ValueType(decimal=v)
342
+ elif columns[k].type == common_pb2.DataType.FLOAT:
343
+ mapped_data[k] = common_pb2.ValueType(float=v)
344
+ elif columns[k].type == common_pb2.DataType.DOUBLE:
345
+ mapped_data[k] = common_pb2.ValueType(double=v)
346
+ elif columns[k].type == common_pb2.DataType.NAIVE_DATE:
347
+ timestamp = timestamp_pb2.Timestamp()
348
+ dt = datetime.strptime(v, "%Y-%m-%d")
349
+ timestamp.FromDatetime(dt)
350
+ mapped_data[k] = common_pb2.ValueType(naive_date=timestamp)
351
+ elif columns[k].type == common_pb2.DataType.NAIVE_DATETIME:
352
+ if '.' not in v: v = v + ".0"
353
+ timestamp = timestamp_pb2.Timestamp()
354
+ dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f")
355
+ timestamp.FromDatetime(dt)
356
+ mapped_data[k] = common_pb2.ValueType(naive_datetime=timestamp)
357
+ elif columns[k].type == common_pb2.DataType.UTC_DATETIME:
358
+ timestamp = timestamp_pb2.Timestamp()
359
+ if '.' in v:
360
+ dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f%z")
361
+ else:
362
+ dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S%z")
363
+ timestamp.FromDatetime(dt)
364
+ mapped_data[k] = common_pb2.ValueType(utc_datetime=timestamp)
365
+ elif columns[k].type == common_pb2.DataType.BINARY:
366
+ mapped_data[k] = common_pb2.ValueType(binary=v)
367
+ elif columns[k].type == common_pb2.DataType.XML:
368
+ mapped_data[k] = common_pb2.ValueType(xml=v)
369
+ elif columns[k].type == common_pb2.DataType.STRING:
370
+ incoming = v if isinstance(v, str) else str(v)
371
+ mapped_data[k] = common_pb2.ValueType(string=incoming)
372
+ elif columns[k].type == common_pb2.DataType.JSON:
373
+ mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
374
+ else:
375
+ raise ValueError(f"Unsupported data type encountered: {columns[k].type}. Please use valid data types.")
376
+
377
+
368
378
  def _yield_check(stack):
369
379
  """Checks for the presence of 'yield' in the calling code.
370
380
  Args:
@@ -384,11 +394,13 @@ def _yield_check(stack):
384
394
  calling_code = stack[1].code_context[0]
385
395
  if f"{called_method}(" in calling_code:
386
396
  if 'yield' not in calling_code:
387
- print(f"SEVERE: Please add 'yield' to '{called_method}' operation on line {stack[1].lineno} in file '{stack[1].filename}'")
397
+ print(
398
+ f"SEVERE: Please add 'yield' to '{called_method}' operation on line {stack[1].lineno} in file '{stack[1].filename}'")
388
399
  os._exit(1)
389
400
  else:
390
401
  # This should never happen
391
- raise RuntimeError(f"The '{called_method}' function is missing in the connector. Please ensure that the '{called_method}' function is properly defined in your code to proceed. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsmethods")
402
+ raise RuntimeError(
403
+ f"The '{called_method}' function is missing in the connector. Please ensure that the '{called_method}' function is properly defined in your code to proceed. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsmethods")
392
404
 
393
405
 
394
406
  def _check_dict(incoming: dict, string_only: bool = False) -> dict:
@@ -405,12 +417,14 @@ def _check_dict(incoming: dict, string_only: bool = False) -> dict:
405
417
  return {}
406
418
 
407
419
  if not isinstance(incoming, dict):
408
- raise ValueError("Configuration must be provided as a JSON dictionary. Please check your input. Reference: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")
420
+ raise ValueError(
421
+ "Configuration must be provided as a JSON dictionary. Please check your input. Reference: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")
409
422
 
410
423
  if string_only:
411
424
  for k, v in incoming.items():
412
425
  if not isinstance(v, str):
413
- print("SEVERE: All values in the configuration must be STRING. Please check your configuration and ensure that every value is a STRING.")
426
+ print(
427
+ "SEVERE: All values in the configuration must be STRING. Please check your configuration and ensure that every value is a STRING.")
414
428
  os._exit(1)
415
429
 
416
430
  return incoming
@@ -436,6 +450,23 @@ def log_unused_deps_error(package_name: str, version: str):
436
450
  os._exit(1)
437
451
 
438
452
 
453
+ def validate_deploy_parameters(connection, deploy_key):
454
+ if not deploy_key or not connection:
455
+ print("SEVERE: The deploy command needs the following parameters:"
456
+ "\n\tRequired:\n"
457
+ "\t\t--api-key <BASE64-ENCODED-FIVETRAN-API-KEY-FOR-DEPLOYMENT>\n"
458
+ "\t\t--connection <VALID-CONNECTOR-SCHEMA_NAME>\n"
459
+ "\t(Optional):\n"
460
+ "\t\t--destination <DESTINATION_NAME> (Becomes required if there are multiple destinations)\n"
461
+ "\t\t--configuration <CONFIGURATION_FILE> (Completely replaces the existing configuration)")
462
+ os._exit(1)
463
+ if not is_connection_name_valid(connection):
464
+ print(f"SEVERE: Connection name: {connection} is invalid!\n The connection name should start with an "
465
+ f"underscore or a lowercase letter (a-z), followed by any combination of underscores, lowercase "
466
+ f"letters, or digits (0-9). Uppercase characters are not allowed.")
467
+ os._exit(1)
468
+
469
+
439
470
  class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
440
471
  def __init__(self, update, schema=None):
441
472
  """Initializes the Connector instance.
@@ -450,22 +481,6 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
450
481
  self.configuration = None
451
482
  self.state = None
452
483
 
453
- @staticmethod
454
- def __unpause_connection(id: str, deploy_key: str) -> bool:
455
- """Unpauses the connection with the given ID and deployment key.
456
-
457
- Args:
458
- id (str): The connection ID.
459
- deploy_key (str): The deployment key.
460
-
461
- Returns:
462
- bool: True if the connection was successfully unpaused, False otherwise.
463
- """
464
- resp = rq.patch(f"https://api.fivetran.com/v1/connectors/{id}",
465
- headers={"Authorization": f"Basic {deploy_key}"},
466
- json={"force": True})
467
- return resp.ok
468
-
469
484
  @staticmethod
470
485
  def fetch_requirements_from_file(file_path: str) -> list[str]:
471
486
  """Reads a requirements file and returns a list of dependencies.
@@ -526,13 +541,7 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
526
541
  os.remove(tmp_requirements_file_path)
527
542
 
528
543
  if len(tmp_requirements) > 0:
529
- if os.path.exists("requirements.txt"):
530
- requirements = self.fetch_requirements_as_dict(self, os.path.join(project_path, 'requirements.txt'))
531
- else:
532
- with open("requirements.txt", 'w'):
533
- pass
534
- requirements = {}
535
- print("WARNING: Adding `requirements.txt` file to your project folder.")
544
+ requirements = self.load_or_add_requirements_file(project_path)
536
545
 
537
546
  version_mismatch_deps = {key: tmp_requirements[key] for key in
538
547
  (requirements.keys() & tmp_requirements.keys())
@@ -543,35 +552,51 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
543
552
 
544
553
  missing_deps = {key: tmp_requirements[key] for key in (tmp_requirements.keys() - requirements.keys())}
545
554
  if missing_deps:
546
- log_level = "ERROR" if is_deploy else "WARNING"
547
- print(log_level +
548
- ": Please include the following dependency libraries in requirements.txt, to be used by "
549
- "Fivetran production. "
550
- "For more information, please visit: "
551
- "https://fivetran.com/docs/connectors/connector-sdk/detailed-guide"
552
- "#workingwithrequirementstxtfile")
553
- print(*list(missing_deps.values()))
554
- if is_deploy:
555
- os._exit(1)
555
+ self.handle_missing_deps(is_deploy, missing_deps)
556
556
 
557
557
  unused_deps = list(requirements.keys() - tmp_requirements.keys())
558
558
  if unused_deps:
559
- if 'fivetran_connector_sdk' in unused_deps:
560
- log_unused_deps_error("fivetran_connector_sdk", __version__)
561
- elif 'requests' in unused_deps:
562
- log_unused_deps_error("requests", "2.32.3")
563
- else:
564
- print("INFO: The following dependencies are not needed, "
565
- "they are not used or already installed. Please remove them from requirements.txt:")
566
- print(*unused_deps)
559
+ self.handle_unused_deps(unused_deps)
567
560
  else:
568
- if os.path.exists("requirements.txt"):
561
+ if os.path.exists(REQUIREMENTS_TXT):
569
562
  print("WARNING: `requirements.txt` is not required as no additional "
570
563
  "Python libraries are required or all required libraries for "
571
564
  "your code are pre-installed.")
572
565
 
573
566
  if is_deploy: print("Successful validation of requirements.txt")
574
567
 
568
+ def handle_unused_deps(self, unused_deps):
569
+ if 'fivetran_connector_sdk' in unused_deps:
570
+ log_unused_deps_error("fivetran_connector_sdk", __version__)
571
+ elif 'requests' in unused_deps:
572
+ log_unused_deps_error("requests", "2.32.3")
573
+ else:
574
+ print("INFO: The following dependencies are not needed, "
575
+ "they are not used or already installed. Please remove them from requirements.txt:")
576
+ print(*unused_deps)
577
+
578
+ def handle_missing_deps(self, is_deploy, missing_deps):
579
+ log_level = "ERROR" if is_deploy else "WARNING"
580
+ print(log_level +
581
+ ": Please include the following dependency libraries in requirements.txt, to be used by "
582
+ "Fivetran production. "
583
+ "For more information, please visit: "
584
+ "https://fivetran.com/docs/connectors/connector-sdk/detailed-guide"
585
+ "#workingwithrequirementstxtfile")
586
+ print(*list(missing_deps.values()))
587
+ if is_deploy:
588
+ os._exit(1)
589
+
590
+ def load_or_add_requirements_file(self, project_path):
591
+ if os.path.exists(REQUIREMENTS_TXT):
592
+ requirements = self.fetch_requirements_as_dict(self, os.path.join(project_path, REQUIREMENTS_TXT))
593
+ else:
594
+ with open(REQUIREMENTS_TXT, 'w'):
595
+ pass
596
+ requirements = {}
597
+ print("WARNING: Adding `requirements.txt` file to your project folder.")
598
+ return requirements
599
+
575
600
  # Call this method to deploy the connector to Fivetran platform
576
601
  def deploy(self, project_path: str, deploy_key: str, group: str, connection: str, configuration: dict = None):
577
602
  """Deploys the connector to the Fivetran platform.
@@ -583,21 +608,7 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
583
608
  connection (str): The connection name.
584
609
  configuration (dict): The configuration dictionary.
585
610
  """
586
- if not deploy_key or not connection:
587
- print("SEVERE: The deploy command needs the following parameters:"
588
- "\n\tRequired:\n"
589
- "\t\t--api-key <BASE64-ENCODED-FIVETRAN-API-KEY-FOR-DEPLOYMENT>\n"
590
- "\t\t--connection <VALID-CONNECTOR-SCHEMA_NAME>\n"
591
- "\t(Optional):\n"
592
- "\t\t--destination <DESTINATION_NAME> (Becomes required if there are multiple destinations)\n"
593
- "\t\t--configuration <CONFIGURATION_FILE> (Completely replaces the existing configuration)")
594
- os._exit(1)
595
-
596
- if not is_connection_name_valid(connection):
597
- print(f"SEVERE: Connection name: {connection} is invalid!\n The connection name should start with an "
598
- f"underscore or a lowercase letter (a-z), followed by any combination of underscores, lowercase "
599
- f"letters, or digits (0-9). Uppercase characters are not allowed.")
600
- os._exit(1)
611
+ validate_deploy_parameters(connection, deploy_key)
601
612
 
602
613
  _check_dict(configuration, True)
603
614
 
@@ -664,22 +675,6 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
664
675
  if not upload_result:
665
676
  os._exit(1)
666
677
 
667
- @staticmethod
668
- def __force_sync(id: str, deploy_key: str) -> bool:
669
- """Forces a sync operation on the connection with the given ID and deployment key.
670
-
671
- Args:
672
- id (str): The connection ID.
673
- deploy_key (str): The deployment key.
674
-
675
- Returns:
676
- bool: True if the sync was successfully started, False otherwise.
677
- """
678
- resp = rq.post(f"https://api.fivetran.com/v1/connectors/{id}/sync",
679
- headers={"Authorization": f"Basic {deploy_key}"},
680
- json={"force": True})
681
- return resp.ok
682
-
683
678
  @staticmethod
684
679
  def __update_connection(id: str, name: str, group: str, config: dict, deploy_key: str):
685
680
  """Updates the connection with the given ID, name, group, configuration, and deployment key.
@@ -697,16 +692,17 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
697
692
  resp = rq.patch(f"https://api.fivetran.com/v1/connectors/{id}",
698
693
  headers={"Authorization": f"Basic {deploy_key}"},
699
694
  json={
700
- "config": config,
701
- "run_setup_tests": True
695
+ "config": config,
696
+ "run_setup_tests": True
702
697
  })
703
698
 
704
699
  if not resp.ok:
705
- print(f"SEVERE: Unable to update Connection '{name}' in destination '{group}', failed with error: '{resp.json()['message']}'.")
700
+ print(
701
+ f"SEVERE: Unable to update Connection '{name}' in destination '{group}', failed with error: '{resp.json()['message']}'.")
706
702
  os._exit(1)
707
703
 
708
704
  @staticmethod
709
- def __get_connection_id(name: str, group: str, group_id: str, deploy_key: str) -> Optional[str]:
705
+ def __get_connection_id(name: str, group: str, group_id: str, deploy_key: str) -> Optional[Tuple[str, str]]:
710
706
  """Retrieves the connection ID for the specified connection schema name, group, and deployment key.
711
707
 
712
708
  Args:
@@ -729,7 +725,7 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
729
725
  if resp.json()['data']['items']:
730
726
  return resp.json()['data']['items'][0]['id'], resp.json()['data']['items'][0]['service']
731
727
 
732
- return None, None
728
+ return None, None
733
729
 
734
730
  @staticmethod
735
731
  def __create_connection(deploy_key: str, group_id: str, config: dict) -> rq.Response:
@@ -746,12 +742,12 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
746
742
  response = rq.post(f"https://api.fivetran.com/v1/connectors",
747
743
  headers={"Authorization": f"Basic {deploy_key}"},
748
744
  json={
749
- "group_id": group_id,
750
- "service": "connector_sdk",
751
- "config": config,
752
- "paused": True,
753
- "run_setup_tests": True,
754
- "sync_frequency": "360",
745
+ "group_id": group_id,
746
+ "service": "connector_sdk",
747
+ "config": config,
748
+ "paused": True,
749
+ "run_setup_tests": True,
750
+ "sync_frequency": "360",
755
751
  })
756
752
  return response
757
753
 
@@ -791,7 +787,8 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
791
787
  zipf.write(file_path, arcname)
792
788
 
793
789
  if not connector_file_exists:
794
- print("SEVERE: The 'connector.py' file is missing. Please ensure that 'connector.py' is present in your project directory, and that the file name is in lowercase letters. All custom connectors require this file because Fivetran calls it to start a sync.")
790
+ print(
791
+ "SEVERE: The 'connector.py' file is missing. Please ensure that 'connector.py' is present in your project directory, and that the file name is in lowercase letters. All custom connectors require this file because Fivetran calls it to start a sync.")
795
792
  os._exit(1)
796
793
  return upload_filepath
797
794
 
@@ -953,12 +950,14 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
953
950
  configuration: dict = None,
954
951
  state: dict = None,
955
952
  log_level: Logging.Level = Logging.Level.FINE) -> bool:
956
- """Tests the connector code by running it with the connector tester.
953
+ """Tests the connector code by running it with the connector tester.\n
954
+ state.json docs: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithstatejsonfile\n
955
+ configuration.json docs: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile
957
956
 
958
957
  Args:
959
958
  project_path (str): The path to the project.
960
- configuration (dict): The configuration dictionary.
961
- state (dict): The state dictionary.
959
+ configuration (dict): The configuration dictionary, same as configuration.json if present.
960
+ state (dict): The state dictionary, same as state.json if present.
962
961
  log_level (Logging.Level): The logging level.
963
962
 
964
963
  Returns:
@@ -998,11 +997,12 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
998
997
  with open(download_filepath, 'wb') as fo:
999
998
  fo.write(r.content)
1000
999
  else:
1001
- print(f"\nSEVERE: Failed to download the connector tester. Please check your access permissions or try again later ( status code: {r.status_code}), url: {download_url}")
1002
- os._exit(1)
1003
- except:
1004
- print(f"\nSEVERE: Failed to download the connector tester. Error details: {traceback.format_exc()}")
1005
- os._exit(1)
1000
+ raise RuntimeError(
1001
+ f"\nSEVERE: Failed to download the connector tester. Please check your access permissions or "
1002
+ f"try again later ( status code: {r.status_code}), url: {download_url}")
1003
+ except Exception:
1004
+ raise RuntimeError(
1005
+ f"SEVERE: Failed to download the connector tester. Error details: {traceback.format_exc()}")
1006
1006
 
1007
1007
  try:
1008
1008
  # unzip it
@@ -1016,9 +1016,9 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
1016
1016
  os.chmod(java_exe, st.st_mode | stat.S_IEXEC)
1017
1017
  print("✓")
1018
1018
  except:
1019
- print(f"\nSEVERE: Failed to install the connector tester. Error details: ", traceback.format_exc())
1020
1019
  shutil.rmtree(tester_root_dir)
1021
- os._exit(1)
1020
+ raise RuntimeError(f"\nSEVERE: Failed to install the connector tester. "
1021
+ f"Error details: {traceback.format_exc()}")
1022
1022
 
1023
1023
  project_path = os.getcwd() if project_path is None else project_path
1024
1024
  self.validate_requirements_file(project_path, False)
@@ -1028,18 +1028,14 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
1028
1028
  # Uncomment this to run the tester manually
1029
1029
  # server.wait_for_termination()
1030
1030
 
1031
- error = False
1032
1031
  try:
1033
1032
  print(f"INFO: Running connector tester...")
1034
1033
  for log_msg in self.__run_tester(java_exe, tester_root_dir, project_path, 50051):
1035
1034
  print(log_msg, end="")
1036
1035
  except:
1037
1036
  print(traceback.format_exc())
1038
- error = True
1039
-
1040
1037
  finally:
1041
1038
  server.stop(grace=2.0)
1042
- return error
1043
1039
 
1044
1040
  @staticmethod
1045
1041
  def __java_exe(location: str, os_name: str) -> str:
@@ -1158,80 +1154,90 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
1158
1154
  else:
1159
1155
  configuration = self.configuration if self.configuration else request.configuration
1160
1156
  response = self.schema_method(configuration)
1157
+ self.process_tables(response)
1158
+ return connector_sdk_pb2.SchemaResponse(without_schema=common_pb2.TableList(tables=TABLES.values()))
1161
1159
 
1162
- for entry in response:
1163
- if 'table' not in entry:
1164
- raise ValueError("Entry missing table name: " + entry)
1165
-
1166
- table_name = entry['table']
1167
-
1168
- if table_name in TABLES:
1169
- raise ValueError("Table already defined: " + table_name)
1170
-
1171
- table = common_pb2.Table(name=table_name)
1172
- columns = {}
1173
-
1174
- if "primary_key" in entry:
1175
- for pkey_name in entry["primary_key"]:
1176
- column = columns[pkey_name] if pkey_name in columns else common_pb2.Column(name=pkey_name)
1177
- column.primary_key = True
1178
- columns[pkey_name] = column
1179
-
1180
- if "columns" in entry:
1181
- for name, type in entry["columns"].items():
1182
- column = columns[name] if name in columns else common_pb2.Column(name=name)
1183
-
1184
- if isinstance(type, str):
1185
- if type.upper() == "BOOLEAN":
1186
- column.type = common_pb2.DataType.BOOLEAN
1187
- elif type.upper() == "SHORT":
1188
- column.type = common_pb2.DataType.SHORT
1189
- elif type.upper() == "INT":
1190
- column.type = common_pb2.DataType.SHORT
1191
- elif type.upper() == "LONG":
1192
- column.type = common_pb2.DataType.LONG
1193
- elif type.upper() == "DECIMAL":
1194
- raise ValueError("DECIMAL data type missing precision and scale")
1195
- elif type.upper() == "FLOAT":
1196
- column.type = common_pb2.DataType.FLOAT
1197
- elif type.upper() == "DOUBLE":
1198
- column.type = common_pb2.DataType.DOUBLE
1199
- elif type.upper() == "NAIVE_DATE":
1200
- column.type = common_pb2.DataType.NAIVE_DATE
1201
- elif type.upper() == "NAIVE_DATETIME":
1202
- column.type = common_pb2.DataType.NAIVE_DATETIME
1203
- elif type.upper() == "UTC_DATETIME":
1204
- column.type = common_pb2.DataType.UTC_DATETIME
1205
- elif type.upper() == "BINARY":
1206
- column.type = common_pb2.DataType.BINARY
1207
- elif type.upper() == "XML":
1208
- column.type = common_pb2.DataType.XML
1209
- elif type.upper() == "STRING":
1210
- column.type = common_pb2.DataType.STRING
1211
- elif type.upper() == "JSON":
1212
- column.type = common_pb2.DataType.JSON
1213
- else:
1214
- raise ValueError("Unrecognized column type encountered:: ", str(type))
1215
-
1216
- elif isinstance(type, dict):
1217
- if type['type'].upper() != "DECIMAL":
1218
- raise ValueError("Expecting DECIMAL data type")
1219
- column.type = common_pb2.DataType.DECIMAL
1220
- column.decimal.precision = type['precision']
1221
- column.decimal.scale = type['scale']
1222
-
1223
- else:
1224
- raise ValueError("Unrecognized column type: ", str(type))
1225
-
1226
- if "primary_key" in entry and name in entry["primary_key"]:
1227
- column.primary_key = True
1228
-
1229
- columns[name] = column
1230
-
1231
- table.columns.extend(columns.values())
1232
- TABLES[table_name] = table
1160
+ def process_tables(self, response):
1161
+ for entry in response:
1162
+ if 'table' not in entry:
1163
+ raise ValueError("Entry missing table name: " + entry)
1233
1164
 
1234
- return connector_sdk_pb2.SchemaResponse(without_schema=common_pb2.TableList(tables=TABLES.values()))
1165
+ table_name = entry['table']
1166
+
1167
+ if table_name in TABLES:
1168
+ raise ValueError("Table already defined: " + table_name)
1169
+
1170
+ table = common_pb2.Table(name=table_name)
1171
+ columns = {}
1172
+
1173
+ if "primary_key" in entry:
1174
+ self.process_primary_keys(columns, entry)
1175
+
1176
+ if "columns" in entry:
1177
+ self.process_columns(columns, entry)
1178
+
1179
+ table.columns.extend(columns.values())
1180
+ TABLES[table_name] = table
1181
+
1182
+ def process_primary_keys(self, columns, entry):
1183
+ for pkey_name in entry["primary_key"]:
1184
+ column = columns[pkey_name] if pkey_name in columns else common_pb2.Column(name=pkey_name)
1185
+ column.primary_key = True
1186
+ columns[pkey_name] = column
1187
+
1188
+ def process_columns(self, columns, entry):
1189
+ for name, type in entry["columns"].items():
1190
+ column = columns[name] if name in columns else common_pb2.Column(name=name)
1191
+
1192
+ if isinstance(type, str):
1193
+ self.process_data_type(column, type)
1194
+
1195
+ elif isinstance(type, dict):
1196
+ if type['type'].upper() != "DECIMAL":
1197
+ raise ValueError("Expecting DECIMAL data type")
1198
+ column.type = common_pb2.DataType.DECIMAL
1199
+ column.decimal.precision = type['precision']
1200
+ column.decimal.scale = type['scale']
1201
+
1202
+ else:
1203
+ raise ValueError("Unrecognized column type: ", str(type))
1204
+
1205
+ if "primary_key" in entry and name in entry["primary_key"]:
1206
+ column.primary_key = True
1207
+
1208
+ columns[name] = column
1209
+
1210
+ def process_data_type(self, column, type):
1211
+ if type.upper() == "BOOLEAN":
1212
+ column.type = common_pb2.DataType.BOOLEAN
1213
+ elif type.upper() == "SHORT":
1214
+ column.type = common_pb2.DataType.SHORT
1215
+ elif type.upper() == "INT":
1216
+ column.type = common_pb2.DataType.SHORT
1217
+ elif type.upper() == "LONG":
1218
+ column.type = common_pb2.DataType.LONG
1219
+ elif type.upper() == "DECIMAL":
1220
+ raise ValueError("DECIMAL data type missing precision and scale")
1221
+ elif type.upper() == "FLOAT":
1222
+ column.type = common_pb2.DataType.FLOAT
1223
+ elif type.upper() == "DOUBLE":
1224
+ column.type = common_pb2.DataType.DOUBLE
1225
+ elif type.upper() == "NAIVE_DATE":
1226
+ column.type = common_pb2.DataType.NAIVE_DATE
1227
+ elif type.upper() == "NAIVE_DATETIME":
1228
+ column.type = common_pb2.DataType.NAIVE_DATETIME
1229
+ elif type.upper() == "UTC_DATETIME":
1230
+ column.type = common_pb2.DataType.UTC_DATETIME
1231
+ elif type.upper() == "BINARY":
1232
+ column.type = common_pb2.DataType.BINARY
1233
+ elif type.upper() == "XML":
1234
+ column.type = common_pb2.DataType.XML
1235
+ elif type.upper() == "STRING":
1236
+ column.type = common_pb2.DataType.STRING
1237
+ elif type.upper() == "JSON":
1238
+ column.type = common_pb2.DataType.JSON
1239
+ else:
1240
+ raise ValueError("Unrecognized column type encountered:: ", str(type))
1235
1241
 
1236
1242
  def Update(self, request, context):
1237
1243
  """Overrides the Update method from ConnectorServicer.
@@ -1280,14 +1286,16 @@ def find_connector_object(project_path) -> Connector:
1280
1286
  if '<fivetran_connector_sdk.Connector object at' in str(obj_attr):
1281
1287
  return obj_attr
1282
1288
 
1283
- print("SEVERE: The connector object is missing. Please ensure that you have defined a connector object using the correct syntax in your `connector.py` file. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsrequiredobjectconnector")
1289
+ print(
1290
+ "SEVERE: The connector object is missing. Please ensure that you have defined a connector object using the correct syntax in your `connector.py` file. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsrequiredobjectconnector")
1284
1291
  sys.exit(1)
1285
1292
 
1286
1293
 
1287
1294
  def suggest_correct_command(input_command: str) -> bool:
1288
1295
  # for typos
1289
1296
  # calculate the edit distance of the input command (lowercased) with each of the valid commands
1290
- edit_distances_of_commands = sorted([(command, edit_distance(command, input_command.lower())) for command in VALID_COMMANDS], key=lambda x: x[1])
1297
+ edit_distances_of_commands = sorted(
1298
+ [(command, edit_distance(command, input_command.lower())) for command in VALID_COMMANDS], key=lambda x: x[1])
1291
1299
 
1292
1300
  if edit_distances_of_commands[0][1] <= MAX_ALLOWED_EDIT_DISTANCE_FROM_VALID_COMMAND:
1293
1301
  # if the closest command is within the max allowed edit distance, we suggest that command
@@ -1315,15 +1323,15 @@ def edit_distance(first_string: str, second_string: str) -> int:
1315
1323
  first_string_length: int = len(first_string)
1316
1324
  second_string_length: int = len(second_string)
1317
1325
 
1318
- # Initialize the previous row of distances (for the base case of an empty first string)
1319
- # 'previous_row[j]' holds the edit distance between an empty prefix of 'first_string' and the first 'j' characters of 'second_string'.
1326
+ # Initialize the previous row of distances (for the base case of an empty first string) 'previous_row[j]' holds
1327
+ # the edit distance between an empty prefix of 'first_string' and the first 'j' characters of 'second_string'.
1320
1328
  # The first row is filled with values [0, 1, 2, ..., second_string_length]
1321
1329
  previous_row: list[int] = list(range(second_string_length + 1))
1322
1330
 
1323
1331
  # Rest of the rows
1324
1332
  for first_string_index in range(1, first_string_length + 1):
1325
1333
  # Start the current row with the distance for an empty second string
1326
- current_row: list[int] = [first_string_index] # j = 0
1334
+ current_row: list[int] = [first_string_index]
1327
1335
 
1328
1336
  # Iterate over each character in the second string
1329
1337
  for second_string_index in range(1, second_string_length + 1):
@@ -1332,7 +1340,8 @@ def edit_distance(first_string: str, second_string: str) -> int:
1332
1340
  current_row.append(previous_row[second_string_index - 1])
1333
1341
  else:
1334
1342
  # Minimum cost of insertion, deletion, or substitution
1335
- current_row.append(1 + min(current_row[-1], previous_row[second_string_index], previous_row[second_string_index - 1]))
1343
+ current_row.append(
1344
+ 1 + min(current_row[-1], previous_row[second_string_index], previous_row[second_string_index - 1]))
1336
1345
 
1337
1346
  # Move to the next row
1338
1347
  previous_row = current_row
@@ -1374,16 +1383,40 @@ def main():
1374
1383
  configuration = args.configuration if args.configuration else None
1375
1384
  state = args.state if args.state else os.getenv('FIVETRAN_STATE', None)
1376
1385
 
1386
+ configuration = validate_and_load_configuration(args, configuration)
1387
+ state = validate_and_load_state(args, state)
1388
+
1389
+ if args.command.lower() == "deploy":
1390
+ if args.state:
1391
+ print("WARNING: 'state' parameter is not used for 'deploy' command")
1392
+ connector_object.deploy(args.project_path, ft_deploy_key, ft_group, ft_connection, configuration)
1393
+
1394
+ elif args.command.lower() == "debug":
1395
+ connector_object.debug(args.project_path, configuration, state)
1396
+
1397
+ elif args.command.lower() == "reset":
1398
+ reset_local_file_directory(args)
1399
+ else:
1400
+ if not suggest_correct_command(args.command):
1401
+ raise NotImplementedError(f"Invalid command: {args.command}, see `fivetran --help`")
1402
+
1403
+
1404
+ def validate_and_load_configuration(args, configuration):
1377
1405
  if configuration:
1378
1406
  json_filepath = os.path.join(args.project_path, args.configuration)
1379
1407
  if os.path.isfile(json_filepath):
1380
1408
  with open(json_filepath, 'r') as fi:
1381
1409
  configuration = json.load(fi)
1382
1410
  else:
1383
- raise ValueError("Configuration must be provided as a JSON file. Please check your input. Reference: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")
1411
+ raise ValueError(
1412
+ "Configuration must be provided as a JSON file. Please check your input. Reference: "
1413
+ "https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")
1384
1414
  else:
1385
1415
  configuration = {}
1416
+ return configuration
1386
1417
 
1418
+
1419
+ def validate_and_load_state(args, state):
1387
1420
  if state:
1388
1421
  json_filepath = os.path.join(args.project_path, args.state)
1389
1422
  if os.path.isfile(json_filepath):
@@ -1393,31 +1426,23 @@ def main():
1393
1426
  state = json.loads(state)
1394
1427
  else:
1395
1428
  state = {}
1429
+ return state
1396
1430
 
1397
- if args.command.lower() == "deploy":
1398
- if args.state:
1399
- print("WARNING: 'state' parameter is not used for 'deploy' command")
1400
- connector_object.deploy(args.project_path, ft_deploy_key, ft_group, ft_connection, configuration)
1401
-
1402
- elif args.command.lower() == "debug":
1403
- connector_object.debug(args.project_path, configuration, state)
1404
1431
 
1405
- elif args.command.lower() == "reset":
1406
- files_path = os.path.join(args.project_path, OUTPUT_FILES_DIR)
1407
- confirm = input("This will delete your current state and `warehouse.db` files. Do you want to continue? (Y/N): ")
1408
- if confirm.lower() != "y":
1409
- print("INFO: Reset canceled")
1410
- else:
1411
- try:
1412
- if os.path.exists(files_path) and os.path.isdir(files_path):
1413
- shutil.rmtree(files_path)
1414
- print("INFO: Reset Successful")
1415
- except Exception as e:
1416
- print("ERROR: Reset Failed")
1417
- raise e
1432
+ def reset_local_file_directory(args):
1433
+ files_path = os.path.join(args.project_path, OUTPUT_FILES_DIR)
1434
+ confirm = input(
1435
+ "This will delete your current state and `warehouse.db` files. Do you want to continue? (Y/N): ")
1436
+ if confirm.lower() != "y":
1437
+ print("INFO: Reset canceled")
1418
1438
  else:
1419
- if not suggest_correct_command(args.command):
1420
- raise NotImplementedError(f"Invalid command: {args.command}, see `fivetran --help`")
1439
+ try:
1440
+ if os.path.exists(files_path) and os.path.isdir(files_path):
1441
+ shutil.rmtree(files_path)
1442
+ print("INFO: Reset Successful")
1443
+ except Exception as e:
1444
+ print("ERROR: Reset Failed")
1445
+ raise e
1421
1446
 
1422
1447
 
1423
1448
  if __name__ == "__main__":
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: fivetran_connector_sdk
3
- Version: 0.9.30.1
3
+ Version: 0.10.7.1
4
4
  Summary: Build custom connectors on Fivetran platform
5
5
  Author-email: Fivetran <developers@fivetran.com>
6
6
  Project-URL: Homepage, https://fivetran.com/docs/connectors/connector-sdk