fivetran-connector-sdk 0.9.30.1__tar.gz → 0.10.15.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (18)
  1. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/PKG-INFO +3 -1
  2. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/README.md +2 -0
  3. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk/__init__.py +301 -276
  4. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk.egg-info/PKG-INFO +3 -1
  5. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/pyproject.toml +0 -0
  6. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/setup.cfg +0 -0
  7. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk/protos/__init__.py +0 -0
  8. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk/protos/common_pb2.py +0 -0
  9. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk/protos/common_pb2.pyi +0 -0
  10. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk/protos/common_pb2_grpc.py +0 -0
  11. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk/protos/connector_sdk_pb2.py +0 -0
  12. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk/protos/connector_sdk_pb2.pyi +0 -0
  13. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk/protos/connector_sdk_pb2_grpc.py +0 -0
  14. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk.egg-info/SOURCES.txt +0 -0
  15. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk.egg-info/dependency_links.txt +0 -0
  16. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk.egg-info/entry_points.txt +0 -0
  17. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk.egg-info/requires.txt +0 -0
  18. {fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk.egg-info/top_level.txt +0 -0
{fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fivetran_connector_sdk
- Version: 0.9.30.1
+ Version: 0.10.15.1
  Summary: Build custom connectors on Fivetran platform
  Author-email: Fivetran <developers@fivetran.com>
  Project-URL: Homepage, https://fivetran.com/docs/connectors/connector-sdk
@@ -17,6 +17,8 @@ Requires-Dist: get_pypi_latest_version==0.0.12
  Requires-Dist: pipreqs==0.5.0

  # **fivetran-connector-sdk**
+ [![Downloads](https://static.pepy.tech/badge/fivetran-connector-sdk)](https://pepy.tech/project/fivetran-connector-sdk)
+
  The *fivetran-connector-sdk* allows users to execute custom, self-written Python code within [Fivetran's](https://www.fivetran.com/) secure cloud environment. Fivetran automatically manages running the connectors on your scheduled frequency and manages the required compute resources.

  The Connector SDK service is the best fit for the following use cases:
{fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/README.md
@@ -1,4 +1,6 @@
  # **fivetran-connector-sdk**
+ [![Downloads](https://static.pepy.tech/badge/fivetran-connector-sdk)](https://pepy.tech/project/fivetran-connector-sdk)
+
  The *fivetran-connector-sdk* allows users to execute custom, self-written Python code within [Fivetran's](https://www.fivetran.com/) secure cloud environment. Fivetran automatically manages running the connectors on your scheduled frequency and manages the required compute resources.

  The Connector SDK service is the best fit for the following use cases:
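For orientation, a minimal `connector.py` built against this SDK looks roughly like the sketch below. The table name, data values, and state key are illustrative and not part of this release; the `Connector` class, `Operations.upsert`/`checkpoint`, and the `debug` entry point are the ones defined in `src/fivetran_connector_sdk/__init__.py` further down this diff.

```python
# Minimal connector sketch (illustrative names; not shipped with this package).
from fivetran_connector_sdk import Connector
from fivetran_connector_sdk import Operations as op


def schema(configuration: dict):
    # One table keyed by "id"; undeclared column types are inferred from values.
    return [{"table": "hello_world", "primary_key": ["id"]}]


def update(configuration: dict, state: dict):
    # Operations must be yielded back to the SDK (see _yield_check in __init__.py).
    yield op.upsert(table="hello_world", data={"id": 1, "message": "hello"})
    # Persist a cursor so the next sync can resume from here.
    yield op.checkpoint(state={"cursor": 1})


# find_connector_object() looks for exactly this kind of module-level object.
connector = Connector(update=update, schema=schema)

if __name__ == "__main__":
    # Runs the local connector tester; the `fivetran debug` command does the same.
    connector.debug()
```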
{fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk/__init__.py
@@ -1,5 +1,5 @@
  import argparse
- from typing import Optional
+ from typing import Optional, Tuple

  import grpc
  import importlib.util
@@ -25,7 +25,7 @@ from fivetran_connector_sdk.protos import common_pb2
  from fivetran_connector_sdk.protos import connector_sdk_pb2
  from fivetran_connector_sdk.protos import connector_sdk_pb2_grpc

- __version__ = "0.9.30.1"
+ __version__ = "0.10.15.1"

  MAC_OS = "mac"
  WIN_OS = "windows"
@@ -38,6 +38,7 @@ UPLOAD_FILENAME = "code.zip"
  LAST_VERSION_CHECK_FILE = "_last_version_check"
  ROOT_LOCATION = ".ft_sdk_connector_tester"
  OUTPUT_FILES_DIR = "files"
+ REQUIREMENTS_TXT = "requirements.txt"
  ONE_DAY_IN_SEC = 24 * 60 * 60

  EXCLUDED_DIRS = ["__pycache__", "lib", "include", OUTPUT_FILES_DIR]
@@ -121,7 +122,7 @@ class Logging:
  class Operations:
  @staticmethod
  def upsert(table: str, data: dict) -> list[connector_sdk_pb2.UpdateResponse]:
- """Performs an upsert operation on the specified table with the given data, deleting any existing value with the same primary key.
+ """Updates records with the same primary key if already present in the destination. Inserts new records if not already present in the destination.

  Args:
  table (str): The name of the table.
@@ -228,8 +229,8 @@ class Operations:
  """
  _yield_check(inspect.stack())
  return connector_sdk_pb2.UpdateResponse(
- operation=connector_sdk_pb2.Operation(checkpoint=connector_sdk_pb2.Checkpoint(
- state_json=json.dumps(state))))
+ operation=connector_sdk_pb2.Operation(checkpoint=connector_sdk_pb2.Checkpoint(
+ state_json=json.dumps(state))))


  def check_newer_version():
@@ -295,76 +296,85 @@ def _map_data_to_columns(data: dict, columns: dict) -> dict:
  if v is None:
  mapped_data[k] = common_pb2.ValueType(null=True)
  elif (k in columns) and columns[k].type != common_pb2.DataType.UNSPECIFIED:
- if columns[k].type == common_pb2.DataType.BOOLEAN:
- mapped_data[k] = common_pb2.ValueType(bool=v)
- elif columns[k].type == common_pb2.DataType.SHORT:
- mapped_data[k] = common_pb2.ValueType(short=v)
- elif columns[k].type == common_pb2.DataType.INT:
- mapped_data[k] = common_pb2.ValueType(int=v)
- elif columns[k].type == common_pb2.DataType.LONG:
- mapped_data[k] = common_pb2.ValueType(long=v)
- elif columns[k].type == common_pb2.DataType.DECIMAL:
- mapped_data[k] = common_pb2.ValueType(decimal=v)
- elif columns[k].type == common_pb2.DataType.FLOAT:
- mapped_data[k] = common_pb2.ValueType(float=v)
- elif columns[k].type == common_pb2.DataType.DOUBLE:
- mapped_data[k] = common_pb2.ValueType(double=v)
- elif columns[k].type == common_pb2.DataType.NAIVE_DATE:
- timestamp = timestamp_pb2.Timestamp()
- dt = datetime.strptime(v, "%Y-%m-%d")
- timestamp.FromDatetime(dt)
- mapped_data[k] = common_pb2.ValueType(naive_date=timestamp)
- elif columns[k].type == common_pb2.DataType.NAIVE_DATETIME:
- if '.' not in v: v = v + ".0"
- timestamp = timestamp_pb2.Timestamp()
- dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f")
- timestamp.FromDatetime(dt)
- mapped_data[k] = common_pb2.ValueType(naive_datetime=timestamp)
- elif columns[k].type == common_pb2.DataType.UTC_DATETIME:
- timestamp = timestamp_pb2.Timestamp()
- if '.' in v:
- dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f%z")
- else:
- dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S%z")
- timestamp.FromDatetime(dt)
- mapped_data[k] = common_pb2.ValueType(utc_datetime=timestamp)
- elif columns[k].type == common_pb2.DataType.BINARY:
- mapped_data[k] = common_pb2.ValueType(binary=v)
- elif columns[k].type == common_pb2.DataType.XML:
- mapped_data[k] = common_pb2.ValueType(xml=v)
- elif columns[k].type == common_pb2.DataType.STRING:
- incoming = v if isinstance(v, str) else str(v)
- mapped_data[k] = common_pb2.ValueType(string=incoming)
- elif columns[k].type == common_pb2.DataType.JSON:
- mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
- else:
- raise ValueError(f"Unsupported data type encountered: {columns[k].type}. Please use valid data types.")
+ map_defined_data_type(columns, k, mapped_data, v)
  else:
- # We can infer type from the value
- if isinstance(v, int):
- if abs(v) > 2147483647:
- mapped_data[k] = common_pb2.ValueType(long=v)
- else:
- mapped_data[k] = common_pb2.ValueType(int=v)
- elif isinstance(v, float):
- mapped_data[k] = common_pb2.ValueType(float=v)
- elif isinstance(v, bool):
- mapped_data[k] = common_pb2.ValueType(bool=v)
- elif isinstance(v, bytes):
- mapped_data[k] = common_pb2.ValueType(binary=v)
- elif isinstance(v, list):
- raise ValueError("Values for the columns cannot be of type 'list'. Please ensure that all values are of a supported type. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#supporteddatatypes")
- elif isinstance(v, dict):
- mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
- elif isinstance(v, str):
- mapped_data[k] = common_pb2.ValueType(string=v)
- else:
- # Convert arbitrary objects to string
- mapped_data[k] = common_pb2.ValueType(string=str(v))
+ map_inferred_data_type(k, mapped_data, v)

  return mapped_data


+ def map_inferred_data_type(k, mapped_data, v):
+ # We can infer type from the value
+ if isinstance(v, int):
+ if abs(v) > 2147483647:
+ mapped_data[k] = common_pb2.ValueType(long=v)
+ else:
+ mapped_data[k] = common_pb2.ValueType(int=v)
+ elif isinstance(v, float):
+ mapped_data[k] = common_pb2.ValueType(float=v)
+ elif isinstance(v, bool):
+ mapped_data[k] = common_pb2.ValueType(bool=v)
+ elif isinstance(v, bytes):
+ mapped_data[k] = common_pb2.ValueType(binary=v)
+ elif isinstance(v, list):
+ raise ValueError(
+ "Values for the columns cannot be of type 'list'. Please ensure that all values are of a supported type. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#supporteddatatypes")
+ elif isinstance(v, dict):
+ mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
+ elif isinstance(v, str):
+ mapped_data[k] = common_pb2.ValueType(string=v)
+ else:
+ # Convert arbitrary objects to string
+ mapped_data[k] = common_pb2.ValueType(string=str(v))
+
+
+ def map_defined_data_type(columns, k, mapped_data, v):
+ if columns[k].type == common_pb2.DataType.BOOLEAN:
+ mapped_data[k] = common_pb2.ValueType(bool=v)
+ elif columns[k].type == common_pb2.DataType.SHORT:
+ mapped_data[k] = common_pb2.ValueType(short=v)
+ elif columns[k].type == common_pb2.DataType.INT:
+ mapped_data[k] = common_pb2.ValueType(int=v)
+ elif columns[k].type == common_pb2.DataType.LONG:
+ mapped_data[k] = common_pb2.ValueType(long=v)
+ elif columns[k].type == common_pb2.DataType.DECIMAL:
+ mapped_data[k] = common_pb2.ValueType(decimal=v)
+ elif columns[k].type == common_pb2.DataType.FLOAT:
+ mapped_data[k] = common_pb2.ValueType(float=v)
+ elif columns[k].type == common_pb2.DataType.DOUBLE:
+ mapped_data[k] = common_pb2.ValueType(double=v)
+ elif columns[k].type == common_pb2.DataType.NAIVE_DATE:
+ timestamp = timestamp_pb2.Timestamp()
+ dt = datetime.strptime(v, "%Y-%m-%d")
+ timestamp.FromDatetime(dt)
+ mapped_data[k] = common_pb2.ValueType(naive_date=timestamp)
+ elif columns[k].type == common_pb2.DataType.NAIVE_DATETIME:
+ if '.' not in v: v = v + ".0"
+ timestamp = timestamp_pb2.Timestamp()
+ dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f")
+ timestamp.FromDatetime(dt)
+ mapped_data[k] = common_pb2.ValueType(naive_datetime=timestamp)
+ elif columns[k].type == common_pb2.DataType.UTC_DATETIME:
+ timestamp = timestamp_pb2.Timestamp()
+ if '.' in v:
+ dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f%z")
+ else:
+ dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S%z")
+ timestamp.FromDatetime(dt)
+ mapped_data[k] = common_pb2.ValueType(utc_datetime=timestamp)
+ elif columns[k].type == common_pb2.DataType.BINARY:
+ mapped_data[k] = common_pb2.ValueType(binary=v)
+ elif columns[k].type == common_pb2.DataType.XML:
+ mapped_data[k] = common_pb2.ValueType(xml=v)
+ elif columns[k].type == common_pb2.DataType.STRING:
+ incoming = v if isinstance(v, str) else str(v)
+ mapped_data[k] = common_pb2.ValueType(string=incoming)
+ elif columns[k].type == common_pb2.DataType.JSON:
+ mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
+ else:
+ raise ValueError(f"Unsupported data type encountered: {columns[k].type}. Please use valid data types.")
+
+
  def _yield_check(stack):
  """Checks for the presence of 'yield' in the calling code.
  Args:
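As a rough illustration of the inferred-type branches that `map_inferred_data_type` factors out above, the record below is made up; the comments restate the mapping the code performs for columns that are not declared in the schema.

```python
# Hypothetical record for a table with no declared column types; each value is
# routed by map_inferred_data_type() when _map_data_to_columns() builds the row.
row = {
    "id": 42,                  # int within +/-2147483647        -> ValueType(int=...)
    "big_id": 3_000_000_000,   # abs(value) > 2147483647         -> ValueType(long=...)
    "score": 0.75,             # float                           -> ValueType(float=...)
    "payload": {"a": 1},       # dict, serialized via json.dumps -> ValueType(json=...)
    "raw": b"\x00\x01",        # bytes                           -> ValueType(binary=...)
    "note": "free text",       # str                             -> ValueType(string=...)
    # "tags": ["a", "b"]       # list values raise ValueError (unsupported type)
}
# Note: bool is a subclass of int in Python, so a bare True/False hits the
# isinstance(v, int) branch first; declare BOOLEAN in schema() to force bool mapping.
```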
@@ -384,11 +394,13 @@ def _yield_check(stack):
  calling_code = stack[1].code_context[0]
  if f"{called_method}(" in calling_code:
  if 'yield' not in calling_code:
- print(f"SEVERE: Please add 'yield' to '{called_method}' operation on line {stack[1].lineno} in file '{stack[1].filename}'")
+ print(
+ f"SEVERE: Please add 'yield' to '{called_method}' operation on line {stack[1].lineno} in file '{stack[1].filename}'")
  os._exit(1)
  else:
  # This should never happen
- raise RuntimeError(f"The '{called_method}' function is missing in the connector. Please ensure that the '{called_method}' function is properly defined in your code to proceed. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsmethods")
+ raise RuntimeError(
+ f"The '{called_method}' function is missing in the connector. Please ensure that the '{called_method}' function is properly defined in your code to proceed. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsmethods")


  def _check_dict(incoming: dict, string_only: bool = False) -> dict:
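The `_yield_check` guard above exists because each operation builds an `UpdateResponse` that must be yielded from `update()` rather than merely called; a small sketch of the difference (table name and data are made up):

```python
from fivetran_connector_sdk import Operations as op


def update(configuration: dict, state: dict):
    # Wrong: the UpdateResponse is built and discarded, and _yield_check()
    # prints the SEVERE message above and exits the process.
    # op.upsert(table="events", data={"id": 1})

    # Right: yield the operation so the SDK streams it to the destination.
    yield op.upsert(table="events", data={"id": 1})
```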
@@ -405,12 +417,14 @@ def _check_dict(incoming: dict, string_only: bool = False) -> dict:
  return {}

  if not isinstance(incoming, dict):
- raise ValueError("Configuration must be provided as a JSON dictionary. Please check your input. Reference: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")
+ raise ValueError(
+ "Configuration must be provided as a JSON dictionary. Please check your input. Reference: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")

  if string_only:
  for k, v in incoming.items():
  if not isinstance(v, str):
- print("SEVERE: All values in the configuration must be STRING. Please check your configuration and ensure that every value is a STRING.")
+ print(
+ "SEVERE: All values in the configuration must be STRING. Please check your configuration and ensure that every value is a STRING.")
  os._exit(1)

  return incoming
@@ -436,6 +450,23 @@ def log_unused_deps_error(package_name: str, version: str):
  os._exit(1)


+ def validate_deploy_parameters(connection, deploy_key):
+ if not deploy_key or not connection:
+ print("SEVERE: The deploy command needs the following parameters:"
+ "\n\tRequired:\n"
+ "\t\t--api-key <BASE64-ENCODED-FIVETRAN-API-KEY-FOR-DEPLOYMENT>\n"
+ "\t\t--connection <VALID-CONNECTOR-SCHEMA_NAME>\n"
+ "\t(Optional):\n"
+ "\t\t--destination <DESTINATION_NAME> (Becomes required if there are multiple destinations)\n"
+ "\t\t--configuration <CONFIGURATION_FILE> (Completely replaces the existing configuration)")
+ os._exit(1)
+ if not is_connection_name_valid(connection):
+ print(f"SEVERE: Connection name: {connection} is invalid!\n The connection name should start with an "
+ f"underscore or a lowercase letter (a-z), followed by any combination of underscores, lowercase "
+ f"letters, or digits (0-9). Uppercase characters are not allowed.")
+ os._exit(1)
+
+
  class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  def __init__(self, update, schema=None):
  """Initializes the Connector instance.
@@ -450,22 +481,6 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  self.configuration = None
  self.state = None

- @staticmethod
- def __unpause_connection(id: str, deploy_key: str) -> bool:
- """Unpauses the connection with the given ID and deployment key.
-
- Args:
- id (str): The connection ID.
- deploy_key (str): The deployment key.
-
- Returns:
- bool: True if the connection was successfully unpaused, False otherwise.
- """
- resp = rq.patch(f"https://api.fivetran.com/v1/connectors/{id}",
- headers={"Authorization": f"Basic {deploy_key}"},
- json={"force": True})
- return resp.ok
-
  @staticmethod
  def fetch_requirements_from_file(file_path: str) -> list[str]:
  """Reads a requirements file and returns a list of dependencies.
@@ -526,13 +541,7 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  os.remove(tmp_requirements_file_path)

  if len(tmp_requirements) > 0:
- if os.path.exists("requirements.txt"):
- requirements = self.fetch_requirements_as_dict(self, os.path.join(project_path, 'requirements.txt'))
- else:
- with open("requirements.txt", 'w'):
- pass
- requirements = {}
- print("WARNING: Adding `requirements.txt` file to your project folder.")
+ requirements = self.load_or_add_requirements_file(project_path)

  version_mismatch_deps = {key: tmp_requirements[key] for key in
  (requirements.keys() & tmp_requirements.keys())
@@ -543,35 +552,51 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):

  missing_deps = {key: tmp_requirements[key] for key in (tmp_requirements.keys() - requirements.keys())}
  if missing_deps:
- log_level = "ERROR" if is_deploy else "WARNING"
- print(log_level +
- ": Please include the following dependency libraries in requirements.txt, to be used by "
- "Fivetran production. "
- "For more information, please visit: "
- "https://fivetran.com/docs/connectors/connector-sdk/detailed-guide"
- "#workingwithrequirementstxtfile")
- print(*list(missing_deps.values()))
- if is_deploy:
- os._exit(1)
+ self.handle_missing_deps(is_deploy, missing_deps)

  unused_deps = list(requirements.keys() - tmp_requirements.keys())
  if unused_deps:
- if 'fivetran_connector_sdk' in unused_deps:
- log_unused_deps_error("fivetran_connector_sdk", __version__)
- elif 'requests' in unused_deps:
- log_unused_deps_error("requests", "2.32.3")
- else:
- print("INFO: The following dependencies are not needed, "
- "they are not used or already installed. Please remove them from requirements.txt:")
- print(*unused_deps)
+ self.handle_unused_deps(unused_deps)
  else:
- if os.path.exists("requirements.txt"):
+ if os.path.exists(REQUIREMENTS_TXT):
  print("WARNING: `requirements.txt` is not required as no additional "
  "Python libraries are required or all required libraries for "
  "your code are pre-installed.")

  if is_deploy: print("Successful validation of requirements.txt")

+ def handle_unused_deps(self, unused_deps):
+ if 'fivetran_connector_sdk' in unused_deps:
+ log_unused_deps_error("fivetran_connector_sdk", __version__)
+ elif 'requests' in unused_deps:
+ log_unused_deps_error("requests", "2.32.3")
+ else:
+ print("INFO: The following dependencies are not needed, "
+ "they are not used or already installed. Please remove them from requirements.txt:")
+ print(*unused_deps)
+
+ def handle_missing_deps(self, is_deploy, missing_deps):
+ log_level = "ERROR" if is_deploy else "WARNING"
+ print(log_level +
+ ": Please include the following dependency libraries in requirements.txt, to be used by "
+ "Fivetran production. "
+ "For more information, please visit: "
+ "https://fivetran.com/docs/connectors/connector-sdk/detailed-guide"
+ "#workingwithrequirementstxtfile")
+ print(*list(missing_deps.values()))
+ if is_deploy:
+ os._exit(1)
+
+ def load_or_add_requirements_file(self, project_path):
+ if os.path.exists(REQUIREMENTS_TXT):
+ requirements = self.fetch_requirements_as_dict(self, os.path.join(project_path, 'requirements.txt'))
+ else:
+ with open(REQUIREMENTS_TXT, 'w'):
+ pass
+ requirements = {}
+ print("WARNING: Adding `requirements.txt` file to your project folder.")
+ return requirements
+
  # Call this method to deploy the connector to Fivetran platform
  def deploy(self, project_path: str, deploy_key: str, group: str, connection: str, configuration: dict = None):
  """Deploys the connector to the Fivetran platform.
@@ -583,21 +608,7 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  connection (str): The connection name.
  configuration (dict): The configuration dictionary.
  """
- if not deploy_key or not connection:
- print("SEVERE: The deploy command needs the following parameters:"
- "\n\tRequired:\n"
- "\t\t--api-key <BASE64-ENCODED-FIVETRAN-API-KEY-FOR-DEPLOYMENT>\n"
- "\t\t--connection <VALID-CONNECTOR-SCHEMA_NAME>\n"
- "\t(Optional):\n"
- "\t\t--destination <DESTINATION_NAME> (Becomes required if there are multiple destinations)\n"
- "\t\t--configuration <CONFIGURATION_FILE> (Completely replaces the existing configuration)")
- os._exit(1)
-
- if not is_connection_name_valid(connection):
- print(f"SEVERE: Connection name: {connection} is invalid!\n The connection name should start with an "
- f"underscore or a lowercase letter (a-z), followed by any combination of underscores, lowercase "
- f"letters, or digits (0-9). Uppercase characters are not allowed.")
- os._exit(1)
+ validate_deploy_parameters(connection, deploy_key)

  _check_dict(configuration, True)

@@ -616,8 +627,7 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  self.validate_requirements_file(project_path, True)

  group_id, group_name = self.__get_group_info(group, deploy_key)
- connection_id, service = self.__get_connection_id(
- connection, group, group_id, deploy_key)
+ connection_id, service = self.__get_connection_id(connection, group, group_id, deploy_key) or (None, None)

  if connection_id:
  if service != 'connector_sdk':
@@ -664,22 +674,6 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  if not upload_result:
  os._exit(1)

- @staticmethod
- def __force_sync(id: str, deploy_key: str) -> bool:
- """Forces a sync operation on the connection with the given ID and deployment key.
-
- Args:
- id (str): The connection ID.
- deploy_key (str): The deployment key.
-
- Returns:
- bool: True if the sync was successfully started, False otherwise.
- """
- resp = rq.post(f"https://api.fivetran.com/v1/connectors/{id}/sync",
- headers={"Authorization": f"Basic {deploy_key}"},
- json={"force": True})
- return resp.ok
-
  @staticmethod
  def __update_connection(id: str, name: str, group: str, config: dict, deploy_key: str):
  """Updates the connection with the given ID, name, group, configuration, and deployment key.
@@ -697,16 +691,17 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  resp = rq.patch(f"https://api.fivetran.com/v1/connectors/{id}",
  headers={"Authorization": f"Basic {deploy_key}"},
  json={
- "config": config,
- "run_setup_tests": True
+ "config": config,
+ "run_setup_tests": True
  })

  if not resp.ok:
- print(f"SEVERE: Unable to update Connection '{name}' in destination '{group}', failed with error: '{resp.json()['message']}'.")
+ print(
+ f"SEVERE: Unable to update Connection '{name}' in destination '{group}', failed with error: '{resp.json()['message']}'.")
  os._exit(1)

  @staticmethod
- def __get_connection_id(name: str, group: str, group_id: str, deploy_key: str) -> Optional[str]:
+ def __get_connection_id(name: str, group: str, group_id: str, deploy_key: str) -> Optional[Tuple[str, str]]:
  """Retrieves the connection ID for the specified connection schema name, group, and deployment key.

  Args:
@@ -729,7 +724,7 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  if resp.json()['data']['items']:
  return resp.json()['data']['items'][0]['id'], resp.json()['data']['items'][0]['service']

- return None, None
+ return None

  @staticmethod
  def __create_connection(deploy_key: str, group_id: str, config: dict) -> rq.Response:
@@ -746,12 +741,12 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  response = rq.post(f"https://api.fivetran.com/v1/connectors",
  headers={"Authorization": f"Basic {deploy_key}"},
  json={
- "group_id": group_id,
- "service": "connector_sdk",
- "config": config,
- "paused": True,
- "run_setup_tests": True,
- "sync_frequency": "360",
+ "group_id": group_id,
+ "service": "connector_sdk",
+ "config": config,
+ "paused": True,
+ "run_setup_tests": True,
+ "sync_frequency": "360",
  })
  return response

@@ -791,7 +786,8 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  zipf.write(file_path, arcname)

  if not connector_file_exists:
- print("SEVERE: The 'connector.py' file is missing. Please ensure that 'connector.py' is present in your project directory, and that the file name is in lowercase letters. All custom connectors require this file because Fivetran calls it to start a sync.")
+ print(
+ "SEVERE: The 'connector.py' file is missing. Please ensure that 'connector.py' is present in your project directory, and that the file name is in lowercase letters. All custom connectors require this file because Fivetran calls it to start a sync.")
  os._exit(1)
  return upload_filepath

@@ -953,12 +949,14 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  configuration: dict = None,
  state: dict = None,
  log_level: Logging.Level = Logging.Level.FINE) -> bool:
- """Tests the connector code by running it with the connector tester.
+ """Tests the connector code by running it with the connector tester.\n
+ state.json docs: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithstatejsonfile\n
+ configuration.json docs: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile

  Args:
  project_path (str): The path to the project.
- configuration (dict): The configuration dictionary.
- state (dict): The state dictionary.
+ configuration (dict): The configuration dictionary, same as configuration.json if present.
+ state (dict): The state dictionary, same as state.json if present.
  log_level (Logging.Level): The logging level.

  Returns:
@@ -998,11 +996,12 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  with open(download_filepath, 'wb') as fo:
  fo.write(r.content)
  else:
- print(f"\nSEVERE: Failed to download the connector tester. Please check your access permissions or try again later ( status code: {r.status_code}), url: {download_url}")
- os._exit(1)
- except:
- print(f"\nSEVERE: Failed to download the connector tester. Error details: {traceback.format_exc()}")
- os._exit(1)
+ raise RuntimeError(
+ f"\nSEVERE: Failed to download the connector tester. Please check your access permissions or "
+ f"try again later ( status code: {r.status_code}), url: {download_url}")
+ except RuntimeError:
+ raise RuntimeError(
+ f"SEVERE: Failed to download the connector tester. Error details: {traceback.format_exc()}")

  try:
  # unzip it
@@ -1016,9 +1015,9 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  os.chmod(java_exe, st.st_mode | stat.S_IEXEC)
  print("✓")
  except:
- print(f"\nSEVERE: Failed to install the connector tester. Error details: ", traceback.format_exc())
  shutil.rmtree(tester_root_dir)
- os._exit(1)
+ raise RuntimeError(f"\nSEVERE: Failed to install the connector tester. Error details: ",
+ traceback.format_exc())

  project_path = os.getcwd() if project_path is None else project_path
  self.validate_requirements_file(project_path, False)
@@ -1028,18 +1027,14 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  # Uncomment this to run the tester manually
  # server.wait_for_termination()

- error = False
  try:
  print(f"INFO: Running connector tester...")
  for log_msg in self.__run_tester(java_exe, tester_root_dir, project_path, 50051):
  print(log_msg, end="")
  except:
  print(traceback.format_exc())
- error = True
-
  finally:
  server.stop(grace=2.0)
- return error

  @staticmethod
  def __java_exe(location: str, os_name: str) -> str:
@@ -1158,80 +1153,90 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
  else:
  configuration = self.configuration if self.configuration else request.configuration
  response = self.schema_method(configuration)
+ self.process_tables(response)
+ return connector_sdk_pb2.SchemaResponse(without_schema=common_pb2.TableList(tables=TABLES.values()))

- for entry in response:
- if 'table' not in entry:
- raise ValueError("Entry missing table name: " + entry)
-
- table_name = entry['table']
-
- if table_name in TABLES:
- raise ValueError("Table already defined: " + table_name)
-
- table = common_pb2.Table(name=table_name)
- columns = {}
-
- if "primary_key" in entry:
- for pkey_name in entry["primary_key"]:
- column = columns[pkey_name] if pkey_name in columns else common_pb2.Column(name=pkey_name)
- column.primary_key = True
- columns[pkey_name] = column
-
- if "columns" in entry:
- for name, type in entry["columns"].items():
- column = columns[name] if name in columns else common_pb2.Column(name=name)
-
- if isinstance(type, str):
- if type.upper() == "BOOLEAN":
- column.type = common_pb2.DataType.BOOLEAN
- elif type.upper() == "SHORT":
- column.type = common_pb2.DataType.SHORT
- elif type.upper() == "INT":
- column.type = common_pb2.DataType.SHORT
- elif type.upper() == "LONG":
- column.type = common_pb2.DataType.LONG
- elif type.upper() == "DECIMAL":
- raise ValueError("DECIMAL data type missing precision and scale")
- elif type.upper() == "FLOAT":
- column.type = common_pb2.DataType.FLOAT
- elif type.upper() == "DOUBLE":
- column.type = common_pb2.DataType.DOUBLE
- elif type.upper() == "NAIVE_DATE":
- column.type = common_pb2.DataType.NAIVE_DATE
- elif type.upper() == "NAIVE_DATETIME":
- column.type = common_pb2.DataType.NAIVE_DATETIME
- elif type.upper() == "UTC_DATETIME":
- column.type = common_pb2.DataType.UTC_DATETIME
- elif type.upper() == "BINARY":
- column.type = common_pb2.DataType.BINARY
- elif type.upper() == "XML":
- column.type = common_pb2.DataType.XML
- elif type.upper() == "STRING":
- column.type = common_pb2.DataType.STRING
- elif type.upper() == "JSON":
- column.type = common_pb2.DataType.JSON
- else:
- raise ValueError("Unrecognized column type encountered:: ", str(type))
-
- elif isinstance(type, dict):
- if type['type'].upper() != "DECIMAL":
- raise ValueError("Expecting DECIMAL data type")
- column.type = common_pb2.DataType.DECIMAL
- column.decimal.precision = type['precision']
- column.decimal.scale = type['scale']
-
- else:
- raise ValueError("Unrecognized column type: ", str(type))
-
- if "primary_key" in entry and name in entry["primary_key"]:
- column.primary_key = True
-
- columns[name] = column
-
- table.columns.extend(columns.values())
- TABLES[table_name] = table
+ def process_tables(self, response):
+ for entry in response:
+ if 'table' not in entry:
+ raise ValueError("Entry missing table name: " + entry)

- return connector_sdk_pb2.SchemaResponse(without_schema=common_pb2.TableList(tables=TABLES.values()))
+ table_name = entry['table']
+
+ if table_name in TABLES:
+ raise ValueError("Table already defined: " + table_name)
+
+ table = common_pb2.Table(name=table_name)
+ columns = {}
+
+ if "primary_key" in entry:
+ self.process_primary_keys(columns, entry)
+
+ if "columns" in entry:
+ self.process_columns(columns, entry)
+
+ table.columns.extend(columns.values())
+ TABLES[table_name] = table
+
+ def process_primary_keys(self, columns, entry):
+ for pkey_name in entry["primary_key"]:
+ column = columns[pkey_name] if pkey_name in columns else common_pb2.Column(name=pkey_name)
+ column.primary_key = True
+ columns[pkey_name] = column
+
+ def process_columns(self, columns, entry):
+ for name, type in entry["columns"].items():
+ column = columns[name] if name in columns else common_pb2.Column(name=name)
+
+ if isinstance(type, str):
+ self.process_data_type(column, type)
+
+ elif isinstance(type, dict):
+ if type['type'].upper() != "DECIMAL":
+ raise ValueError("Expecting DECIMAL data type")
+ column.type = common_pb2.DataType.DECIMAL
+ column.decimal.precision = type['precision']
+ column.decimal.scale = type['scale']
+
+ else:
+ raise ValueError("Unrecognized column type: ", str(type))
+
+ if "primary_key" in entry and name in entry["primary_key"]:
+ column.primary_key = True
+
+ columns[name] = column
+
+ def process_data_type(self, column, type):
+ if type.upper() == "BOOLEAN":
+ column.type = common_pb2.DataType.BOOLEAN
+ elif type.upper() == "SHORT":
+ column.type = common_pb2.DataType.SHORT
+ elif type.upper() == "INT":
+ column.type = common_pb2.DataType.SHORT
+ elif type.upper() == "LONG":
+ column.type = common_pb2.DataType.LONG
+ elif type.upper() == "DECIMAL":
+ raise ValueError("DECIMAL data type missing precision and scale")
+ elif type.upper() == "FLOAT":
+ column.type = common_pb2.DataType.FLOAT
+ elif type.upper() == "DOUBLE":
+ column.type = common_pb2.DataType.DOUBLE
+ elif type.upper() == "NAIVE_DATE":
+ column.type = common_pb2.DataType.NAIVE_DATE
+ elif type.upper() == "NAIVE_DATETIME":
+ column.type = common_pb2.DataType.NAIVE_DATETIME
+ elif type.upper() == "UTC_DATETIME":
+ column.type = common_pb2.DataType.UTC_DATETIME
+ elif type.upper() == "BINARY":
+ column.type = common_pb2.DataType.BINARY
+ elif type.upper() == "XML":
+ column.type = common_pb2.DataType.XML
+ elif type.upper() == "STRING":
+ column.type = common_pb2.DataType.STRING
+ elif type.upper() == "JSON":
+ column.type = common_pb2.DataType.JSON
+ else:
+ raise ValueError("Unrecognized column type encountered:: ", str(type))

  def Update(self, request, context):
  """Overrides the Update method from ConnectorServicer.
@@ -1268,6 +1273,7 @@ def find_connector_object(project_path) -> Connector:
  object: The connector object.
  """

+ sys.path.append(project_path) # Allows python interpreter to search for modules in this path
  module_name = "connector_connector_code"
  connector_py = os.path.join(project_path, "connector.py")
  spec = importlib.util.spec_from_file_location(module_name, connector_py)
@@ -1280,14 +1286,16 @@ def find_connector_object(project_path) -> Connector:
  if '<fivetran_connector_sdk.Connector object at' in str(obj_attr):
  return obj_attr

- print("SEVERE: The connector object is missing. Please ensure that you have defined a connector object using the correct syntax in your `connector.py` file. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsrequiredobjectconnector")
+ print(
+ "SEVERE: The connector object is missing. Please ensure that you have defined a connector object using the correct syntax in your `connector.py` file. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsrequiredobjectconnector")
  sys.exit(1)


  def suggest_correct_command(input_command: str) -> bool:
  # for typos
  # calculate the edit distance of the input command (lowercased) with each of the valid commands
- edit_distances_of_commands = sorted([(command, edit_distance(command, input_command.lower())) for command in VALID_COMMANDS], key=lambda x: x[1])
+ edit_distances_of_commands = sorted(
+ [(command, edit_distance(command, input_command.lower())) for command in VALID_COMMANDS], key=lambda x: x[1])

  if edit_distances_of_commands[0][1] <= MAX_ALLOWED_EDIT_DISTANCE_FROM_VALID_COMMAND:
  # if the closest command is within the max allowed edit distance, we suggest that command
@@ -1315,15 +1323,15 @@ def edit_distance(first_string: str, second_string: str) -> int:
  first_string_length: int = len(first_string)
  second_string_length: int = len(second_string)

- # Initialize the previous row of distances (for the base case of an empty first string)
- # 'previous_row[j]' holds the edit distance between an empty prefix of 'first_string' and the first 'j' characters of 'second_string'.
+ # Initialize the previous row of distances (for the base case of an empty first string) 'previous_row[j]' holds
+ # the edit distance between an empty prefix of 'first_string' and the first 'j' characters of 'second_string'.
  # The first row is filled with values [0, 1, 2, ..., second_string_length]
  previous_row: list[int] = list(range(second_string_length + 1))

  # Rest of the rows
  for first_string_index in range(1, first_string_length + 1):
  # Start the current row with the distance for an empty second string
- current_row: list[int] = [first_string_index] # j = 0
+ current_row: list[int] = [first_string_index]

  # Iterate over each character in the second string
  for second_string_index in range(1, second_string_length + 1):
@@ -1332,7 +1340,8 @@ def edit_distance(first_string: str, second_string: str) -> int:
  current_row.append(previous_row[second_string_index - 1])
  else:
  # Minimum cost of insertion, deletion, or substitution
- current_row.append(1 + min(current_row[-1], previous_row[second_string_index], previous_row[second_string_index - 1]))
+ current_row.append(
+ 1 + min(current_row[-1], previous_row[second_string_index], previous_row[second_string_index - 1]))

  # Move to the next row
  previous_row = current_row
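For a concrete sense of the typo handling above: `edit_distance` is a standard Levenshtein implementation, and `suggest_correct_command` proposes the closest entry in `VALID_COMMANDS` when that distance is small enough. A small hedged check (the threshold constant's value is not shown in this diff):

```python
# Assuming edit_distance() as defined above:
assert edit_distance("debg", "debug") == 1     # one insertion ('u')
assert edit_distance("deploi", "deploy") == 1  # one substitution ('i' -> 'y')
assert edit_distance("", "reset") == 5         # build "reset" from scratch

# So a typo like `fivetran debg` lets suggest_correct_command() propose "debug",
# provided 1 <= MAX_ALLOWED_EDIT_DISTANCE_FROM_VALID_COMMAND.
```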
@@ -1374,16 +1383,40 @@ def main():
  configuration = args.configuration if args.configuration else None
  state = args.state if args.state else os.getenv('FIVETRAN_STATE', None)

+ configuration = validate_and_load_configuration(args, configuration)
+ state = validate_and_load_state(args, state)
+
+ if args.command.lower() == "deploy":
+ if args.state:
+ print("WARNING: 'state' parameter is not used for 'deploy' command")
+ connector_object.deploy(args.project_path, ft_deploy_key, ft_group, ft_connection, configuration)
+
+ elif args.command.lower() == "debug":
+ connector_object.debug(args.project_path, configuration, state)
+
+ elif args.command.lower() == "reset":
+ reset_local_file_directory(args)
+ else:
+ if not suggest_correct_command(args.command):
+ raise NotImplementedError(f"Invalid command: {args.command}, see `fivetran --help`")
+
+
+ def validate_and_load_configuration(args, configuration):
  if configuration:
  json_filepath = os.path.join(args.project_path, args.configuration)
  if os.path.isfile(json_filepath):
  with open(json_filepath, 'r') as fi:
  configuration = json.load(fi)
  else:
- raise ValueError("Configuration must be provided as a JSON file. Please check your input. Reference: https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")
+ raise ValueError(
+ "Configuration must be provided as a JSON file. Please check your input. Reference: "
+ "https://fivetran.com/docs/connectors/connector-sdk/detailed-guide#workingwithconfigurationjsonfile")
  else:
  configuration = {}
+ return configuration

+
+ def validate_and_load_state(args, state):
  if state:
  json_filepath = os.path.join(args.project_path, args.state)
  if os.path.isfile(json_filepath):
@@ -1393,31 +1426,23 @@ def main():
  state = json.loads(state)
  else:
  state = {}
+ return state

- if args.command.lower() == "deploy":
- if args.state:
- print("WARNING: 'state' parameter is not used for 'deploy' command")
- connector_object.deploy(args.project_path, ft_deploy_key, ft_group, ft_connection, configuration)
-
- elif args.command.lower() == "debug":
- connector_object.debug(args.project_path, configuration, state)

- elif args.command.lower() == "reset":
- files_path = os.path.join(args.project_path, OUTPUT_FILES_DIR)
- confirm = input("This will delete your current state and `warehouse.db` files. Do you want to continue? (Y/N): ")
- if confirm.lower() != "y":
- print("INFO: Reset canceled")
- else:
- try:
- if os.path.exists(files_path) and os.path.isdir(files_path):
- shutil.rmtree(files_path)
- print("INFO: Reset Successful")
- except Exception as e:
- print("ERROR: Reset Failed")
- raise e
+ def reset_local_file_directory(args):
+ files_path = os.path.join(args.project_path, OUTPUT_FILES_DIR)
+ confirm = input(
+ "This will delete your current state and `warehouse.db` files. Do you want to continue? (Y/N): ")
+ if confirm.lower() != "y":
+ print("INFO: Reset canceled")
  else:
- if not suggest_correct_command(args.command):
- raise NotImplementedError(f"Invalid command: {args.command}, see `fivetran --help`")
+ try:
+ if os.path.exists(files_path) and os.path.isdir(files_path):
+ shutil.rmtree(files_path)
+ print("INFO: Reset Successful")
+ except Exception as e:
+ print("ERROR: Reset Failed")
+ raise e


  if __name__ == "__main__":
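To tie the schema-handling refactor together (`process_tables`, `process_primary_keys`, `process_columns`, `process_data_type` above), a `schema()` of the shape those methods consume might look like this sketch; the table, columns, and precision/scale values are illustrative:

```python
# Hypothetical schema() consumed by Connector.process_tables() above.
def schema(configuration: dict):
    return [
        {
            "table": "orders",                # required; a missing "table" key raises ValueError
            "primary_key": ["order_id"],      # handled by process_primary_keys()
            "columns": {
                "order_id": "STRING",         # string form goes through process_data_type()
                "created_at": "UTC_DATETIME",
                "amount": {                   # dict form is accepted only for DECIMAL
                    "type": "DECIMAL",
                    "precision": 10,
                    "scale": 2,
                },
                # Columns left out entirely still sync; their types are inferred
                # from the upserted values by map_inferred_data_type().
            },
        }
    ]
```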
{fivetran_connector_sdk-0.9.30.1 → fivetran_connector_sdk-0.10.15.1}/src/fivetran_connector_sdk.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fivetran_connector_sdk
- Version: 0.9.30.1
+ Version: 0.10.15.1
  Summary: Build custom connectors on Fivetran platform
  Author-email: Fivetran <developers@fivetran.com>
  Project-URL: Homepage, https://fivetran.com/docs/connectors/connector-sdk
@@ -17,6 +17,8 @@ Requires-Dist: get_pypi_latest_version==0.0.12
  Requires-Dist: pipreqs==0.5.0

  # **fivetran-connector-sdk**
+ [![Downloads](https://static.pepy.tech/badge/fivetran-connector-sdk)](https://pepy.tech/project/fivetran-connector-sdk)
+
  The *fivetran-connector-sdk* allows users to execute custom, self-written Python code within [Fivetran's](https://www.fivetran.com/) secure cloud environment. Fivetran automatically manages running the connectors on your scheduled frequency and manages the required compute resources.

  The Connector SDK service is the best fit for the following use cases: