fivetran-connector-sdk 0.7.24.2__py3-none-any.whl → 0.8.12.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fivetran_connector_sdk/__init__.py +457 -33
- {fivetran_connector_sdk-0.7.24.2.dist-info → fivetran_connector_sdk-0.8.12.1.dist-info}/METADATA +2 -2
- {fivetran_connector_sdk-0.7.24.2.dist-info → fivetran_connector_sdk-0.8.12.1.dist-info}/RECORD +6 -7
- {fivetran_connector_sdk-0.7.24.2.dist-info → fivetran_connector_sdk-0.8.12.1.dist-info}/WHEEL +1 -1
- fivetran_connector_sdk-0.7.24.2.dist-info/LICENSE +0 -21
- {fivetran_connector_sdk-0.7.24.2.dist-info → fivetran_connector_sdk-0.8.12.1.dist-info}/entry_points.txt +0 -0
- {fivetran_connector_sdk-0.7.24.2.dist-info → fivetran_connector_sdk-0.8.12.1.dist-info}/top_level.txt +0 -0
fivetran_connector_sdk/__init__.py

```diff
@@ -11,6 +11,7 @@ import subprocess
 import sys
 import time
 import traceback
+import re
 
 from concurrent import futures
 from datetime import datetime
@@ -22,14 +23,14 @@ from fivetran_connector_sdk.protos import common_pb2
 from fivetran_connector_sdk.protos import connector_sdk_pb2
 from fivetran_connector_sdk.protos import connector_sdk_pb2_grpc
 
-__version__ = "0.7.24.2"
+__version__ = "0.8.12.1"
 
 MAC_OS = "mac"
 WIN_OS = "windows"
 LINUX_OS = "linux"
 
-TESTER_VERSION = "0.24.
-TESTER_FILENAME = "
+TESTER_VERSION = "0.24.0807.001"
+TESTER_FILENAME = "run_sdk_tester.jar"
 VERSION_FILENAME = "version.txt"
 UPLOAD_FILENAME = "code.zip"
 LAST_VERSION_CHECK_FILE = "_last_version_check"
@@ -54,28 +55,54 @@ class Logging:
 
     @staticmethod
     def __log(level: Level, message: str):
+        """Logs a message with the specified logging level.
+
+        Args:
+            level (Logging.Level): The logging level.
+            message (str): The message to log.
+        """
         if DEBUGGING:
             print(message)
         else:
-            print(f'{{"level":"{level}", "message": "{message}", "message-origin": "connector_sdk"}}')
+            print(f'{{"level":"{level.name}", "message": "{message}", "message-origin": "connector_sdk"}}')
 
     @staticmethod
     def fine(message: str):
+        """Logs a fine-level message.
+
+        Args:
+            message (str): The message to log.
+        """
         if DEBUGGING and Logging.LOG_LEVEL == Logging.Level.FINE:
             Logging.__log(Logging.Level.FINE, message)
 
     @staticmethod
     def info(message: str):
+        """Logs an info-level message.
+
+        Args:
+            message (str): The message to log.
+        """
         if Logging.LOG_LEVEL <= Logging.Level.INFO:
             Logging.__log(Logging.Level.INFO, message)
 
     @staticmethod
     def warning(message: str):
+        """Logs a warning-level message.
+
+        Args:
+            message (str): The message to log.
+        """
         if Logging.LOG_LEVEL <= Logging.Level.WARNING:
             Logging.__log(Logging.Level.WARNING, message)
 
     @staticmethod
     def severe(message: str):
+        """Logs a severe-level message.
+
+        Args:
+            message (str): The message to log.
+        """
         if Logging.LOG_LEVEL == Logging.Level.SEVERE:
             Logging.__log(Logging.Level.SEVERE, message)
 
```
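A note on the `__log` change above: interpolating the enum member directly (`{level}`) puts the member's default string form into the JSON log line, while `{level.name}` emits only the level name. A minimal standalone sketch (not the SDK's own code; the `Level` enum here is an assumed stand-in for `Logging.Level`):

```python
# Stand-in sketch for the structured log line emitted when not debugging.
from enum import IntEnum

class Level(IntEnum):  # assumed stand-in for Logging.Level
    FINE = 1
    INFO = 2
    WARNING = 3
    SEVERE = 4

message = "sync started"
# Old interpolation: the enum's default string form ("Level.INFO" on most
# Python versions, the bare number on 3.11+ for IntEnum).
print(f'{{"level":"{Level.INFO}", "message": "{message}", "message-origin": "connector_sdk"}}')
# New interpolation: always the bare level name, e.g. {"level":"INFO", ...}.
print(f'{{"level":"{Level.INFO.name}", "message": "{message}", "message-origin": "connector_sdk"}}')
```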
```diff
@@ -83,6 +110,15 @@ class Logging:
 class Operations:
     @staticmethod
     def upsert(table: str, data: dict) -> list[connector_sdk_pb2.UpdateResponse]:
+        """Performs an upsert operation on the specified table with the given data, deleting any existing value with the same primary key.
+
+        Args:
+            table (str): The name of the table.
+            data (dict): The data to upsert.
+
+        Returns:
+            list[connector_sdk_pb2.UpdateResponse]: A list of update responses.
+        """
         _yield_check(inspect.stack())
 
         responses = []
@@ -117,6 +153,15 @@ class Operations:
 
     @staticmethod
     def update(table: str, modified: dict) -> connector_sdk_pb2.UpdateResponse:
+        """Performs an update operation on the specified table with the given modified data.
+
+        Args:
+            table (str): The name of the table.
+            modified (dict): The modified data.
+
+        Returns:
+            connector_sdk_pb2.UpdateResponse: The update response.
+        """
         _yield_check(inspect.stack())
 
         columns = _get_columns(table)
@@ -133,6 +178,15 @@ class Operations:
 
     @staticmethod
     def delete(table: str, keys: dict) -> connector_sdk_pb2.UpdateResponse:
+        """Performs a soft delete operation on the specified table with the given keys.
+
+        Args:
+            table (str): The name of the table.
+            keys (dict): The keys to delete.
+
+        Returns:
+            connector_sdk_pb2.UpdateResponse: The delete response.
+        """
         _yield_check(inspect.stack())
 
         columns = _get_columns(table)
@@ -147,9 +201,16 @@ class Operations:
         return connector_sdk_pb2.UpdateResponse(
             operation=connector_sdk_pb2.Operation(record=record))
 
-
     @staticmethod
     def checkpoint(state: dict) -> connector_sdk_pb2.UpdateResponse:
+        """Tries to upload all rows to the data warehouse and save state.
+
+        Args:
+            state (dict): The state to checkpoint.
+
+        Returns:
+            connector_sdk_pb2.UpdateResponse: The checkpoint response.
+        """
         _yield_check(inspect.stack())
         return connector_sdk_pb2.UpdateResponse(
             operation=connector_sdk_pb2.Operation(checkpoint=connector_sdk_pb2.Checkpoint(
@@ -157,6 +218,7 @@ class Operations:
 
 
 def check_newer_version():
+    """Periodically checks for a newer version of the SDK and notifies the user if one is available."""
     tester_root_dir = _tester_root_dir()
     last_check_file_path = os.path.join(tester_root_dir, LAST_VERSION_CHECK_FILE)
     if not os.path.isdir(tester_root_dir):
@@ -181,11 +243,20 @@ def check_newer_version():
         f_out.write(f"{int(time.time())}")
 
 
-def _tester_root_dir():
+def _tester_root_dir() -> str:
+    """Returns the root directory for the tester."""
     return os.path.join(os.path.expanduser("~"), ROOT_LOCATION)
 
 
 def _get_columns(table: str) -> dict:
+    """Retrieves the columns for the specified table.
+
+    Args:
+        table (str): The name of the table.
+
+    Returns:
+        dict: The columns for the table.
+    """
     columns = {}
     if table in TABLES:
         for column in TABLES[table].columns:
@@ -195,6 +266,15 @@ def _get_columns(table: str) -> dict:
 
 
 def _map_data_to_columns(data: dict, columns: dict) -> dict:
+    """Maps data to the specified columns.
+
+    Args:
+        data (dict): The data to map.
+        columns (dict): The columns to map the data to.
+
+    Returns:
+        dict: The mapped data.
+    """
     mapped_data = {}
     for k, v in data.items():
         if v is None:
@@ -206,28 +286,28 @@ def _map_data_to_columns(data: dict, columns: dict) -> dict:
             mapped_data[k] = common_pb2.ValueType(bool=v)
         elif columns[k].type == common_pb2.DataType.SHORT:
             mapped_data[k] = common_pb2.ValueType(short=v)
-        elif columns[k].type ==
+        elif columns[k].type == common_pb2.DataType.INT:
             mapped_data[k] = common_pb2.ValueType(int=v)
-        elif columns[k].type ==
+        elif columns[k].type == common_pb2.DataType.LONG:
             mapped_data[k] = common_pb2.ValueType(long=v)
-        elif columns[k].type ==
+        elif columns[k].type == common_pb2.DataType.DECIMAL:
             mapped_data[k] = common_pb2.ValueType(decimal=v)
-        elif columns[k].type ==
+        elif columns[k].type == common_pb2.DataType.FLOAT:
             mapped_data[k] = common_pb2.ValueType(float=v)
-        elif columns[k].type ==
+        elif columns[k].type == common_pb2.DataType.DOUBLE:
             mapped_data[k] = common_pb2.ValueType(double=v)
-        elif columns[k].type ==
+        elif columns[k].type == common_pb2.DataType.NAIVE_DATE:
             timestamp = timestamp_pb2.Timestamp()
             dt = datetime.strptime(v, "%Y-%m-%d")
             timestamp.FromDatetime(dt)
             mapped_data[k] = common_pb2.ValueType(naive_date=timestamp)
-        elif columns[k].type ==
+        elif columns[k].type == common_pb2.DataType.NAIVE_DATETIME:
             if '.' not in v: v = v + ".0"
             timestamp = timestamp_pb2.Timestamp()
             dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f")
             timestamp.FromDatetime(dt)
             mapped_data[k] = common_pb2.ValueType(naive_datetime=timestamp)
-        elif columns[k].type ==
+        elif columns[k].type == common_pb2.DataType.UTC_DATETIME:
             timestamp = timestamp_pb2.Timestamp()
             if '.' in v:
                 dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f%z")
@@ -235,14 +315,14 @@ def _map_data_to_columns(data: dict, columns: dict) -> dict:
                 dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S%z")
             timestamp.FromDatetime(dt)
             mapped_data[k] = common_pb2.ValueType(utc_datetime=timestamp)
-        elif columns[k].type ==
+        elif columns[k].type == common_pb2.DataType.BINARY:
             mapped_data[k] = common_pb2.ValueType(binary=v)
-        elif columns[k].type ==
+        elif columns[k].type == common_pb2.DataType.XML:
             mapped_data[k] = common_pb2.ValueType(xml=v)
-        elif columns[k].type ==
+        elif columns[k].type == common_pb2.DataType.STRING:
             incoming = v if isinstance(v, str) else str(v)
             mapped_data[k] = common_pb2.ValueType(string=incoming)
-        elif columns[k].type ==
+        elif columns[k].type == common_pb2.DataType.JSON:
             mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
         else:
             raise ValueError(f"Unknown data type: {columns[k].type}")
```
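For reference, the `strptime` patterns used by `_map_data_to_columns` above imply the following input string formats for the date and time column types (NAIVE_DATETIME values without a fractional part get ".0" appended before parsing). A minimal standalone sketch with illustrative values only, not the SDK's code:

```python
# Illustrative values only; the formats mirror the strptime patterns in the diff above.
from datetime import datetime

naive_date = datetime.strptime("2024-08-07", "%Y-%m-%d")                                      # NAIVE_DATE
naive_datetime = datetime.strptime("2024-08-07T12:30:45.123", "%Y-%m-%dT%H:%M:%S.%f")         # NAIVE_DATETIME
utc_datetime = datetime.strptime("2024-08-07T12:30:45.123+0000", "%Y-%m-%dT%H:%M:%S.%f%z")    # UTC_DATETIME with fraction
utc_datetime_no_frac = datetime.strptime("2024-08-07T12:30:45+0000", "%Y-%m-%dT%H:%M:%S%z")   # UTC_DATETIME without fraction
print(naive_date, naive_datetime, utc_datetime, utc_datetime_no_frac)
```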
```diff
@@ -273,6 +353,11 @@ def _map_data_to_columns(data: dict, columns: dict) -> dict:
 
 
 def _yield_check(stack):
+    """Checks for the presence of 'yield' in the calling code.
+    Args:
+        stack: The stack frame to check.
+    """
+
     # Known issue with inspect.getmodule() and yield behavior in a frozen application.
     # When using inspect.getmodule() on stack frames obtained by inspect.stack(), it fails
     # to resolve the modules in a frozen application due to incompatible assumptions about
@@ -293,7 +378,16 @@ def _yield_check(stack):
     raise RuntimeError(f"Unable to find '{called_method}' function in stack")
 
 
-def _check_dict(incoming: dict, string_only: bool = False):
+def _check_dict(incoming: dict, string_only: bool = False) -> dict:
+    """Validates the incoming dictionary.
+    Args:
+        incoming (dict): The dictionary to validate.
+        string_only (bool): Whether to allow only string values.
+
+    Returns:
+        dict: The validated dictionary.
+    """
+
     if not incoming:
         return {}
 
@@ -311,6 +405,12 @@ def _check_dict(incoming: dict, string_only: bool = False):
 
 class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
     def __init__(self, update, schema=None):
+        """Initializes the Connector instance.
+        Args:
+            update: The update method.
+            schema: The schema method.
+        """
+
         self.schema_method = schema
         self.update_method = update
 
@@ -319,6 +419,13 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
 
     # Call this method to unpause and start a connector
    def start(self, deploy_key: str, group: str, connection: str):
+        """Starts the connector with the given deployment key, group name, and connection schema name.
+
+        Args:
+            deploy_key (str): The deployment key.
+            group (str): The group name.
+            connection (str): The connection schema name.
+        """
         if not deploy_key: print("ERROR: Missing deploy key"); os._exit(1)
         if not connection: print("ERROR: Missing connection name"); os._exit(1)
 
@@ -334,13 +441,118 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
 
     @staticmethod
     def __unpause_connection(id: str, deploy_key: str) -> bool:
+        """Unpauses the connection with the given ID and deployment key.
+
+        Args:
+            id (str): The connection ID.
+            deploy_key (str): The deployment key.
+
+        Returns:
+            bool: True if the connection was successfully unpaused, False otherwise.
+        """
         resp = rq.patch(f"https://api.fivetran.com/v1/connectors/{id}",
                         headers={"Authorization": f"Basic {deploy_key}"},
                         json={"force": True})
         return resp.ok
 
+    @staticmethod
+    def fetch_requirements_from_file(file_path: str) -> list[str]:
+        """Reads a requirements file and returns a list of dependencies.
+
+        Args:
+            file_path (str): The path to the requirements file.
+
+        Returns:
+            list[str]: A list of dependencies as strings.
+        """
+        with open(file_path, 'r') as f:
+            return f.read().splitlines()
+
+    @staticmethod
+    def fetch_requirements_as_dict(self, file_path: str) -> dict:
+        """Converts a list of dependencies from the requirements file into a dictionary.
+
+        Args:
+            file_path (str): The path to the requirements file.
+
+        Returns:
+            dict: A dictionary where keys are package names (lowercased) and
+                  values are the full dependency strings.
+        """
+        return {item.split("==")[0].lower(): item.lower() for item in
+                self.fetch_requirements_from_file(file_path)}
+
+    def validate_requirements_file(self, project_path: str, is_deploy: bool):
+        """Validates the `requirements.txt` file against the project's actual dependencies.
+
+        This method generates a temporary requirements file using `pipreqs`, compares
+        it with the existing `requirements.txt`, and checks for version mismatches,
+        missing dependencies, and unused dependencies. It will issue warnings, errors,
+        or even terminate the process depending on whether it's being run for deployment.
+
+        Args:
+            project_path (str): The path to the project directory containing the `requirements.txt`.
+            is_deploy (bool): If `True`, the method will exit the process on critical errors.
+
+        """
+        subprocess.run(["pipreqs", "--savepath", "tmp_requirements.txt"], text=True, check=True)
+        tmp_requirements_file_path = os.path.join(project_path, 'tmp_requirements.txt')
+
+        tmp_requirements = self.fetch_requirements_as_dict(self, tmp_requirements_file_path)
+        tmp_requirements.pop("fivetran_connector_sdk")
+        os.remove(tmp_requirements_file_path)
+
+        if len(tmp_requirements) > 0:
+            if os.path.exists("requirements.txt"):
+                requirements = self.fetch_requirements_as_dict(self, os.path.join(project_path, 'requirements.txt'))
+            else:
+                with open("requirements.txt", 'w'):
+                    pass
+                requirements = {}
+                print("WARNING: Adding `requirements.txt` file to your project folder.")
+
+            version_mismatch_deps = {key: tmp_requirements[key] for key in
+                                     (requirements.keys() & tmp_requirements.keys())
+                                     if requirements[key] != tmp_requirements[key]}
+            if version_mismatch_deps:
+                print("WARNING: We recommend using the current stable version for the following:")
+                print(version_mismatch_deps)
+
+            missing_deps = {key: tmp_requirements[key] for key in (tmp_requirements.keys() - requirements.keys())}
+            if missing_deps:
+                log_level = "ERROR" if is_deploy else "WARNING"
+                print(log_level +
+                      ": Please include the following dependency libraries in requirements.txt, to be used by "
+                      "Fivetran production. "
+                      "For more information, please visit: "
+                      "https://fivetran.com/docs/connectors/connector-sdk/detailed-guide"
+                      "#workingwithrequirementstxtfile")
+                print(*list(missing_deps.values()))
+                if is_deploy:
+                    os._exit(1)
+
+            unused_deps = list(requirements.keys() - tmp_requirements.keys())
+            if unused_deps:
+                print("INFO: The following dependencies are not used in connector.py:")
+                print(*unused_deps)
+        else:
+            if os.path.exists("requirements.txt"):
+                print("WARNING: `requirements.txt` is not required as no additional "
+                      "Python libraries are required for your code.")
+
+        if is_deploy: print("Successful validation of requirements.txt")
+
     # Call this method to deploy the connector to Fivetran platform
     def deploy(self, project_path: str, deploy_key: str, group: str, connection: str, configuration: dict = None):
+        """Deploys the connector to the Fivetran platform.
+
+        Args:
+            project_path (str): The path to the connector project.
+            deploy_key (str): The deployment key.
+            group (str): The group name.
+            connection (str): The connection name.
+            configuration (dict): The configuration dictionary.
+        """
         if not deploy_key: print("ERROR: Missing deploy key"); os._exit(1)
         if not connection: print("ERROR: Missing connection name"); os._exit(1)
         _check_dict(configuration)
```
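The new `validate_requirements_file` method above compares the `requirements.txt` a developer declares against what `pipreqs` detects, using plain dictionary and set operations. A minimal standalone sketch of that comparison (hypothetical package names and versions; not the SDK's code):

```python
# Hypothetical inputs: what pipreqs detected vs. what requirements.txt declares.
detected = {"requests": "requests==2.31.0", "pandas": "pandas==2.2.2"}
declared = {"requests": "requests==2.30.0"}

# Same keys, different pins -> "use the current stable version" warning.
version_mismatch = {k: detected[k] for k in declared.keys() & detected.keys()
                    if declared[k] != detected[k]}
# Detected but not declared -> error on deploy, warning during debug.
missing = {k: detected[k] for k in detected.keys() - declared.keys()}
# Declared but not detected -> informational notice.
unused = sorted(declared.keys() - detected.keys())

print(version_mismatch)  # {'requests': 'requests==2.31.0'}
print(missing)           # {'pandas': 'pandas==2.2.2'}
print(unused)            # []
```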
```diff
@@ -356,10 +568,13 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
             "sync_method": "DIRECT",
             "custom_payloads": [],
         }
+
+        self.validate_requirements_file(project_path, True)
+
         group_id, group_name = self.__get_group_info(group, deploy_key)
         print(f"Deploying '{project_path}' to '{group_name}/{connection}'")
         upload_file_path = self.__create_upload_file(project_path)
-        upload_result = self.__upload(upload_file_path, deploy_key,group_id,connection)
+        upload_result = self.__upload(upload_file_path, deploy_key, group_id, connection)
         os.remove(upload_file_path)
         if not upload_result:
             os._exit(1)
@@ -378,6 +593,15 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
 
     @staticmethod
     def __force_sync(id: str, deploy_key: str) -> bool:
+        """Forces a sync operation on the connection with the given ID and deployment key.
+
+        Args:
+            id (str): The connection ID.
+            deploy_key (str): The deployment key.
+
+        Returns:
+            bool: True if the sync was successfully started, False otherwise.
+        """
         resp = rq.post(f"https://api.fivetran.com/v1/connectors/{id}/sync",
                        headers={"Authorization": f"Basic {deploy_key}"},
                        json={"force": True})
@@ -385,6 +609,15 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
 
     @staticmethod
     def __update_connection(id: str, name: str, group: str, config: dict, deploy_key: str):
+        """Updates the connection with the given ID, name, group, configuration, and deployment key.
+
+        Args:
+            id (str): The connection ID.
+            name (str): The connection name.
+            group (str): The group name.
+            config (dict): The configuration dictionary.
+            deploy_key (str): The deployment key.
+        """
         if not config["secrets_list"]:
             del config["secrets_list"]
 
@@ -400,7 +633,18 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
             os._exit(1)
 
     @staticmethod
-    def __get_connection_id(name: str, group: str, group_id: str, deploy_key: str):
+    def __get_connection_id(name: str, group: str, group_id: str, deploy_key: str) -> str | None:
+        """Retrieves the connection ID for the specified connection schema name, group, and deployment key.
+
+        Args:
+            name (str): The connection name.
+            group (str): The group name.
+            group_id (str): The group ID.
+            deploy_key (str): The deployment key.
+
+        Returns:
+            str: The connection ID, or None
+        """
         resp = rq.get(f"https://api.fivetran.com/v1/groups/{group_id}/connectors",
                       headers={"Authorization": f"Basic {deploy_key}"},
                       params={"schema": name})
@@ -414,7 +658,17 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         return None
 
     @staticmethod
-    def __create_connection(deploy_key: str, group_id: str, config: dict):
+    def __create_connection(deploy_key: str, group_id: str, config: dict) -> rq.Response:
+        """Creates a new connection with the given deployment key, group ID, and configuration.
+
+        Args:
+            deploy_key (str): The deployment key.
+            group_id (str): The group ID.
+            config (dict): The configuration dictionary.
+
+        Returns:
+            rq.Response: The response object.
+        """
         response = rq.post(f"https://api.fivetran.com/v1/connectors",
                            headers={"Authorization": f"Basic {deploy_key}"},
                            json={
@@ -428,29 +682,59 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         return response
 
     def __create_upload_file(self, project_path: str) -> str:
+        """Creates an upload file for the given project path.
+
+        Args:
+            project_path (str): The path to the project.
+
+        Returns:
+            str: The path to the upload file.
+        """
         print("Packaging project for upload ..")
         zip_file_path = self.__zip_folder(project_path)
         print("✓")
         return zip_file_path
 
     def __zip_folder(self, project_path: str) -> str:
+        """Zips the folder at the given project path.
+
+        Args:
+            project_path (str): The path to the project.
+
+        Returns:
+            str: The path to the zip file.
+        """
         upload_filepath = os.path.join(project_path, UPLOAD_FILENAME)
+        connector_file_exists = False
 
         with ZipFile(upload_filepath, 'w', ZIP_DEFLATED) as zipf:
             for root, files in self.__dir_walker(project_path):
                 for file in files:
+                    if file == "connector.py":
+                        connector_file_exists = True
                     file_path = os.path.join(root, file)
                     arcname = os.path.relpath(file_path, project_path)
                     zipf.write(file_path, arcname)
 
+        if not connector_file_exists:
+            print("ERROR: Missing connector.py file")
+            os._exit(1)
         return upload_filepath
 
     def __dir_walker(self, top):
+        """Walks the directory tree starting at the given top directory.
+
+        Args:
+            top (str): The top directory to start the walk.
+
+        Yields:
+            tuple: A tuple containing the current directory path and a list of files.
+        """
         dirs, files = [], []
         for name in os.listdir(top):
             path = os.path.join(top, name)
             if os.path.isdir(path):
-                if name not in EXCLUDED_DIRS:
+                if (name not in EXCLUDED_DIRS) and (not name.startswith(".")):
                     dirs.append(name)
             else:
                 if name.endswith(".py") or name == "requirements.txt":
@@ -464,6 +748,17 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
 
     @staticmethod
     def __upload(local_path: str, deploy_key: str, group_id: str, connection: str) -> bool:
+        """Uploads the local code file for the specified group and connection.
+
+        Args:
+            local_path (str): The local file path.
+            deploy_key (str): The deployment key.
+            group_id (str): The group ID.
+            connection (str): The connection name.
+
+        Returns:
+            bool: True if the upload was successful, False otherwise.
+        """
         print("Uploading project .. ", end="", flush=True)
         response = rq.post(f"https://api.fivetran.com/v2/deploy/{group_id}/{connection}",
                            files={'file': open(local_path, 'rb')},
@@ -477,6 +772,11 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
 
     @staticmethod
     def __get_os_name() -> str:
+        """Returns the name of the operating system.
+
+        Returns:
+            str: The name of the operating system.
+        """
         os_sysname = platform.system().lower()
         if os_sysname.startswith("darwin"):
             return MAC_OS
@@ -488,6 +788,15 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
 
     @staticmethod
     def __get_group_info(group: str, deploy_key: str) -> tuple[str, str]:
+        """Retrieves the group information for the specified group and deployment key.
+
+        Args:
+            group (str): The group name.
+            deploy_key (str): The deployment key.
+
+        Returns:
+            tuple[str, str]: A tuple containing the group ID and group name.
+        """
         resp = rq.get("https://api.fivetran.com/v1/groups",
                       headers={"Authorization": f"Basic {deploy_key}"})
 
@@ -521,6 +830,17 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
             configuration: dict = None,
             state: dict = None,
             log_level: Logging.Level = Logging.Level.INFO) -> grpc.Server:
+        """Runs the connector server.
+
+        Args:
+            port (int): The port number to listen for incoming requests.
+            configuration (dict): The configuration dictionary.
+            state (dict): The state dictionary.
+            log_level (Logging.Level): The logging level.
+
+        Returns:
+            grpc.Server: The gRPC server instance.
+        """
         self.configuration = _check_dict(configuration, True)
         self.state = _check_dict(state)
         Logging.LOG_LEVEL = log_level
@@ -529,7 +849,6 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         connector_sdk_pb2_grpc.add_ConnectorServicer_to_server(self, server)
         server.add_insecure_port("[::]:" + str(port))
         server.start()
-        print("Connector started, listening on " + str(port))
         if DEBUGGING:
             return server
         server.wait_for_termination()
@@ -541,6 +860,18 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
              configuration: dict = None,
              state: dict = None,
              log_level: Logging.Level = Logging.Level.FINE) -> bool:
+        """Tests the connector code by running it with the connector tester.
+
+        Args:
+            project_path (str): The path to the project.
+            port (int): The port number to listen for incoming requests.
+            configuration (dict): The configuration dictionary.
+            state (dict): The state dictionary.
+            log_level (Logging.Level): The logging level.
+
+        Returns:
+            bool: True if there was an error, False otherwise.
+        """
         global DEBUGGING
         DEBUGGING = True
 
@@ -598,16 +929,17 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
             os._exit(1)
 
         project_path = os.getcwd() if project_path is None else project_path
+        self.validate_requirements_file(project_path, False)
         print(f"Debugging connector at: {project_path}")
         server = self.run(port, configuration, state, log_level=log_level)
 
         # Uncomment this to run the tester manually
-        #server.wait_for_termination()
+        # server.wait_for_termination()
 
         error = False
         try:
             print(f"Starting connector tester..")
-            for log_msg in self.__run_tester(java_exe, tester_root_dir, project_path):
+            for log_msg in self.__run_tester(java_exe, tester_root_dir, project_path, port):
                 print(log_msg, end="")
         except:
             print(traceback.format_exc())
@@ -618,12 +950,51 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         return error
 
     @staticmethod
-    def __java_exe(location: str, os_name: str):
+    def __java_exe(location: str, os_name: str) -> str:
+        """Returns the path to the Java executable.
+
+        Args:
+            location (str): The location of the Java executable.
+            os_name (str): The name of the operating system.
+
+        Returns:
+            str: The path to the Java executable.
+        """
         java_exe_base = os.path.join(location, "bin", "java")
         return f"{java_exe_base}.exe" if os_name == WIN_OS else java_exe_base
 
     @staticmethod
-    def
+    def process_stream(stream):
+        """Processes a stream of text lines, replacing occurrences of a specified pattern.
+
+        This method reads each line from the provided stream, searches for occurrences of
+        a predefined pattern, and replaces them with a specified replacement string.
+
+        Args:
+            stream (iterable): An iterable stream of text lines, typically from a file or another input source.
+
+        Yields:
+            str: Each line from the stream after replacing the matched pattern with the replacement string.
+        """
+        pattern = re.compile(r'com\.fivetran\.fivetran_sdk\.tools\.testers\.\S+')
+        replacement = 'Fivetran SDK Tester'
+
+        for line in iter(stream.readline, ""):
+            modified_line = pattern.sub(replacement, line)
+            yield modified_line
+
+    @staticmethod
+    def __run_tester(java_exe: str, root_dir: str, project_path: str, port: int = 50051):
+        """Runs the connector tester.
+
+        Args:
+            java_exe (str): The path to the Java executable.
+            root_dir (str): The root directory.
+            project_path (str): The path to the project.
+
+        Yields:
+            str: The log messages from the tester.
+        """
         working_dir = os.path.join(project_path, OUTPUT_FILES_DIR)
         try:
             os.mkdir(working_dir)
@@ -634,11 +1005,16 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
                "-jar",
                os.path.join(root_dir, TESTER_FILENAME),
                "--connector-sdk=true",
-               f"--
+               f"--port={port}",
+               f"--working-dir={working_dir}",
+               "--tester-type=source"]
 
-        popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
-        for
-        yield
+        popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
+        for line in Connector.process_stream(popen.stderr):
+            yield line
+
+        for line in Connector.process_stream(popen.stdout):
+            yield line
         popen.stdout.close()
         return_code = popen.wait()
         if return_code:
```
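The new `process_stream` helper above rewrites internal tester class names while the Java tester's output is streamed back to the user, and `__run_tester` now forwards the gRPC port and reads both stdout and stderr. A minimal standalone sketch of the stream-rewriting idea (the child process here only prints one fabricated sample line; it is not the real tester, which is launched via `java -jar run_sdk_tester.jar`):

```python
# Stand-in sketch: rewrite matching class names in a subprocess's output stream.
import re
import subprocess
import sys

pattern = re.compile(r'com\.fivetran\.fivetran_sdk\.tools\.testers\.\S+')
replacement = "Fivetran SDK Tester"

# Fabricated sample output; the real SDK streams the Java tester's stdout/stderr.
child = subprocess.Popen(
    [sys.executable, "-c", "print('com.fivetran.fivetran_sdk.tools.testers.SomeTester finished')"],
    stdout=subprocess.PIPE, universal_newlines=True)

for line in iter(child.stdout.readline, ""):
    print(pattern.sub(replacement, line), end="")  # -> "Fivetran SDK Tester finished"
child.stdout.close()
child.wait()
```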
```diff
@@ -646,6 +1022,15 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
 
     # -- Methods below override ConnectorServicer methods
     def ConfigurationForm(self, request, context):
+        """Overrides the ConfigurationForm method from ConnectorServicer.
+
+        Args:
+            request: The gRPC request.
+            context: The gRPC context.
+
+        Returns:
+            common_pb2.ConfigurationFormResponse: An empty configuration form response.
+        """
         if not self.configuration:
             self.configuration = {}
 
@@ -653,9 +1038,27 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         return common_pb2.ConfigurationFormResponse()
 
     def Test(self, request, context):
+        """Overrides the Test method from ConnectorServicer.
+
+        Args:
+            request: The gRPC request.
+            context: The gRPC context.
+
+        Returns:
+            None: As this method is not implemented.
+        """
         return None
 
     def Schema(self, request, context):
+        """Overrides the Schema method from ConnectorServicer.
+
+        Args:
+            request: The gRPC request.
+            context: The gRPC context.
+
+        Returns:
+            connector_sdk_pb2.SchemaResponse: The schema response.
+        """
         global TABLES
 
         if not self.schema_method:
@@ -741,6 +1144,15 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
         return connector_sdk_pb2.SchemaResponse(without_schema=common_pb2.TableList(tables=TABLES.values()))
 
     def Update(self, request, context):
+        """Overrides the Update method from ConnectorServicer.
+
+        Args:
+            request: The gRPC request.
+            context: The gRPC context.
+
+        Yields:
+            connector_sdk_pb2.UpdateResponse: The update response.
+        """
         configuration = self.configuration if self.configuration else request.configuration
         state = self.state if self.state else json.loads(request.state_json)
 
@@ -757,7 +1169,15 @@ class Connector(connector_sdk_pb2_grpc.ConnectorServicer):
             raise e
 
 
-def find_connector_object(project_path):
+def find_connector_object(project_path) -> Connector:
+    """Finds the connector object in the given project path.
+    Args:
+        project_path (str): The path to the project.
+
+    Returns:
+        object: The connector object.
+    """
+
     module_name = "connector_connector_code"
     connector_py = os.path.join(project_path, "connector.py")
     spec = importlib.util.spec_from_file_location(module_name, connector_py)
@@ -775,6 +1195,10 @@ def find_connector_object(project_path):
 
 
 def main():
+    """The main entry point for the script.
+    Parses command line arguments and passes them to connector object methods
+    """
+
     parser = argparse.ArgumentParser(allow_abbrev=False)
 
     # Positional
```
{fivetran_connector_sdk-0.7.24.2.dist-info → fivetran_connector_sdk-0.8.12.1.dist-info}/METADATA (RENAMED)

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fivetran_connector_sdk
-Version: 0.7.24.2
+Version: 0.8.12.1
 Summary: Build custom connectors on Fivetran platform
 Author-email: Fivetran <developers@fivetran.com>
 Project-URL: Homepage, https://fivetran.com/docs/connectors/connector-sdk
@@ -10,9 +10,9 @@ Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
-License-File: LICENSE
 Requires-Dist: grpcio ==1.60.1
 Requires-Dist: grpcio-tools ==1.60.1
 Requires-Dist: requests ==2.31.0
 Requires-Dist: get-pypi-latest-version ==0.0.12
+Requires-Dist: pipreqs ==0.5.0
 
```
{fivetran_connector_sdk-0.7.24.2.dist-info → fivetran_connector_sdk-0.8.12.1.dist-info}/RECORD (RENAMED)

```diff
@@ -1,4 +1,4 @@
-fivetran_connector_sdk/__init__.py,sha256=
+fivetran_connector_sdk/__init__.py,sha256=OaXaLM_1xxs7sF6sBAB3aBW4b9DTpc_mR6q23pGgQnw,49697
 fivetran_connector_sdk/protos/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fivetran_connector_sdk/protos/common_pb2.py,sha256=kUwVcyZHgLigNR-KnHZn7dHrlxaMnUXqzprsRx6T72M,6831
 fivetran_connector_sdk/protos/common_pb2.pyi,sha256=S0hdIzoXyyOKD5cjiGeDDLYpQ9J3LjAvu4rCj1JvJWE,9038
@@ -6,9 +6,8 @@ fivetran_connector_sdk/protos/common_pb2_grpc.py,sha256=1oboBPFxaTEXt9Aw7EAj8gXH
 fivetran_connector_sdk/protos/connector_sdk_pb2.py,sha256=9Ke_Ti1s0vAeXapfXT-EryrT2-TSGQb8mhs4gxTpUMk,7732
 fivetran_connector_sdk/protos/connector_sdk_pb2.pyi,sha256=FWYxRgshEF3QDYAE0TM_mv4N2gGvkxCH_uPpxnMc4oA,8406
 fivetran_connector_sdk/protos/connector_sdk_pb2_grpc.py,sha256=ZfJLp4DW7uP4pFOZ74s_wQ6tD3eIPi-08UfnLwe4tzo,7163
-fivetran_connector_sdk-0.
-fivetran_connector_sdk-0.
-fivetran_connector_sdk-0.
-fivetran_connector_sdk-0.
-fivetran_connector_sdk-0.
-fivetran_connector_sdk-0.7.24.2.dist-info/RECORD,,
+fivetran_connector_sdk-0.8.12.1.dist-info/METADATA,sha256=nZR69biAGw0z-EXCSUTcXXWcIpx1BkkNLQW3RukPAOY,708
+fivetran_connector_sdk-0.8.12.1.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+fivetran_connector_sdk-0.8.12.1.dist-info/entry_points.txt,sha256=uQn0KPnFlQmXJfxlk0tifdNsSXWfVlnAFzNqjXZM_xM,57
+fivetran_connector_sdk-0.8.12.1.dist-info/top_level.txt,sha256=-_xk2MFY4psIh7jw1lJePMzFb5-vask8_ZtX-UzYWUI,23
+fivetran_connector_sdk-0.8.12.1.dist-info/RECORD,,
```
fivetran_connector_sdk-0.7.24.2.dist-info/LICENSE (DELETED)

```diff
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2023 Fivetran
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
```
{fivetran_connector_sdk-0.7.24.2.dist-info → fivetran_connector_sdk-0.8.12.1.dist-info}/entry_points.txt: File without changes

{fivetran_connector_sdk-0.7.24.2.dist-info → fivetran_connector_sdk-0.8.12.1.dist-info}/top_level.txt: File without changes