fivetran-connector-sdk 2.0.0__py3-none-any.whl → 2.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fivetran_connector_sdk/__init__.py +6 -2
- fivetran_connector_sdk/connector_helper.py +6 -5
- fivetran_connector_sdk/constants.py +1 -2
- fivetran_connector_sdk/operation_stream.py +166 -0
- fivetran_connector_sdk/operations.py +35 -166
- {fivetran_connector_sdk-2.0.0.dist-info → fivetran_connector_sdk-2.0.1.dist-info}/METADATA +2 -2
- {fivetran_connector_sdk-2.0.0.dist-info → fivetran_connector_sdk-2.0.1.dist-info}/RECORD +10 -9
- {fivetran_connector_sdk-2.0.0.dist-info → fivetran_connector_sdk-2.0.1.dist-info}/WHEEL +0 -0
- {fivetran_connector_sdk-2.0.0.dist-info → fivetran_connector_sdk-2.0.1.dist-info}/entry_points.txt +0 -0
- {fivetran_connector_sdk-2.0.0.dist-info → fivetran_connector_sdk-2.0.1.dist-info}/top_level.txt +0 -0
fivetran_connector_sdk/__init__.py

@@ -42,7 +42,7 @@ from fivetran_connector_sdk.connector_helper import (
 
 # Version format: <major_version>.<minor_version>.<patch_version>
 # (where Major Version = 2, Minor Version is incremental MM from Aug 25 onwards, Patch Version is incremental within a month)
-__version__ = "2.0.0"
+__version__ = "2.0.1"
 TESTER_VERSION = TESTER_VER
 MAX_MESSAGE_LENGTH = 32 * 1024 * 1024 # 32MB
 
@@ -108,7 +108,11 @@ class Connector(connector_sdk_pb2_grpc.SourceConnectorServicer):
         if python_version:
             connection_config["python_version"] = python_version
 
-
+        if not force:
+            validate_requirements_file(project_path, True, __version__)
+        else:
+            print_library_log(
+                "Skipping requirements.txt validation as --force flag is set. Ensure that your code is structured accordingly and all dependencies are listed in `requirements.txt`")
 
         group_id, group_name = get_group_info(group, deploy_key)
         connection_id, service = get_connection_id(connection, group, group_id, deploy_key) or (None, None)
fivetran_connector_sdk/connector_helper.py

@@ -314,7 +314,7 @@ def fetch_requirements_as_dict(file_path: str) -> dict:
             print_library_log(f"Invalid requirement format: '{requirement}'", Logging.Level.SEVERE)
     return requirements_dict
 
-def validate_requirements_file(project_path: str, is_deploy: bool, version: str
+def validate_requirements_file(project_path: str, is_deploy: bool, version: str):
     """Validates the `requirements.txt` file against the project's actual dependencies.
 
     This method generates a temporary requirements file using `pipreqs`, compares
@@ -326,7 +326,6 @@ def validate_requirements_file(project_path: str, is_deploy: bool, version: str
         project_path (str): The path to the project directory containing the `requirements.txt`.
         is_deploy (bool): If `True`, the method will exit the process on critical errors.
         version (str): The current version of the connector.
-        force (bool): Force update an existing connection.
 
     """
     # Detect and exclude virtual environment directories
@@ -385,7 +384,7 @@ def validate_requirements_file(project_path: str, is_deploy: bool, version: str
     if version_mismatch_deps:
         print_library_log("We recommend using the current stable version for the following libraries:", Logging.Level.WARNING)
         print(version_mismatch_deps)
-        if is_deploy
+        if is_deploy:
            confirm = input(
                f"Would you like us to update {REQUIREMENTS_TXT} to the current stable versions of the dependent libraries? (y/N):")
            if confirm.lower() == "y":
@@ -400,7 +399,7 @@ def validate_requirements_file(project_path: str, is_deploy: bool, version: str
     missing_deps = {key: tmp_requirements[key] for key in (tmp_requirements.keys() - requirements.keys())}
     if missing_deps:
         handle_missing_deps(missing_deps)
-        if is_deploy
+        if is_deploy:
            confirm = input(
                f"Would you like us to update {REQUIREMENTS_TXT} to add missing dependent libraries? (y/N):")
            if confirm.lower() == "n":
@@ -414,7 +413,7 @@ def validate_requirements_file(project_path: str, is_deploy: bool, version: str
     unused_deps = list(requirements.keys() - tmp_requirements.keys())
     if unused_deps:
         handle_unused_deps(unused_deps, version)
-        if is_deploy
+        if is_deploy:
            confirm = input(f"Would you like us to update {REQUIREMENTS_TXT} to remove the unused libraries? (y/N):")
            if confirm.lower() == "n":
                if 'fivetran_connector_sdk' in unused_deps or 'requests' in unused_deps:
@@ -432,6 +431,8 @@ def validate_requirements_file(project_path: str, is_deploy: bool, version: str
         with open(requirements_file_path, "w", encoding=UTF_8) as file:
             file.write("\n".join(requirements.values()))
         print_library_log(f"`{REQUIREMENTS_TXT}` has been updated successfully.")
+    elif not requirements:
+        delete_file_if_exists(requirements_file_path)
 
     if is_deploy: print_library_log(f"Validation of {REQUIREMENTS_TXT} completed.")
 
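All three dependency checks above reduce to set operations on the keys of two dicts: requirements (parsed from requirements.txt) and tmp_requirements (generated by pipreqs). A minimal sketch with hypothetical data; the missing_deps and unused_deps expressions are verbatim from the hunks above, while version_mismatch_deps is an assumption about how that neighboring variable is computed:

requirements = {"requests": "requests==2.31.0", "pandas": "pandas==2.2.0"}
tmp_requirements = {"requests": "requests==2.32.5"}  # what pipreqs detected in the code

missing_deps = {key: tmp_requirements[key] for key in (tmp_requirements.keys() - requirements.keys())}
unused_deps = list(requirements.keys() - tmp_requirements.keys())
version_mismatch_deps = {  # assumed shape; computed elsewhere in connector_helper.py
    key: tmp_requirements[key]
    for key in requirements.keys() & tmp_requirements.keys()
    if requirements[key] != tmp_requirements[key]
}

print(missing_deps)           # {}
print(unused_deps)            # ['pandas']
print(version_mismatch_deps)  # {'requests': 'requests==2.32.5'}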
fivetran_connector_sdk/constants.py

@@ -43,8 +43,7 @@ VIRTUAL_ENV_CONFIG = "pyvenv.cfg"
 ROOT_FILENAME = "connector.py"
 MAX_RECORDS_IN_BATCH = 100
 MAX_BATCH_SIZE_IN_BYTES = 100000 # 100 KB
-
-QUEUE_SIZE = int(os.environ.get("QUEUE_SIZE", 1))
+QUEUE_SIZE = 100
 
 # Compile patterns used in the implementation
 WORD_DASH_DOT_PATTERN = re.compile(r'^[\w.-]*$')
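Context for the QUEUE_SIZE change: queue.Queue(maxsize=...) gives a bounded, blocking hand-off, so replacing the env-tunable default of 1 with a fixed 100 lets producers run up to 100 operations ahead of the consumer before blocking. A stdlib-only sketch of that back-pressure (illustrative, not SDK code):

import queue
import threading

QUEUE_SIZE = 100  # the new fixed value from constants.py

q = queue.Queue(maxsize=QUEUE_SIZE)

def producer():
    for i in range(1000):
        q.put(i)  # blocks whenever the consumer falls QUEUE_SIZE items behind

threading.Thread(target=producer, daemon=True).start()

for _ in range(1000):
    q.get()  # each get() frees a slot and unblocks a waiting producer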
fivetran_connector_sdk/operation_stream.py (new file)

@@ -0,0 +1,166 @@
+import queue
+import threading
+
+from fivetran_connector_sdk.constants import (
+    QUEUE_SIZE,
+    MAX_RECORDS_IN_BATCH,
+    MAX_BATCH_SIZE_IN_BYTES,
+    CHECKPOINT_OP_TIMEOUT_IN_SEC
+)
+from fivetran_connector_sdk.protos import connector_sdk_pb2
+
+
+class _OperationStream:
+    """
+    A simple iterator-based stream backed by a queue for producing and consuming operations.
+
+    This class allows adding data items into a queue and consuming them using standard iteration.
+    It uses a sentinel object to signal the end of the stream.
+
+    Example:
+        stream = _OperationStream()
+        stream.add("response1")
+        stream.mark_done()
+
+        for response in stream:
+            print(response) # prints "response1"
+    """
+
+    def __init__(self):
+        """
+        Initializes the operation stream with a queue and a sentinel object.
+        """
+        self._queue = queue.Queue(maxsize=QUEUE_SIZE)
+        self._sentinel = object()
+        self._is_done = False
+        self._buffer = []
+        self._buffer_record_count = 0
+        self._buffer_size_bytes = 0
+        self._checkpoint_lock = threading.Lock()
+        self._checkpoint_flush_signal = threading.Event()
+        self._checkpoint_flush_signal.set()
+
+    def __iter__(self):
+        """
+        Returns the iterator instance itself.
+        """
+        return self
+
+    def add(self, operation):
+        """
+        Adds an operation to the stream. Guarantees that operations within a single thread are processed in the order.
+
+        In multithreaded environment if a thread initiates a checkpoint, it's producer is blocked until the
+        checkpoint flush is complete. This block is localized, other threads
+        remain unblocked and can continue to perform other operations
+        (such as upserts, updates, deletes), but they are prevented from initiating a new checkpoint
+        until the existing one is finished.
+
+        Args:
+            operation (object): The data item to add to the stream.
+        """
+        if isinstance(operation, connector_sdk_pb2.Checkpoint):
+            # lock to ensure checkpoint operations are processed one at a time
+            with self._checkpoint_lock:
+                # clear the signal to indicate checkpoint operation is being processed.
+                self._checkpoint_flush_signal.clear()
+                self._queue.put(operation)
+                # wait until the consumer flushes the buffer and sets the flag.
+                if not self._checkpoint_flush_signal.wait(CHECKPOINT_OP_TIMEOUT_IN_SEC):
+                    raise TimeoutError(
+                        "Checkpoint flush timed out. Consumer may have failed to process checkpoint."
+                    )
+        else:
+            self._queue.put(operation)
+
+    def unblock(self):
+        """
+        Unblocks the queue, called by consumer after the checkpoint flush is completed.
+        """
+        self._checkpoint_flush_signal.set()
+
+    def mark_done(self):
+        """
+        Marks the end of the stream by putting a sentinel in the queue.
+        """
+        self._queue.put(self._sentinel)
+
+    def __next__(self):
+        """
+        Retrieves the next item from the stream. Raises StopIteration when the sentinel is encountered.
+
+        Returns:
+            object: The next item in the stream.
+
+        Raises:
+            StopIteration: If the sentinel object is encountered.
+        """
+        # If stream is completed and buffer is empty, raise StopIteration. Else flush the buffer.
+        if self._is_done and not self._buffer:
+            raise StopIteration
+
+        if self._is_done:
+            return self._flush_buffer()
+
+        return self._build_next_batch()
+
+    def _build_next_batch(self):
+        """
+        Core logic to build the batch. The loop continues until the buffer is full,
+        but can be interrupted by a checkpoint or a sentinel from the producer.
+
+        Returns:
+            connector_sdk_pb2.UpdateResponse or list[connector_sdk_pb2.UpdateResponse]: Either a single response
+            containing records or checkpoint, or a list of responses when flushing data with a checkpoint.
+
+        """
+        while self._buffer_record_count < MAX_RECORDS_IN_BATCH and self._buffer_size_bytes < MAX_BATCH_SIZE_IN_BYTES:
+            operation = self._queue.get()
+
+            # Case 1: If operation is sentinel, mark the stream as done, flush the buffer.
+            if operation is self._sentinel:
+                self._is_done = True
+                if self._buffer:
+                    return self._flush_buffer()
+                else:
+                    raise StopIteration
+
+            # Case 2: if operation is a Checkpoint, flush the buffer and send the checkpoint.
+            elif isinstance(operation, connector_sdk_pb2.Checkpoint):
+                return self._flush_buffer_on_checkpoint(operation)
+
+            # it is record, buffer it to flush in batches
+            self._buffer_record_count += 1
+            self._buffer_size_bytes += len(operation.SerializeToString())
+            self._buffer.append(operation)
+
+        # Case 3: If buffer size limit is reached, flush the buffer and return the response.
+        return self._flush_buffer()
+
+    def _flush_buffer_on_checkpoint(self, checkpoint: connector_sdk_pb2.Checkpoint):
+        """
+        Creates the responses containing the checkpoint and buffered records.
+
+        Args:
+            checkpoint (object): Checkpoint operation to be added to the response.
+        """
+        responses = []
+
+        if self._buffer:
+            responses.append(self._flush_buffer())
+
+        responses.append(connector_sdk_pb2.UpdateResponse(checkpoint=checkpoint))
+        return responses
+
+    def _flush_buffer(self):
+        """
+        Flushes the current buffer and returns a response containing the buffered records.
+
+        Returns:
+            connector_sdk_pb2.UpdateResponse: A response containing the buffered records.
+        """
+        batch_to_flush = self._buffer
+        self._buffer = []
+        self._buffer_record_count = 0
+        self._buffer_size_bytes = 0
+        return connector_sdk_pb2.UpdateResponse(records=connector_sdk_pb2.Records(records=batch_to_flush))
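A hedged end-to-end sketch of driving the class above, assuming the package is installed. The driver code here is hypothetical; add, mark_done, unblock, and the iteration contract come from the file itself, and the protobuf message is constructed with default fields purely for illustration:

import threading

from fivetran_connector_sdk.operation_stream import _OperationStream
from fivetran_connector_sdk.protos import connector_sdk_pb2

stream = _OperationStream()

def producer():
    # Any non-Checkpoint message is buffered as a record; a Checkpoint blocks
    # this thread until the consumer flushes the buffer and calls unblock().
    stream.add(connector_sdk_pb2.Checkpoint())
    stream.mark_done()

threading.Thread(target=producer, daemon=True).start()

for response in stream:
    # A checkpoint flush yields a list of UpdateResponse messages; a plain
    # record batch yields a single UpdateResponse.
    for update in (response if isinstance(response, list) else [response]):
        pass  # forward each UpdateResponse over the gRPC stream here
    stream.unblock()  # a no-op unless a producer is waiting on a checkpoint flush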
fivetran_connector_sdk/operations.py

@@ -1,18 +1,12 @@
 import json
 import sys
-import queue
-import threading
 
-from datetime import datetime
+from datetime import datetime, date
 from google.protobuf import timestamp_pb2
 
 from fivetran_connector_sdk.constants import (
     JAVA_LONG_MAX_VALUE,
-    TABLES,
-    MAX_RECORDS_IN_BATCH,
-    MAX_BATCH_SIZE_IN_BYTES,
-    QUEUE_SIZE,
-    CHECKPOINT_OP_TIMEOUT_IN_SEC
+    TABLES
 )
 from fivetran_connector_sdk.helpers import (
     get_renamed_table_name,
@@ -21,161 +15,7 @@ from fivetran_connector_sdk.helpers import (
 )
 from fivetran_connector_sdk.logger import Logging
 from fivetran_connector_sdk.protos import connector_sdk_pb2, common_pb2
-
-class _OperationStream:
-    [... old lines 26-178 elided: the _OperationStream class body removed here is identical, line for line, to the version added in operation_stream.py above ...]
+from fivetran_connector_sdk.operation_stream import _OperationStream
 
 
 _LOG_DATA_TYPE_INFERENCE = {}
@@ -393,23 +233,52 @@ def map_defined_data_type(data_type, k, mapped_data, v):
         raise ValueError(f"Unsupported data type encountered: {data_type}. Please use valid data types.")
 
 def _parse_utc_datetime_str(v):
+    """
+    Accepts a timezone-aware datetime object or a datetime string in one of the following formats:
+    - '%Y-%m-%dT%H:%M:%S.%f%z' (e.g., '2025-08-12T15:00:00.123456+00:00')
+    - '%Y-%m-%dT%H:%M:%S%z' (e.g., '2025-08-12T15:00:00+00:00')
+    - Same as above formats but with a space instead of 'T' as the separator (e.g., '2025-08-12 15:00:00+00:00')
+
+    Returns a `google.protobuf.timestamp_pb2.Timestamp` object.
+    """
     timestamp = timestamp_pb2.Timestamp()
     dt = v
     if not isinstance(v, datetime):
+        dt = dt.strip().replace(' ', 'T', 1) # Replace first space with 'T' for ISO format
         dt = datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S.%f%z" if '.' in dt else "%Y-%m-%dT%H:%M:%S%z")
     timestamp.FromDatetime(dt)
     return timestamp
 
 def _parse_naive_datetime_str(v):
-
+    """
+    Accepts a datetime.datetime object or naive datetime string in one of the following formats:
+    - '%Y-%m-%dT%H:%M:%S.%f'
+    - '%Y-%m-%d %H:%M:%S.%f'
+    Returns a `google.protobuf.timestamp_pb2.Timestamp` object.
+    """
     timestamp = timestamp_pb2.Timestamp()
-    dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f" if '.' in v else "%Y-%m-%dT%H:%M:%S")
+    dt = v
+    if not isinstance(v, datetime):
+        dt = dt.strip().replace(' ', 'T', 1) # Replace first space with 'T' for ISO format
+        dt = datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S.%f" if '.' in dt else "%Y-%m-%dT%H:%M:%S")
+
     timestamp.FromDatetime(dt)
     return timestamp
 
 def _parse_naive_date_str(v):
+    """
+    Accepts a `datetime.date` object or a date string ('YYYY-MM-DD'),
+    Returns a `google.protobuf.timestamp_pb2.Timestamp` object.
+    """
     timestamp = timestamp_pb2.Timestamp()
-    dt = datetime.strptime(v, "%Y-%m-%d")
+
+    if isinstance(v, str):
+        dt = datetime.strptime(v, "%Y-%m-%d")
+    elif isinstance(v, (datetime, date)):
+        dt = datetime.combine(v if isinstance(v, date) else v.date(), datetime.min.time())
+    else:
+        raise TypeError(f"Expected str, datetime.date, or datetime.datetime, got {type(v)}")
+
     timestamp.FromDatetime(dt)
     return timestamp
 
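For illustration, inputs the reworked parsers now accept (a sketch only; these helpers are module-private, so this shows behavior rather than a supported API):

from datetime import date, datetime, timezone

from fivetran_connector_sdk import operations

operations._parse_utc_datetime_str("2025-08-12 15:00:00+00:00")   # space separator normalized to 'T'
operations._parse_utc_datetime_str(datetime(2025, 8, 12, 15, 0, tzinfo=timezone.utc))
operations._parse_naive_datetime_str("2025-08-12 15:00:00.123456")
operations._parse_naive_date_str("2025-08-12")
operations._parse_naive_date_str(date(2025, 8, 12))               # date objects now supported

Each call returns a google.protobuf.timestamp_pb2.Timestamp.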
{fivetran_connector_sdk-2.0.0.dist-info → fivetran_connector_sdk-2.0.1.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fivetran_connector_sdk
-Version: 2.0.0
+Version: 2.0.1
 Summary: Build custom connectors on Fivetran platform
 Author-email: Fivetran <developers@fivetran.com>
 Project-URL: Homepage, https://fivetran.com/docs/connectors/connector-sdk
@@ -13,7 +13,7 @@ Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 Requires-Dist: grpcio==1.71.0
 Requires-Dist: grpcio-tools==1.71.0
-Requires-Dist: requests==2.32.
+Requires-Dist: requests==2.32.5
 Requires-Dist: pipreqs==0.5.0
 Requires-Dist: prompt-toolkit==3.0.51
 Requires-Dist: unidecode==1.4.0
{fivetran_connector_sdk-2.0.0.dist-info → fivetran_connector_sdk-2.0.1.dist-info}/RECORD

@@ -1,9 +1,10 @@
-fivetran_connector_sdk/__init__.py,sha256=
-fivetran_connector_sdk/connector_helper.py,sha256=
-fivetran_connector_sdk/constants.py,sha256=
+fivetran_connector_sdk/__init__.py,sha256=PJ69wORro45NSU021JB7XxI-OaiJpNQgj034YGO6vNg,22765
+fivetran_connector_sdk/connector_helper.py,sha256=w7ZNoKJ14T8KlzMn_IkztglJR-szGWeBni-QAOGDKH4,43022
+fivetran_connector_sdk/constants.py,sha256=6HQgae2h22dlnJXFYZfaVQtE5QKYtOmvTY4AWaZwQLk,2433
 fivetran_connector_sdk/helpers.py,sha256=k_iBaRacPN3YkOkZ8bLuflNYXkUrtuj6fYH_rV1M-RI,15224
 fivetran_connector_sdk/logger.py,sha256=ud8v8-mKx65OAPaZvxBqt2-CU0vjgBeiYwuiqsYh_hA,3063
-fivetran_connector_sdk/
+fivetran_connector_sdk/operation_stream.py,sha256=DXLDv961xZ_GVSEPUFLtZy0IEf_ayQSEXFpEJp-CAu4,6194
+fivetran_connector_sdk/operations.py,sha256=Ez0vxaZ81q4vbUXr-vvL0PPeBnynjct4W8QboSY9mYU,12324
 fivetran_connector_sdk/protos/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fivetran_connector_sdk/protos/common_pb2.py,sha256=zkzs6Rd-lvsev6Nsq37xc4HLJZ_uNXPkotCLY7Y7i5U,8770
 fivetran_connector_sdk/protos/common_pb2.pyi,sha256=FdqlPKRqiXdUDT3e7adP5X42_Qzv_ItydUNJFKnJJIE,11478
@@ -11,8 +12,8 @@ fivetran_connector_sdk/protos/common_pb2_grpc.py,sha256=qni6h6BoA1nwJXr2bNtznfTk
 fivetran_connector_sdk/protos/connector_sdk_pb2.py,sha256=Inv87MlK5Q56GNvMNFQHyqIePDMKnkW9y_BrT9DgPck,7835
 fivetran_connector_sdk/protos/connector_sdk_pb2.pyi,sha256=3AC-bK6ZM-Bmr_RETOB3y_0u4ATWlwcbHzqVanDuOB0,8115
 fivetran_connector_sdk/protos/connector_sdk_pb2_grpc.py,sha256=bGlvc_vGwA9-FTqrj-BYlVcA-7jS8A9MSZ-XpZFytvY,8795
-fivetran_connector_sdk-2.0.
-fivetran_connector_sdk-2.0.
-fivetran_connector_sdk-2.0.
-fivetran_connector_sdk-2.0.
-fivetran_connector_sdk-2.0.
+fivetran_connector_sdk-2.0.1.dist-info/METADATA,sha256=V9188ij6TZhdwgtiondYi-jwyGYVH2HTyEDdmgvhiB8,3188
+fivetran_connector_sdk-2.0.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+fivetran_connector_sdk-2.0.1.dist-info/entry_points.txt,sha256=uQn0KPnFlQmXJfxlk0tifdNsSXWfVlnAFzNqjXZM_xM,57
+fivetran_connector_sdk-2.0.1.dist-info/top_level.txt,sha256=-_xk2MFY4psIh7jw1lJePMzFb5-vask8_ZtX-UzYWUI,23
+fivetran_connector_sdk-2.0.1.dist-info/RECORD,,
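For reference, each RECORD row has the form path,sha256=<digest>,<size-in-bytes>, where the digest is the urlsafe-base64 SHA-256 of the file with trailing '=' padding stripped. A quick local check against an extracted wheel (hypothetical path):

import base64
import hashlib

with open("fivetran_connector_sdk/constants.py", "rb") as f:
    digest = base64.urlsafe_b64encode(hashlib.sha256(f.read()).digest()).rstrip(b"=")
print(digest.decode())  # should print 6HQgae2h22dlnJXFYZfaVQtE5QKYtOmvTY4AWaZwQLk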
{fivetran_connector_sdk-2.0.0.dist-info → fivetran_connector_sdk-2.0.1.dist-info}/WHEEL
RENAMED
File without changes

{fivetran_connector_sdk-2.0.0.dist-info → fivetran_connector_sdk-2.0.1.dist-info}/entry_points.txt
RENAMED
File without changes

{fivetran_connector_sdk-2.0.0.dist-info → fivetran_connector_sdk-2.0.1.dist-info}/top_level.txt
RENAMED
File without changes