fivetran-connector-sdk 1.4.5__py3-none-any.whl → 1.4.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,278 @@
+ import os
+ import json
+ import inspect
+
+ from datetime import datetime
+ from google.protobuf import timestamp_pb2
+
+ from fivetran_connector_sdk import constants
+ from fivetran_connector_sdk.constants import (
+     JAVA_LONG_MAX_VALUE,
+     TABLES,
+ )
+ from fivetran_connector_sdk.helpers import (
+     get_renamed_table_name,
+     get_renamed_column_name,
+     print_library_log,
+ )
+ from fivetran_connector_sdk.logger import Logging
+ from fivetran_connector_sdk.protos import connector_sdk_pb2, common_pb2
+
+ class Operations:
+     @staticmethod
+     def upsert(table: str, data: dict) -> list[connector_sdk_pb2.UpdateResponse]:
+         """Updates records with the same primary key if already present in the destination. Inserts new records if not already present in the destination.
+
+         Args:
+             table (str): The name of the table.
+             data (dict): The data to upsert.
+
+         Returns:
+             list[connector_sdk_pb2.UpdateResponse]: A list of update responses.
+         """
+         if constants.DEBUGGING:
+             _yield_check(inspect.stack())
+
+         responses = []
+
+         table = get_renamed_table_name(table)
+         columns = _get_columns(table)
+         if not columns:
+             for field in data.keys():
+                 field_name = get_renamed_column_name(field)
+                 columns[field_name] = common_pb2.Column(
+                     name=field_name, type=common_pb2.DataType.UNSPECIFIED, primary_key=False)
+             new_table = common_pb2.Table(name=table, columns=columns.values())
+             TABLES[table] = new_table
+
+         mapped_data = _map_data_to_columns(data, columns)
+         record = connector_sdk_pb2.Record(
+             schema_name=None,
+             table_name=table,
+             type=common_pb2.OpType.UPSERT,
+             data=mapped_data
+         )
+
+         responses.append(
+             connector_sdk_pb2.UpdateResponse(
+                 operation=connector_sdk_pb2.Operation(record=record)))
+
+         return responses
+
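For orientation, a minimal sketch of how this operation is typically consumed in connector code, assuming the documented pattern of importing Operations as op and yielding each operation from the connector's update function; the table and row values are illustrative only:

from fivetran_connector_sdk import Connector, Operations as op

def update(configuration: dict, state: dict):
    # Each yielded UpdateResponse is handed back to the SDK runtime, which
    # forwards the record to the destination.
    yield op.upsert(table="orders", data={"order_id": 1, "amount": 9.99})

connector = Connector(update=update)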
+     @staticmethod
+     def update(table: str, modified: dict) -> connector_sdk_pb2.UpdateResponse:
+         """Performs an update operation on the specified table with the given modified data.
+
+         Args:
+             table (str): The name of the table.
+             modified (dict): The modified data.
+
+         Returns:
+             connector_sdk_pb2.UpdateResponse: The update response.
+         """
+         if constants.DEBUGGING:
+             _yield_check(inspect.stack())
+
+         table = get_renamed_table_name(table)
+         columns = _get_columns(table)
+         mapped_data = _map_data_to_columns(modified, columns)
+         record = connector_sdk_pb2.Record(
+             schema_name=None,
+             table_name=table,
+             type=common_pb2.OpType.UPDATE,
+             data=mapped_data
+         )
+
+         return connector_sdk_pb2.UpdateResponse(
+             operation=connector_sdk_pb2.Operation(record=record))
+
+     @staticmethod
+     def delete(table: str, keys: dict) -> connector_sdk_pb2.UpdateResponse:
+         """Performs a soft delete operation on the specified table with the given keys.
+
+         Args:
+             table (str): The name of the table.
+             keys (dict): The keys to delete.
+
+         Returns:
+             connector_sdk_pb2.UpdateResponse: The delete response.
+         """
+         if constants.DEBUGGING:
+             _yield_check(inspect.stack())
+
+         table = get_renamed_table_name(table)
+         columns = _get_columns(table)
+         mapped_data = _map_data_to_columns(keys, columns)
+         record = connector_sdk_pb2.Record(
+             schema_name=None,
+             table_name=table,
+             type=common_pb2.OpType.DELETE,
+             data=mapped_data
+         )
+
+         return connector_sdk_pb2.UpdateResponse(
+             operation=connector_sdk_pb2.Operation(record=record))
+
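Similarly, a hedged sketch of yielding update and delete from connector code; the column and key names are made up for illustration:

from fivetran_connector_sdk import Connector, Operations as op

def update(configuration: dict, state: dict):
    # Patch selected columns of an existing row, identified by its primary key.
    yield op.update(table="orders", modified={"order_id": 1, "status": "shipped"})
    # Soft-delete a row in the destination, again identified by its primary key.
    yield op.delete(table="orders", keys={"order_id": 2})

connector = Connector(update=update)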
+     @staticmethod
+     def checkpoint(state: dict) -> connector_sdk_pb2.UpdateResponse:
+         """Checkpoint saves the connector's state. State is a dict which stores information to continue the
+         sync from where it left off in the previous sync. For example, you may choose to have a field called
+         "cursor" with a timestamp value to indicate up to when the data has been synced. This makes it possible
+         for the next sync to fetch data incrementally from that time forward. See below for a few example fields
+         which act as parameters for use by the connector code.\n
+         {
+             "initialSync": true,\n
+             "cursor": "1970-01-01T00:00:00.00Z",\n
+             "last_resync": "1970-01-01T00:00:00.00Z",\n
+             "thread_count": 5,\n
+             "api_quota_left": 5000000
+         }
+
+         Args:
+             state (dict): The state to checkpoint/save.
+
+         Returns:
+             connector_sdk_pb2.UpdateResponse: The checkpoint response.
+         """
+         if constants.DEBUGGING:
+             _yield_check(inspect.stack())
+
+         return connector_sdk_pb2.UpdateResponse(
+             operation=connector_sdk_pb2.Operation(checkpoint=connector_sdk_pb2.Checkpoint(
+                 state_json=json.dumps(state))))
+
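A small sketch of the checkpointing pattern the docstring describes; the "cursor" key is an example field name, not a requirement:

from fivetran_connector_sdk import Connector, Operations as op

def update(configuration: dict, state: dict):
    cursor = state.get("cursor", "1970-01-01T00:00:00.00Z")
    # ... fetch rows newer than `cursor` and yield op.upsert(...) for each ...
    # Persist progress so the next sync resumes from this point.
    yield op.checkpoint(state={"cursor": "2024-01-01T00:00:00.00Z"})

connector = Connector(update=update)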
+ def _get_columns(table: str) -> dict:
+     """Retrieves the columns for the specified table.
+
+     Args:
+         table (str): The name of the table.
+
+     Returns:
+         dict: The columns for the table.
+     """
+     columns = {}
+     if table in TABLES:
+         for column in TABLES[table].columns:
+             columns[column.name] = column
+
+     return columns
+
+
+ def _map_data_to_columns(data: dict, columns: dict) -> dict:
+     """Maps data to the specified columns.
+
+     Args:
+         data (dict): The data to map.
+         columns (dict): The columns to map the data to.
+
+     Returns:
+         dict: The mapped data.
+     """
+     mapped_data = {}
+     for k, v in data.items():
+         key = get_renamed_column_name(k)
+         if v is None:
+             mapped_data[key] = common_pb2.ValueType(null=True)
+         elif (key in columns) and columns[key].type != common_pb2.DataType.UNSPECIFIED:
+             map_defined_data_type(columns, key, mapped_data, v)
+         else:
+             map_inferred_data_type(key, mapped_data, v)
+     return mapped_data
+
+ def map_inferred_data_type(k, mapped_data, v):
+     # We can infer the type from the value
+     if isinstance(v, int):
+         if abs(v) > JAVA_LONG_MAX_VALUE:
+             mapped_data[k] = common_pb2.ValueType(float=v)
+         else:
+             mapped_data[k] = common_pb2.ValueType(long=v)
+     elif isinstance(v, float):
+         mapped_data[k] = common_pb2.ValueType(float=v)
+     elif isinstance(v, bool):
+         mapped_data[k] = common_pb2.ValueType(bool=v)
+     elif isinstance(v, bytes):
+         mapped_data[k] = common_pb2.ValueType(binary=v)
+     elif isinstance(v, list):
+         raise ValueError(
+             "Values for the columns cannot be of type 'list'. Please ensure that all values are of a supported type. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#supporteddatatypes")
+     elif isinstance(v, dict):
+         mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
+     elif isinstance(v, str):
+         mapped_data[k] = common_pb2.ValueType(string=v)
+     else:
+         # Convert arbitrary objects to string
+         mapped_data[k] = common_pb2.ValueType(string=str(v))
+
+
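To illustrate the inference rules above, a rough sketch of which ValueType field each Python value lands in when no column type has been declared. It assumes the module is importable as fivetran_connector_sdk.operations (per the RECORD below) and that these illustrative column names pass through get_renamed_column_name unchanged:

from fivetran_connector_sdk.operations import _map_data_to_columns

row = {
    "id": 42,              # int within long range -> ValueType(long=...)
    "ratio": 0.5,          # float                 -> ValueType(float=...)
    "payload": {"a": 1},   # dict                  -> ValueType(json=...)
    "name": "abc",         # str                   -> ValueType(string=...)
    "deleted_at": None,    # None                  -> ValueType(null=True)
}
mapped = _map_data_to_columns(row, columns={})
print(mapped["id"].long, mapped["name"].string)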
+ def map_defined_data_type(columns, k, mapped_data, v):
+     if columns[k].type == common_pb2.DataType.BOOLEAN:
+         mapped_data[k] = common_pb2.ValueType(bool=v)
+     elif columns[k].type == common_pb2.DataType.SHORT:
+         mapped_data[k] = common_pb2.ValueType(short=v)
+     elif columns[k].type == common_pb2.DataType.INT:
+         mapped_data[k] = common_pb2.ValueType(int=v)
+     elif columns[k].type == common_pb2.DataType.LONG:
+         mapped_data[k] = common_pb2.ValueType(long=v)
+     elif columns[k].type == common_pb2.DataType.DECIMAL:
+         mapped_data[k] = common_pb2.ValueType(decimal=v)
+     elif columns[k].type == common_pb2.DataType.FLOAT:
+         mapped_data[k] = common_pb2.ValueType(float=v)
+     elif columns[k].type == common_pb2.DataType.DOUBLE:
+         mapped_data[k] = common_pb2.ValueType(double=v)
+     elif columns[k].type == common_pb2.DataType.NAIVE_DATE:
+         timestamp = timestamp_pb2.Timestamp()
+         dt = datetime.strptime(v, "%Y-%m-%d")
+         timestamp.FromDatetime(dt)
+         mapped_data[k] = common_pb2.ValueType(naive_date=timestamp)
+     elif columns[k].type == common_pb2.DataType.NAIVE_DATETIME:
+         if '.' not in v: v = v + ".0"
+         timestamp = timestamp_pb2.Timestamp()
+         dt = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f")
+         timestamp.FromDatetime(dt)
+         mapped_data[k] = common_pb2.ValueType(naive_datetime=timestamp)
+     elif columns[k].type == common_pb2.DataType.UTC_DATETIME:
+         timestamp = timestamp_pb2.Timestamp()
+         dt = v if isinstance(v, datetime) else _parse_datetime_str(v)
+         timestamp.FromDatetime(dt)
+         mapped_data[k] = common_pb2.ValueType(utc_datetime=timestamp)
+     elif columns[k].type == common_pb2.DataType.BINARY:
+         mapped_data[k] = common_pb2.ValueType(binary=v)
+     elif columns[k].type == common_pb2.DataType.XML:
+         mapped_data[k] = common_pb2.ValueType(xml=v)
+     elif columns[k].type == common_pb2.DataType.STRING:
+         incoming = v if isinstance(v, str) else str(v)
+         mapped_data[k] = common_pb2.ValueType(string=incoming)
+     elif columns[k].type == common_pb2.DataType.JSON:
+         mapped_data[k] = common_pb2.ValueType(json=json.dumps(v))
+     else:
+         raise ValueError(f"Unsupported data type encountered: {columns[k].type}. Please use valid data types.")
+
+ def _parse_datetime_str(dt):
+     return datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S.%f%z" if '.' in dt else "%Y-%m-%dT%H:%M:%S%z")
+
+
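For reference, the string layouts the date/datetime branches above expect, demonstrated with stdlib calls only (the values are illustrative):

from datetime import datetime

# NAIVE_DATE expects "YYYY-MM-DD".
datetime.strptime("2024-05-01", "%Y-%m-%d")
# NAIVE_DATETIME expects "YYYY-MM-DDTHH:MM:SS[.ffffff]"; a ".0" is appended when
# the fractional part is missing.
datetime.strptime("2024-05-01T10:15:30.0", "%Y-%m-%dT%H:%M:%S.%f")
# UTC_DATETIME strings go through _parse_datetime_str and need an explicit UTC
# offset, with or without fractional seconds ("Z" is accepted on Python 3.7+).
datetime.strptime("2024-05-01T10:15:30.123+00:00", "%Y-%m-%dT%H:%M:%S.%f%z")
datetime.strptime("2024-05-01T10:15:30Z", "%Y-%m-%dT%H:%M:%S%z")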
+ def _yield_check(stack):
+     """Checks for the presence of 'yield' in the calling code.
+     Args:
+         stack: The stack frame to check.
+     """
+
+     # Known issue with inspect.getmodule() and yield behavior in a frozen application.
+     # When using inspect.getmodule() on stack frames obtained by inspect.stack(), it fails
+     # to resolve the modules in a frozen application due to incompatible assumptions about
+     # the file paths. This can lead to unexpected behavior, such as yield returning None or
+     # the failure to retrieve the module inside a frozen app
+     # (Reference: https://github.com/pyinstaller/pyinstaller/issues/5963)
+
+     called_method = stack[0].function
+     calling_code = stack[1].code_context[0]
+     if f"{called_method}(" in calling_code:
+         if 'yield' not in calling_code:
+             print_library_log(
+                 f"Please add 'yield' to '{called_method}' operation on line {stack[1].lineno} in file '{stack[1].filename}'", Logging.Level.SEVERE)
+             os._exit(1)
+     else:
+         # This should never happen
+         raise RuntimeError(
+             f"The '{called_method}' function is missing in the connector calling code '{calling_code}'. Please ensure that the '{called_method}' function is properly defined in your code to proceed. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsmethods")
+
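For context, the mistake _yield_check guards against when constants.DEBUGGING is set; the connector body below is a hypothetical example:

from fivetran_connector_sdk import Connector, Operations as op

def update(configuration: dict, state: dict):
    row = {"order_id": 1, "amount": 9.99}
    # Wrong (flagged in debug runs): the operation is built but never yielded,
    # so nothing reaches the destination.
    # op.upsert(table="orders", data=row)

    # Correct: hand the operation back to the SDK runtime.
    yield op.upsert(table="orders", data=row)

connector = Connector(update=update)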
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: fivetran_connector_sdk
- Version: 1.4.5
+ Version: 1.4.6
  Summary: Build custom connectors on Fivetran platform
  Author-email: Fivetran <developers@fivetran.com>
  Project-URL: Homepage, https://fivetran.com/docs/connectors/connector-sdk
@@ -0,0 +1,18 @@
+ fivetran_connector_sdk/__init__.py,sha256=gke-JGyABQL-yjotb6mzrm4riFW7sYhgWbgpj_WG0Ns,20720
+ fivetran_connector_sdk/connector_helper.py,sha256=4NnHlr40dUPdta9gctN0WE8OYBKnZDEJKaurh8e45UE,38960
+ fivetran_connector_sdk/constants.py,sha256=tY8-fwB7O11orifFcDIELxfvYNz9-bNUhh6f5cXXws0,2287
+ fivetran_connector_sdk/helpers.py,sha256=6a-lovmNRTE4HmXSNnMGSiCPgJnplU_qGX6M8t0Hccw,12327
+ fivetran_connector_sdk/logger.py,sha256=G35fzBhdRMJ0oifls18j_7gqfP3egZFiiafubRmuG5A,2816
+ fivetran_connector_sdk/operations.py,sha256=fg-6JtRoIrklBEzRroYNqj2xoXmWA1q-v9bQQA5SATs,11060
+ fivetran_connector_sdk/protos/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ fivetran_connector_sdk/protos/common_pb2.py,sha256=kUwVcyZHgLigNR-KnHZn7dHrlxaMnUXqzprsRx6T72M,6831
+ fivetran_connector_sdk/protos/common_pb2.pyi,sha256=S0hdIzoXyyOKD5cjiGeDDLYpQ9J3LjAvu4rCj1JvJWE,9038
+ fivetran_connector_sdk/protos/common_pb2_grpc.py,sha256=1oboBPFxaTEXt9Aw7EAj8gXHDCNMhZD2VXqocC9l_gk,159
+ fivetran_connector_sdk/protos/connector_sdk_pb2.py,sha256=9Ke_Ti1s0vAeXapfXT-EryrT2-TSGQb8mhs4gxTpUMk,7732
+ fivetran_connector_sdk/protos/connector_sdk_pb2.pyi,sha256=FWYxRgshEF3QDYAE0TM_mv4N2gGvkxCH_uPpxnMc4oA,8406
+ fivetran_connector_sdk/protos/connector_sdk_pb2_grpc.py,sha256=ZfJLp4DW7uP4pFOZ74s_wQ6tD3eIPi-08UfnLwe4tzo,7163
+ fivetran_connector_sdk-1.4.6.dist-info/METADATA,sha256=espyZR0szObY5-ikesFbR6EPKkzJWYuetARZFSpFlRY,3150
+ fivetran_connector_sdk-1.4.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ fivetran_connector_sdk-1.4.6.dist-info/entry_points.txt,sha256=uQn0KPnFlQmXJfxlk0tifdNsSXWfVlnAFzNqjXZM_xM,57
+ fivetran_connector_sdk-1.4.6.dist-info/top_level.txt,sha256=-_xk2MFY4psIh7jw1lJePMzFb5-vask8_ZtX-UzYWUI,23
+ fivetran_connector_sdk-1.4.6.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.8.0)
+ Generator: setuptools (80.9.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
@@ -1,13 +0,0 @@
- fivetran_connector_sdk/__init__.py,sha256=BF_eAT9h-UCVMX9d-ZDgC8jtOYMAp15gbKzEMPoLMos,86932
- fivetran_connector_sdk/protos/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- fivetran_connector_sdk/protos/common_pb2.py,sha256=kUwVcyZHgLigNR-KnHZn7dHrlxaMnUXqzprsRx6T72M,6831
- fivetran_connector_sdk/protos/common_pb2.pyi,sha256=S0hdIzoXyyOKD5cjiGeDDLYpQ9J3LjAvu4rCj1JvJWE,9038
- fivetran_connector_sdk/protos/common_pb2_grpc.py,sha256=1oboBPFxaTEXt9Aw7EAj8gXHDCNMhZD2VXqocC9l_gk,159
- fivetran_connector_sdk/protos/connector_sdk_pb2.py,sha256=9Ke_Ti1s0vAeXapfXT-EryrT2-TSGQb8mhs4gxTpUMk,7732
- fivetran_connector_sdk/protos/connector_sdk_pb2.pyi,sha256=FWYxRgshEF3QDYAE0TM_mv4N2gGvkxCH_uPpxnMc4oA,8406
- fivetran_connector_sdk/protos/connector_sdk_pb2_grpc.py,sha256=ZfJLp4DW7uP4pFOZ74s_wQ6tD3eIPi-08UfnLwe4tzo,7163
- fivetran_connector_sdk-1.4.5.dist-info/METADATA,sha256=oHKyhb91aaZ_obddY7yURX_SXNDqHYocbmi9lIDnlVs,3150
- fivetran_connector_sdk-1.4.5.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
- fivetran_connector_sdk-1.4.5.dist-info/entry_points.txt,sha256=uQn0KPnFlQmXJfxlk0tifdNsSXWfVlnAFzNqjXZM_xM,57
- fivetran_connector_sdk-1.4.5.dist-info/top_level.txt,sha256=-_xk2MFY4psIh7jw1lJePMzFb5-vask8_ZtX-UzYWUI,23
- fivetran_connector_sdk-1.4.5.dist-info/RECORD,,