fivetran-connector-sdk 1.7.4__py3-none-any.whl → 1.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -22,8 +22,7 @@ from fivetran_connector_sdk.operations import Operations
 from fivetran_connector_sdk import constants
 from fivetran_connector_sdk.constants import (
     TESTER_VER, VERSION_FILENAME, UTF_8,
-    VALID_COMMANDS, DEFAULT_PYTHON_VERSION, SUPPORTED_PYTHON_VERSIONS, TABLES,
-    CONNECTOR_SDK_NO_YIELD_LABEL
+    VALID_COMMANDS, DEFAULT_PYTHON_VERSION, SUPPORTED_PYTHON_VERSIONS, TABLES
 )
 from fivetran_connector_sdk.helpers import (
     print_library_log, reset_local_file_directory, find_connector_object, suggest_correct_command,
@@ -43,7 +42,7 @@ from fivetran_connector_sdk.connector_helper import (
 
 # Version format: <major_version>.<minor_version>.<patch_version>
 # (where Major Version = 1 for GA, Minor Version is incremental MM from Jan 25 onwards, Patch Version is incremental within a month)
-__version__ = "1.7.4"
+__version__ = "1.8.0"
 TESTER_VERSION = TESTER_VER
 MAX_MESSAGE_LENGTH = 32 * 1024 * 1024 # 32MB
 
@@ -376,42 +375,43 @@ class Connector(connector_sdk_pb2_grpc.SourceConnectorServicer):
         try:
             print_library_log("Initiating the 'update' method call...", Logging.Level.INFO)
 
-            if os.environ.get(CONNECTOR_SDK_NO_YIELD_LABEL, "false").lower() == "true":
-                def run_update():
-                    try:
-                        result = self.update_method(configuration=configuration, state=state)
-                        # If the customer's update method returns a generator (i.e., uses yield),
-                        # exhaust the generator responses, they are None. From this point on, all operations
-                        # push update_response to a queue, and we yield from the queue instead.
-                        # We return None here intentionally.
-                        if isinstance(result, GeneratorType):
-                            for _ in result:
-                                pass
-                        # If the update method doesn't use yield, skip the response returned.
-                        else:
+            def run_update():
+                try:
+                    result = self.update_method(configuration=configuration, state=state)
+                    # If the customer's update method returns a generator (i.e., uses yield),
+                    # exhaust the generator responses, they are None. From this point on, all operations
+                    # push update_response to a queue, and we yield from the queue instead.
+                    # We return None here intentionally.
+                    if isinstance(result, GeneratorType):
+                        for _ in result:
                             pass
-                    except Exception as exc:
-                        exception_queue.put(exc)
-                    finally:
-                        Operations.operation_stream.mark_done()
-
-                thread = threading.Thread(target=run_update)
-                thread.start()
-
-                yield from Operations.operation_stream
+                    # If the update method doesn't use yield, skip the response returned.
+                    else:
+                        pass
+                except Exception as exc:
+                    exception_queue.put(exc)
+                finally:
+                    Operations.operation_stream.mark_done()
+
+            thread = threading.Thread(target=run_update)
+            thread.start()
+
+            # consumer - yield the operations in the operation_stream.
+            for response in Operations.operation_stream:
+                # checkpoint call always returns list of responses.
+                if isinstance(response, list):
+                    for res in response:
+                        yield res
+                    # checkpoint call blocks the queue (see _OperationStream.add method). unblock the queue after yielding all responses.
+                    Operations.operation_stream.unblock()
+                else:
+                    yield response
 
-                thread.join()
+            thread.join()
 
-                # Check if any exception was raised during the update
-                if not exception_queue.empty():
-                    raise exception_queue.get()
-            else:
-                for update_response in self.update_method(configuration=configuration, state=state):
-                    if isinstance(update_response, list):
-                        for response in update_response:
-                            yield response
-                    else:
-                        yield update_response
+            # Check if any exception was raised during the update
+            if not exception_queue.empty():
+                raise exception_queue.get()
 
         except TypeError as e:
             if str(e) != "'NoneType' object is not iterable":
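
For orientation, the connector-facing API is unchanged by this refactor: an update function still emits operations through the Operations helpers, and it may or may not yield them, because the servicer now drains any generator and routes every operation through Operations.operation_stream. A minimal sketch, assuming the public imports used in Fivetran's published examples (the table name and state payload here are illustrative only):

    from fivetran_connector_sdk import Connector
    from fivetran_connector_sdk import Operations as op

    def update(configuration: dict, state: dict):
        # Each op.* call now pushes the operation onto Operations.operation_stream;
        # the servicer thread batches buffered records into UpdateResponse messages itself.
        yield op.upsert(table="hello", data={"message": "hello, world"})
        yield op.checkpoint(state={"cursor": "2024-01-01T00:00:00Z"})

    connector = Connector(update=update)
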
@@ -57,27 +57,22 @@ def get_destination_group(args):
     ft_group = get_input_from_cli("Provide the destination name (as displayed in your dashboard destination list)", env_destination_name)
     return ft_group
 
-def get_connection_name(args):
+def get_connection_name(args, retrying=0):
     ft_connection = args.connection if args.connection else None
     env_connection_name = os.getenv('FIVETRAN_CONNECTION_NAME', None)
     if not ft_connection:
-        for retrying in range(MAX_RETRIES):
-            ft_connection = get_input_from_cli("Provide the connection name", env_connection_name)
-            if not is_connection_name_valid(ft_connection):
-                if retrying==MAX_RETRIES-1:
-                    sys.exit(1)
-                else:
-                    print_library_log(f"Connection name: {ft_connection} is invalid!\n The connection name should start with an "
-                                      f"underscore or a lowercase letter (a-z), followed by any combination of underscores, lowercase "
-                                      f"letters, or digits (0-9). Uppercase characters are not allowed.", Logging.Level.SEVERE)
-                    print_library_log("Please retry...", Logging.Level.INFO)
-            else:
-                break
+        ft_connection = get_input_from_cli("Provide the connection name", env_connection_name)
     if not is_connection_name_valid(ft_connection):
-        print_library_log(f"Connection name: {ft_connection} is invalid!\n The connection name should start with an "
-                          f"underscore or a lowercase letter (a-z), followed by any combination of underscores, lowercase "
-                          f"letters, or digits (0-9). Uppercase characters are not allowed.", Logging.Level.SEVERE)
-        sys.exit(1)
+        print_library_log(
+            f"Connection name: {ft_connection} is invalid!\n The connection name should start with an "
+            f"underscore or a lowercase letter (a-z), followed by any combination of underscores, lowercase "
+            f"letters, or digits (0-9). Uppercase characters are not allowed.", Logging.Level.SEVERE)
+        args.connection = None
+        if retrying >= MAX_RETRIES or args.force:
+            sys.exit(1)
+        else:
+            print_library_log("Please retry...", Logging.Level.INFO)
+            return get_connection_name(args, retrying + 1)
     return ft_connection
 
 def get_api_key(args):
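
The rule quoted in the log message above (start with an underscore or a lowercase letter, then any mix of underscores, lowercase letters, or digits) amounts to a pattern along these lines; is_connection_name_valid itself is not shown in this diff, so this is only an illustrative approximation:

    import re

    # Hypothetical stand-in for is_connection_name_valid, derived from the log message.
    CONNECTION_NAME_PATTERN = re.compile(r"^[a-z_][a-z0-9_]*$")

    def looks_like_valid_connection_name(name: str) -> bool:
        return bool(CONNECTION_NAME_PATTERN.match(name))

    assert looks_like_valid_connection_name("_my_connection_01")
    assert not looks_like_valid_connection_name("MyConnection")
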
@@ -113,32 +108,36 @@ def get_state(args):
     state = validate_and_load_state(args, state)
     return state
 
-def get_configuration(args):
+def get_configuration(args, retrying = 0):
     configuration = args.configuration if args.configuration else None
    env_configuration = os.getenv('FIVETRAN_CONFIGURATION', None)
-    if not configuration and not args.force and args.command.lower() == "deploy":
-        json_filepath = os.path.join(args.project_path, "configuration.json")
-        if os.path.exists(json_filepath):
-            print_library_log("configuration.json file detected in the project, "
-                              "but no configuration input provided via the command line", Logging.Level.WARNING)
-            env_configuration = env_configuration if env_configuration else "configuration.json"
-            confirm = input(f"Does this debug run/deploy need configuration (y/N):")
-        if confirm.lower()=='y':
-            for retrying in range(MAX_RETRIES):
-                try:
-                    configuration = get_input_from_cli("Provide the configuration file path", env_configuration)
-                    config_values = validate_and_load_configuration(args.project_path, configuration)
-                    return config_values, configuration
-                except ValueError as e:
-                    if retrying==MAX_RETRIES-1:
-                        print_library_log(f"{e}. Invalid Configuration, Exiting..", Logging.Level.WARNING)
-                        sys.exit(1)
-                    else:
-                        print_library_log(f"{e}. Please retry..", Logging.Level.INFO)
+    try:
+        if not configuration and not args.force and args.command.lower() == "deploy":
+            confirm = 'y'
+            if not retrying:
+                json_filepath = os.path.join(args.project_path, "configuration.json")
+                if os.path.exists(json_filepath):
+                    print_library_log("configuration.json file detected in the project, "
+                                      "but no configuration input provided via the command line", Logging.Level.WARNING)
+                    env_configuration = env_configuration if env_configuration else "configuration.json"
+                    confirm = input(f"Does this debug run/deploy need configuration (y/N):")
+            if confirm.lower()=='y':
+                configuration = get_input_from_cli("Provide the configuration file path", env_configuration)
+                config_values = validate_and_load_configuration(args.project_path, configuration)
+                return config_values, configuration
+            else:
+                print_library_log("No input required for configuration. Continuing without configuration.", Logging.Level.INFO)
+                return {}, None
+        config_values = validate_and_load_configuration(args.project_path, configuration)
+        return config_values, configuration
+    except ValueError as e:
+        args.configuration = None
+        if retrying >= MAX_RETRIES or args.force:
+            print_library_log(f"{e}. Invalid Configuration, Exiting..", Logging.Level.WARNING)
+            sys.exit(1)
        else:
-            print_library_log("No input required for configuration. Continuing without configuration.", Logging.Level.INFO)
-    config_values = validate_and_load_configuration(args.project_path, configuration)
-    return config_values, configuration
+            print_library_log(f"{e}. Please retry..", Logging.Level.INFO)
+            return get_configuration(args, retrying + 1)
 
 
 def check_newer_version(version: str):
@@ -989,8 +988,6 @@ def process_data_type(column, type):
         column.type = common_pb2.DataType.FLOAT
     elif type.upper() == "DOUBLE":
         column.type = common_pb2.DataType.DOUBLE
-    elif type.upper() == "NAIVE_TIME":
-        column.type = common_pb2.DataType.NAIVE_TIME
     elif type.upper() == "NAIVE_DATE":
         column.type = common_pb2.DataType.NAIVE_DATE
     elif type.upper() == "NAIVE_DATETIME":
@@ -1,6 +1,7 @@
+import os
 import re
 
-TESTER_VER = "2.25.0701.001"
+TESTER_VER = "2.25.0806.001"
 
 WIN_OS = "windows"
 ARM_64 = "arm64"
@@ -34,12 +35,16 @@ OUTPUT_FILES_DIR = "files"
 REQUIREMENTS_TXT = "requirements.txt"
 PYPI_PACKAGE_DETAILS_URL = "https://pypi.org/pypi/fivetran_connector_sdk/json"
 ONE_DAY_IN_SEC = 24 * 60 * 60
+CHECKPOINT_OP_TIMEOUT_IN_SEC = 30 # seconds
 MAX_RETRIES = 3
 LOGGING_PREFIX = "Fivetran-Connector-SDK"
 LOGGING_DELIMITER = ": "
 VIRTUAL_ENV_CONFIG = "pyvenv.cfg"
 ROOT_FILENAME = "connector.py"
-CONNECTOR_SDK_NO_YIELD_LABEL = "CONNECTOR_SDK_NO_YIELD_APPROACH"
+MAX_RECORDS_IN_BATCH = 100
+MAX_BATCH_SIZE_IN_BYTES = 100000 # 100 KB
+# The increased queue size shall be rolled out as Feature flag
+QUEUE_SIZE = int(os.environ.get("QUEUE_SIZE", 1))
 
 # Compile patterns used in the implementation
 WORD_DASH_DOT_PATTERN = re.compile(r'^[\w.-]*$')
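
QUEUE_SIZE is evaluated once, when constants.py is imported, so an override has to be in the environment before the SDK is imported. A small sketch of that assumption (the value 10 is arbitrary):

    import os

    # Set the feature-flagged queue size before the SDK reads it at import time.
    os.environ["QUEUE_SIZE"] = "10"

    from fivetran_connector_sdk import constants

    print(constants.QUEUE_SIZE)  # -> 10
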
@@ -1,14 +1,18 @@
 import json
-import os
 import sys
 import queue
+import threading
 
 from datetime import datetime
 from google.protobuf import timestamp_pb2
 
 from fivetran_connector_sdk.constants import (
     JAVA_LONG_MAX_VALUE,
-    TABLES, CONNECTOR_SDK_NO_YIELD_LABEL,
+    TABLES,
+    MAX_RECORDS_IN_BATCH,
+    MAX_BATCH_SIZE_IN_BYTES,
+    QUEUE_SIZE,
+    CHECKPOINT_OP_TIMEOUT_IN_SEC
 )
 from fivetran_connector_sdk.helpers import (
     get_renamed_table_name,
@@ -38,8 +42,15 @@ class _OperationStream:
         """
         Initializes the operation stream with a queue and a sentinel object.
         """
-        self._queue = queue.Queue(maxsize=1)
+        self._queue = queue.Queue(maxsize=QUEUE_SIZE)
         self._sentinel = object()
+        self._is_done = False
+        self._buffer = []
+        self._buffer_record_count = 0
+        self._buffer_size_bytes = 0
+        self._checkpoint_lock = threading.Lock()
+        self._checkpoint_flush_signal = threading.Event()
+        self._checkpoint_flush_signal.set()
 
     def __iter__(self):
         """
@@ -47,14 +58,38 @@ class _OperationStream:
         """
         return self
 
-    def add(self, data):
+    def add(self, operation):
         """
-        Adds an item to the stream.
+        Adds an operation to the stream. Guarantees that operations within a single thread are processed in the order.
+
+        In multithreaded environment if a thread initiates a checkpoint, it's producer is blocked until the
+        checkpoint flush is complete. This block is localized, other threads
+        remain unblocked and can continue to perform other operations
+        (such as upserts, updates, deletes), but they are prevented from initiating a new checkpoint
+        until the existing one is finished.
 
         Args:
-            data (object): The data item to add to the stream.
+            operation (object): The data item to add to the stream.
+        """
+        if isinstance(operation, connector_sdk_pb2.Checkpoint):
+            # lock to ensure checkpoint operations are processed one at a time
+            with self._checkpoint_lock:
+                # clear the signal to indicate checkpoint operation is being processed.
+                self._checkpoint_flush_signal.clear()
+                self._queue.put(operation)
+                # wait until the consumer flushes the buffer and sets the flag.
+                if not self._checkpoint_flush_signal.wait(CHECKPOINT_OP_TIMEOUT_IN_SEC):
+                    raise TimeoutError(
+                        "Checkpoint flush timed out. Consumer may have failed to process checkpoint."
+                    )
+        else:
+            self._queue.put(operation)
+
+    def unblock(self):
+        """
+        Unblocks the queue, called by consumer after the checkpoint flush is completed.
         """
-        self._queue.put(data)
+        self._checkpoint_flush_signal.set()
 
     def mark_done(self):
         """
@@ -72,10 +107,76 @@ class _OperationStream:
         Raises:
             StopIteration: If the sentinel object is encountered.
         """
-        item = self._queue.get()
-        if item is self._sentinel:
+        # If stream is completed and buffer is empty, raise StopIteration. Else flush the buffer.
+        if self._is_done and not self._buffer:
             raise StopIteration
-        return item
+
+        if self._is_done:
+            return self._flush_buffer()
+
+        return self._build_next_batch()
+
+    def _build_next_batch(self):
+        """
+        Core logic to build the batch. The loop continues until the buffer is full,
+        but can be interrupted by a checkpoint or a sentinel from the producer.
+
+        Returns:
+            connector_sdk_pb2.UpdateResponse or list[connector_sdk_pb2.UpdateResponse]: Either a single response
+            containing records or checkpoint, or a list of responses when flushing data with a checkpoint.
+
+        """
+        while self._buffer_record_count < MAX_RECORDS_IN_BATCH and self._buffer_size_bytes < MAX_BATCH_SIZE_IN_BYTES:
+            operation = self._queue.get()
+
+            # Case 1: If operation is sentinel, mark the stream as done, flush the buffer.
+            if operation is self._sentinel:
+                self._is_done = True
+                if self._buffer:
+                    return self._flush_buffer()
+                else:
+                    raise StopIteration
+
+            # Case 2: if operation is a Checkpoint, flush the buffer and send the checkpoint.
+            elif isinstance(operation, connector_sdk_pb2.Checkpoint):
+                return self._flush_buffer_on_checkpoint(operation)
+
+            # it is record, buffer it to flush in batches
+            self._buffer_record_count += 1
+            self._buffer_size_bytes += len(operation.SerializeToString())
+            self._buffer.append(operation)
+
+        # Case 3: If buffer size limit is reached, flush the buffer and return the response.
+        return self._flush_buffer()
+
+    def _flush_buffer_on_checkpoint(self, checkpoint: connector_sdk_pb2.Checkpoint):
+        """
+        Creates the responses containing the checkpoint and buffered records.
+
+        Args:
+            checkpoint (object): Checkpoint operation to be added to the response.
+        """
+        responses = []
+
+        if self._buffer:
+            responses.append(self._flush_buffer())
+
+        responses.append(connector_sdk_pb2.UpdateResponse(checkpoint=checkpoint))
+        return responses
+
+    def _flush_buffer(self):
+        """
+        Flushes the current buffer and returns a response containing the buffered records.
+
+        Returns:
+            connector_sdk_pb2.UpdateResponse: A response containing the buffered records.
+        """
+        batch_to_flush = self._buffer
+        self._buffer = []
+        self._buffer_record_count = 0
+        self._buffer_size_bytes = 0
+        return connector_sdk_pb2.UpdateResponse(records=connector_sdk_pb2.Records(records=batch_to_flush))
+
 
 _LOG_DATA_TYPE_INFERENCE = {
     "boolean": True,
@@ -85,7 +186,6 @@ _LOG_DATA_TYPE_INFERENCE = {
 
 class Operations:
     operation_stream = _OperationStream()
-    use_no_yield_approach = os.environ.get(CONNECTOR_SDK_NO_YIELD_LABEL, "false").lower() == "true"
 
     @staticmethod
     def upsert(table: str, data: dict):
@@ -115,14 +215,8 @@ class Operations:
             type=common_pb2.RecordType.UPSERT,
             data=mapped_data
         )
-        update_response = connector_sdk_pb2.UpdateResponse(record=record)
-
-        if Operations.use_no_yield_approach:
-            Operations.operation_stream.add(update_response)
-            return None
-        else:
-            return [update_response]
 
+        Operations.operation_stream.add(record)
 
     @staticmethod
     def update(table: str, modified: dict):
@@ -145,13 +239,7 @@ class Operations:
             data=mapped_data
         )
 
-        update_response = connector_sdk_pb2.UpdateResponse(record=record)
-
-        if Operations.use_no_yield_approach:
-            Operations.operation_stream.add(update_response)
-            return None
-        else:
-            return update_response
+        Operations.operation_stream.add(record)
 
     @staticmethod
     def delete(table: str, keys: dict):
@@ -174,13 +262,7 @@ class Operations:
             data=mapped_data
         )
 
-        update_response = connector_sdk_pb2.UpdateResponse(record=record)
-
-        if Operations.use_no_yield_approach:
-            Operations.operation_stream.add(update_response)
-            return None
-        else:
-            return update_response
+        Operations.operation_stream.add(record)
 
     @staticmethod
     def checkpoint(state: dict):
@@ -203,14 +285,9 @@ class Operations:
        Returns:
            connector_sdk_pb2.UpdateResponse: The checkpoint response.
        """
-        update_response = connector_sdk_pb2.UpdateResponse(checkpoint=connector_sdk_pb2.Checkpoint(state_json=json.dumps(state)))
-
-        if Operations.use_no_yield_approach:
-            Operations.operation_stream.add(update_response)
-            return None
-        else:
-            return update_response
+        checkpoint = connector_sdk_pb2.Checkpoint(state_json=json.dumps(state))
 
+        Operations.operation_stream.add(checkpoint)
 
 
 def _get_columns(table: str) -> dict:
    """Retrieves the columns for the specified table.
@@ -352,4 +429,3 @@ def _yield_check(stack):
     # This should never happen
     raise RuntimeError(
         f"The '{called_method}' function is missing in the connector calling code '{calling_code}'. Please ensure that the '{called_method}' function is properly defined in your code to proceed. Reference: https://fivetran.com/docs/connectors/connector-sdk/technical-reference#technicaldetailsmethods")
-
@@ -26,7 +26,7 @@ _sym_db = _symbol_database.Default()
 from fivetran_connector_sdk import common_pb2 as common__pb2
 
 
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x63onnector_sdk.proto\x12\x0f\x66ivetran_sdk.v2\x1a\x0c\x63ommon.proto\"\x8f\x01\n\rSchemaRequest\x12H\n\rconfiguration\x18\x01 \x03(\x0b\x32\x31.fivetran_sdk.v2.SchemaRequest.ConfigurationEntry\x1a\x34\n\x12\x43onfigurationEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xf1\x01\n\x0eSchemaResponse\x12\'\n\x1dschema_response_not_supported\x18\x01 \x01(\x08H\x00\x12\x32\n\x0bwith_schema\x18\x02 \x01(\x0b\x32\x1b.fivetran_sdk.v2.SchemaListH\x00\x12\x34\n\x0ewithout_schema\x18\x03 \x01(\x0b\x32\x1a.fivetran_sdk.v2.TableListH\x00\x12$\n\x17selection_not_supported\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\n\n\x08responseB\x1a\n\x18_selection_not_supported\"\xf9\x01\n\rUpdateRequest\x12H\n\rconfiguration\x18\x01 \x03(\x0b\x32\x31.fivetran_sdk.v2.UpdateRequest.ConfigurationEntry\x12\x32\n\tselection\x18\x02 \x01(\x0b\x32\x1a.fivetran_sdk.v2.SelectionH\x00\x88\x01\x01\x12\x17\n\nstate_json\x18\x03 \x01(\tH\x01\x88\x01\x01\x1a\x34\n\x12\x43onfigurationEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0c\n\n_selectionB\r\n\x0b_state_json\"\x91\x01\n\tSelection\x12=\n\x0ewithout_schema\x18\x01 \x01(\x0b\x32#.fivetran_sdk.v2.TablesWithNoSchemaH\x00\x12\x38\n\x0bwith_schema\x18\x02 \x01(\x0b\x32!.fivetran_sdk.v2.TablesWithSchemaH\x00\x42\x0b\n\tselection\"a\n\x12TablesWithNoSchema\x12/\n\x06tables\x18\x01 \x03(\x0b\x32\x1f.fivetran_sdk.v2.TableSelection\x12\x1a\n\x12include_new_tables\x18\x02 \x01(\x08\"b\n\x10TablesWithSchema\x12\x31\n\x07schemas\x18\x01 \x03(\x0b\x32 .fivetran_sdk.v2.SchemaSelection\x12\x1b\n\x13include_new_schemas\x18\x02 \x01(\x08\"\x85\x01\n\x0fSchemaSelection\x12\x10\n\x08included\x18\x01 \x01(\x08\x12\x13\n\x0bschema_name\x18\x02 \x01(\t\x12/\n\x06tables\x18\x03 \x03(\x0b\x32\x1f.fivetran_sdk.v2.TableSelection\x12\x1a\n\x12include_new_tables\x18\x04 \x01(\x08\"\xc2\x01\n\x0eTableSelection\x12\x10\n\x08included\x18\x01 \x01(\x08\x12\x12\n\ntable_name\x18\x02 \x01(\t\x12=\n\x07\x63olumns\x18\x03 \x03(\x0b\x32,.fivetran_sdk.v2.TableSelection.ColumnsEntry\x12\x1b\n\x13include_new_columns\x18\x04 \x01(\x08\x1a.\n\x0c\x43olumnsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\"\x87\x02\n\x0eUpdateResponse\x12)\n\x06record\x18\x01 \x01(\x0b\x32\x17.fivetran_sdk.v2.RecordH\x00\x12\x36\n\rschema_change\x18\x02 \x01(\x0b\x32\x1d.fivetran_sdk.v2.SchemaChangeH\x00\x12\x31\n\ncheckpoint\x18\x03 \x01(\x0b\x32\x1b.fivetran_sdk.v2.CheckpointH\x00\x12+\n\x07warning\x18\x04 \x01(\x0b\x32\x18.fivetran_sdk.v2.WarningH\x00\x12%\n\x04task\x18\x05 \x01(\x0b\x32\x15.fivetran_sdk.v2.TaskH\x00\x42\x0b\n\toperation\"\x82\x01\n\x0cSchemaChange\x12\x32\n\x0bwith_schema\x18\x01 \x01(\x0b\x32\x1b.fivetran_sdk.v2.SchemaListH\x00\x12\x34\n\x0ewithout_schema\x18\x02 \x01(\x0b\x32\x1a.fivetran_sdk.v2.TableListH\x00\x42\x08\n\x06\x63hange\"\xeb\x01\n\x06Record\x12\x18\n\x0bschema_name\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x12\n\ntable_name\x18\x02 \x01(\t\x12)\n\x04type\x18\x03 \x01(\x0e\x32\x1b.fivetran_sdk.v2.RecordType\x12/\n\x04\x64\x61ta\x18\x04 \x03(\x0b\x32!.fivetran_sdk.v2.Record.DataEntry\x1aG\n\tDataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.fivetran_sdk.v2.ValueType:\x02\x38\x01\x42\x0e\n\x0c_schema_name\" \n\nCheckpoint\x12\x12\n\nstate_json\x18\x01 \x01(\t2\xe2\x02\n\x0fSourceConnector\x12l\n\x11\x43onfigurationForm\x12).fivetran_sdk.v2.ConfigurationFormRequest\x1a*.fivetran_sdk.v2.ConfigurationFormResponse\"\x00\x12\x45\n\x04Test\x12\x1c.fivetran_sdk.v2.TestRequest\x1a\x1d.fivetran_sdk.v2.TestResponse\"\x00\x12K\n\x06Schema\x12\x1e.fivetran_sdk.v2.SchemaRequest\x1a\x1f.fivetran_sdk.v2.SchemaResponse\"\x00\x12M\n\x06Update\x12\x1e.fivetran_sdk.v2.UpdateRequest\x1a\x1f.fivetran_sdk.v2.UpdateResponse\"\x00\x30\x01\x42\"H\x01P\x01Z\x1c\x66ivetran.com/fivetran_sdk_v2b\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x63onnector_sdk.proto\x12\x0f\x66ivetran_sdk.v2\x1a\x0c\x63ommon.proto\"\x8f\x01\n\rSchemaRequest\x12H\n\rconfiguration\x18\x01 \x03(\x0b\x32\x31.fivetran_sdk.v2.SchemaRequest.ConfigurationEntry\x1a\x34\n\x12\x43onfigurationEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xf1\x01\n\x0eSchemaResponse\x12\'\n\x1dschema_response_not_supported\x18\x01 \x01(\x08H\x00\x12\x32\n\x0bwith_schema\x18\x02 \x01(\x0b\x32\x1b.fivetran_sdk.v2.SchemaListH\x00\x12\x34\n\x0ewithout_schema\x18\x03 \x01(\x0b\x32\x1a.fivetran_sdk.v2.TableListH\x00\x12$\n\x17selection_not_supported\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\n\n\x08responseB\x1a\n\x18_selection_not_supported\"\xf9\x01\n\rUpdateRequest\x12H\n\rconfiguration\x18\x01 \x03(\x0b\x32\x31.fivetran_sdk.v2.UpdateRequest.ConfigurationEntry\x12\x32\n\tselection\x18\x02 \x01(\x0b\x32\x1a.fivetran_sdk.v2.SelectionH\x00\x88\x01\x01\x12\x17\n\nstate_json\x18\x03 \x01(\tH\x01\x88\x01\x01\x1a\x34\n\x12\x43onfigurationEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0c\n\n_selectionB\r\n\x0b_state_json\"\x91\x01\n\tSelection\x12=\n\x0ewithout_schema\x18\x01 \x01(\x0b\x32#.fivetran_sdk.v2.TablesWithNoSchemaH\x00\x12\x38\n\x0bwith_schema\x18\x02 \x01(\x0b\x32!.fivetran_sdk.v2.TablesWithSchemaH\x00\x42\x0b\n\tselection\"a\n\x12TablesWithNoSchema\x12/\n\x06tables\x18\x01 \x03(\x0b\x32\x1f.fivetran_sdk.v2.TableSelection\x12\x1a\n\x12include_new_tables\x18\x02 \x01(\x08\"b\n\x10TablesWithSchema\x12\x31\n\x07schemas\x18\x01 \x03(\x0b\x32 .fivetran_sdk.v2.SchemaSelection\x12\x1b\n\x13include_new_schemas\x18\x02 \x01(\x08\"\x85\x01\n\x0fSchemaSelection\x12\x10\n\x08included\x18\x01 \x01(\x08\x12\x13\n\x0bschema_name\x18\x02 \x01(\t\x12/\n\x06tables\x18\x03 \x03(\x0b\x32\x1f.fivetran_sdk.v2.TableSelection\x12\x1a\n\x12include_new_tables\x18\x04 \x01(\x08\"\xc2\x01\n\x0eTableSelection\x12\x10\n\x08included\x18\x01 \x01(\x08\x12\x12\n\ntable_name\x18\x02 \x01(\t\x12=\n\x07\x63olumns\x18\x03 \x03(\x0b\x32,.fivetran_sdk.v2.TableSelection.ColumnsEntry\x12\x1b\n\x13include_new_columns\x18\x04 \x01(\x08\x1a.\n\x0c\x43olumnsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\"\xb4\x02\n\x0eUpdateResponse\x12)\n\x06record\x18\x01 \x01(\x0b\x32\x17.fivetran_sdk.v2.RecordH\x00\x12\x36\n\rschema_change\x18\x02 \x01(\x0b\x32\x1d.fivetran_sdk.v2.SchemaChangeH\x00\x12\x31\n\ncheckpoint\x18\x03 \x01(\x0b\x32\x1b.fivetran_sdk.v2.CheckpointH\x00\x12+\n\x07warning\x18\x04 \x01(\x0b\x32\x18.fivetran_sdk.v2.WarningH\x00\x12%\n\x04task\x18\x05 \x01(\x0b\x32\x15.fivetran_sdk.v2.TaskH\x00\x12+\n\x07records\x18\x06 \x01(\x0b\x32\x18.fivetran_sdk.v2.RecordsH\x00\x42\x0b\n\toperation\"\x82\x01\n\x0cSchemaChange\x12\x32\n\x0bwith_schema\x18\x01 \x01(\x0b\x32\x1b.fivetran_sdk.v2.SchemaListH\x00\x12\x34\n\x0ewithout_schema\x18\x02 \x01(\x0b\x32\x1a.fivetran_sdk.v2.TableListH\x00\x42\x08\n\x06\x63hange\"3\n\x07Records\x12(\n\x07records\x18\x01 \x03(\x0b\x32\x17.fivetran_sdk.v2.Record\"\xeb\x01\n\x06Record\x12\x18\n\x0bschema_name\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x12\n\ntable_name\x18\x02 \x01(\t\x12)\n\x04type\x18\x03 \x01(\x0e\x32\x1b.fivetran_sdk.v2.RecordType\x12/\n\x04\x64\x61ta\x18\x04 \x03(\x0b\x32!.fivetran_sdk.v2.Record.DataEntry\x1aG\n\tDataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.fivetran_sdk.v2.ValueType:\x02\x38\x01\x42\x0e\n\x0c_schema_name\" \n\nCheckpoint\x12\x12\n\nstate_json\x18\x01 \x01(\t2\xe2\x02\n\x0fSourceConnector\x12l\n\x11\x43onfigurationForm\x12).fivetran_sdk.v2.ConfigurationFormRequest\x1a*.fivetran_sdk.v2.ConfigurationFormResponse\"\x00\x12\x45\n\x04Test\x12\x1c.fivetran_sdk.v2.TestRequest\x1a\x1d.fivetran_sdk.v2.TestResponse\"\x00\x12K\n\x06Schema\x12\x1e.fivetran_sdk.v2.SchemaRequest\x1a\x1f.fivetran_sdk.v2.SchemaResponse\"\x00\x12M\n\x06Update\x12\x1e.fivetran_sdk.v2.UpdateRequest\x1a\x1f.fivetran_sdk.v2.UpdateResponse\"\x00\x30\x01\x42\"H\x01P\x01Z\x1c\x66ivetran.com/fivetran_sdk_v2b\x06proto3')
 
 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -65,15 +65,17 @@ if not _descriptor._USE_C_DESCRIPTORS:
   _globals['_TABLESELECTION_COLUMNSENTRY']._serialized_start=1328
   _globals['_TABLESELECTION_COLUMNSENTRY']._serialized_end=1374
   _globals['_UPDATERESPONSE']._serialized_start=1377
-  _globals['_UPDATERESPONSE']._serialized_end=1640
-  _globals['_SCHEMACHANGE']._serialized_start=1643
-  _globals['_SCHEMACHANGE']._serialized_end=1773
-  _globals['_RECORD']._serialized_start=1776
-  _globals['_RECORD']._serialized_end=2011
-  _globals['_RECORD_DATAENTRY']._serialized_start=1924
-  _globals['_RECORD_DATAENTRY']._serialized_end=1995
-  _globals['_CHECKPOINT']._serialized_start=2013
-  _globals['_CHECKPOINT']._serialized_end=2045
-  _globals['_SOURCECONNECTOR']._serialized_start=2048
-  _globals['_SOURCECONNECTOR']._serialized_end=2402
+  _globals['_UPDATERESPONSE']._serialized_end=1685
+  _globals['_SCHEMACHANGE']._serialized_start=1688
+  _globals['_SCHEMACHANGE']._serialized_end=1818
+  _globals['_RECORDS']._serialized_start=1820
+  _globals['_RECORDS']._serialized_end=1871
+  _globals['_RECORD']._serialized_start=1874
+  _globals['_RECORD']._serialized_end=2109
+  _globals['_RECORD_DATAENTRY']._serialized_start=2022
+  _globals['_RECORD_DATAENTRY']._serialized_end=2093
+  _globals['_CHECKPOINT']._serialized_start=2111
+  _globals['_CHECKPOINT']._serialized_end=2143
+  _globals['_SOURCECONNECTOR']._serialized_start=2146
+  _globals['_SOURCECONNECTOR']._serialized_end=2500
 # @@protoc_insertion_point(module_scope)
@@ -104,18 +104,20 @@ class TableSelection(_message.Message):
     def __init__(self, included: bool = ..., table_name: _Optional[str] = ..., columns: _Optional[_Mapping[str, bool]] = ..., include_new_columns: bool = ...) -> None: ...
 
 class UpdateResponse(_message.Message):
-    __slots__ = ("record", "schema_change", "checkpoint", "warning", "task")
+    __slots__ = ("record", "schema_change", "checkpoint", "warning", "task", "records")
     RECORD_FIELD_NUMBER: _ClassVar[int]
     SCHEMA_CHANGE_FIELD_NUMBER: _ClassVar[int]
     CHECKPOINT_FIELD_NUMBER: _ClassVar[int]
     WARNING_FIELD_NUMBER: _ClassVar[int]
     TASK_FIELD_NUMBER: _ClassVar[int]
+    RECORDS_FIELD_NUMBER: _ClassVar[int]
     record: Record
     schema_change: SchemaChange
     checkpoint: Checkpoint
     warning: _common_pb2.Warning
     task: _common_pb2.Task
-    def __init__(self, record: _Optional[_Union[Record, _Mapping]] = ..., schema_change: _Optional[_Union[SchemaChange, _Mapping]] = ..., checkpoint: _Optional[_Union[Checkpoint, _Mapping]] = ..., warning: _Optional[_Union[_common_pb2.Warning, _Mapping]] = ..., task: _Optional[_Union[_common_pb2.Task, _Mapping]] = ...) -> None: ...
+    records: Records
+    def __init__(self, record: _Optional[_Union[Record, _Mapping]] = ..., schema_change: _Optional[_Union[SchemaChange, _Mapping]] = ..., checkpoint: _Optional[_Union[Checkpoint, _Mapping]] = ..., warning: _Optional[_Union[_common_pb2.Warning, _Mapping]] = ..., task: _Optional[_Union[_common_pb2.Task, _Mapping]] = ..., records: _Optional[_Union[Records, _Mapping]] = ...) -> None: ...
 
 class SchemaChange(_message.Message):
     __slots__ = ("with_schema", "without_schema")
@@ -125,6 +127,12 @@ class SchemaChange(_message.Message):
     without_schema: _common_pb2.TableList
     def __init__(self, with_schema: _Optional[_Union[_common_pb2.SchemaList, _Mapping]] = ..., without_schema: _Optional[_Union[_common_pb2.TableList, _Mapping]] = ...) -> None: ...
 
+class Records(_message.Message):
+    __slots__ = ("records",)
+    RECORDS_FIELD_NUMBER: _ClassVar[int]
+    records: _containers.RepeatedCompositeFieldContainer[Record]
+    def __init__(self, records: _Optional[_Iterable[_Union[Record, _Mapping]]] = ...) -> None: ...
+
 class Record(_message.Message):
     __slots__ = ("schema_name", "table_name", "type", "data")
     class DataEntry(_message.Message):
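
The new Records wrapper is what _flush_buffer emits: one UpdateResponse carrying a batch of Record messages instead of one response per record. A small construction sketch; the import path of the generated modules is an assumption based on the package layout listed in RECORD:

    from fivetran_connector_sdk.protos import common_pb2, connector_sdk_pb2

    row = connector_sdk_pb2.Record(
        table_name="orders",
        type=common_pb2.RecordType.UPSERT,
    )
    batch = connector_sdk_pb2.UpdateResponse(
        records=connector_sdk_pb2.Records(records=[row])
    )
    print(len(batch.records.records))  # -> 1
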
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fivetran_connector_sdk
-Version: 1.7.4
+Version: 1.8.0
 Summary: Build custom connectors on Fivetran platform
 Author-email: Fivetran <developers@fivetran.com>
 Project-URL: Homepage, https://fivetran.com/docs/connectors/connector-sdk
@@ -0,0 +1,18 @@
+fivetran_connector_sdk/__init__.py,sha256=d_UKwca8fwy4nMGDLw655ISzjlC6KGV-z69gHZ_FiIU,22526
+fivetran_connector_sdk/connector_helper.py,sha256=hTEa_plE5xjPM0RFe9AziiJah5iGgO1oKGAGkibTyRo,43065
+fivetran_connector_sdk/constants.py,sha256=3vFpdoWGOSzcaw29P-GXQyJsSvKQgLcXU_0qdZrWQ0Y,2529
+fivetran_connector_sdk/helpers.py,sha256=k_iBaRacPN3YkOkZ8bLuflNYXkUrtuj6fYH_rV1M-RI,15224
+fivetran_connector_sdk/logger.py,sha256=ud8v8-mKx65OAPaZvxBqt2-CU0vjgBeiYwuiqsYh_hA,3063
+fivetran_connector_sdk/operations.py,sha256=9g-Eiy82qVHJGKfGHuIb1IZvVF9LbX_Kc1BldEs7BD0,16834
+fivetran_connector_sdk/protos/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+fivetran_connector_sdk/protos/common_pb2.py,sha256=zkzs6Rd-lvsev6Nsq37xc4HLJZ_uNXPkotCLY7Y7i5U,8770
+fivetran_connector_sdk/protos/common_pb2.pyi,sha256=FdqlPKRqiXdUDT3e7adP5X42_Qzv_ItydUNJFKnJJIE,11478
+fivetran_connector_sdk/protos/common_pb2_grpc.py,sha256=qni6h6BoA1nwJXr2bNtznfTkrMokzzeQd8XQMZIiZUc,887
+fivetran_connector_sdk/protos/connector_sdk_pb2.py,sha256=Inv87MlK5Q56GNvMNFQHyqIePDMKnkW9y_BrT9DgPck,7835
+fivetran_connector_sdk/protos/connector_sdk_pb2.pyi,sha256=3AC-bK6ZM-Bmr_RETOB3y_0u4ATWlwcbHzqVanDuOB0,8115
+fivetran_connector_sdk/protos/connector_sdk_pb2_grpc.py,sha256=bGlvc_vGwA9-FTqrj-BYlVcA-7jS8A9MSZ-XpZFytvY,8795
+fivetran_connector_sdk-1.8.0.dist-info/METADATA,sha256=_nmaaVX4rjtTGTlQL6XENjWm32oaZRK4HjP8PW0EgOA,3188
+fivetran_connector_sdk-1.8.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+fivetran_connector_sdk-1.8.0.dist-info/entry_points.txt,sha256=uQn0KPnFlQmXJfxlk0tifdNsSXWfVlnAFzNqjXZM_xM,57
+fivetran_connector_sdk-1.8.0.dist-info/top_level.txt,sha256=-_xk2MFY4psIh7jw1lJePMzFb5-vask8_ZtX-UzYWUI,23
+fivetran_connector_sdk-1.8.0.dist-info/RECORD,,
@@ -1,18 +0,0 @@
-fivetran_connector_sdk/__init__.py,sha256=2PC01krj_1210qy5n-_Gn7U6QZ35E9lbXpmf1CWlWMI,22573
-fivetran_connector_sdk/connector_helper.py,sha256=NDaaftRVsyEkdLFE8W59HFYpxfPsXZfIc1c0vTe03NI,43516
-fivetran_connector_sdk/constants.py,sha256=LBe_DOA1P8QLvqUDrMeNd-mdHWArTxKkbAhSp7zQD8w,2358
-fivetran_connector_sdk/helpers.py,sha256=k_iBaRacPN3YkOkZ8bLuflNYXkUrtuj6fYH_rV1M-RI,15224
-fivetran_connector_sdk/logger.py,sha256=ud8v8-mKx65OAPaZvxBqt2-CU0vjgBeiYwuiqsYh_hA,3063
-fivetran_connector_sdk/operations.py,sha256=yFhUqhMvbiHTZd3EPZXQuypwNGmyT-2yb0I8L6aDfkc,13288
-fivetran_connector_sdk/protos/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fivetran_connector_sdk/protos/common_pb2.py,sha256=zkzs6Rd-lvsev6Nsq37xc4HLJZ_uNXPkotCLY7Y7i5U,8770
-fivetran_connector_sdk/protos/common_pb2.pyi,sha256=FdqlPKRqiXdUDT3e7adP5X42_Qzv_ItydUNJFKnJJIE,11478
-fivetran_connector_sdk/protos/common_pb2_grpc.py,sha256=qni6h6BoA1nwJXr2bNtznfTkrMokzzeQd8XQMZIiZUc,887
-fivetran_connector_sdk/protos/connector_sdk_pb2.py,sha256=qbce2wyScUg4cYRRjuYMgi5p0vb1zEA9Jf58polYhhs,7589
-fivetran_connector_sdk/protos/connector_sdk_pb2.pyi,sha256=aE7DlQU3ZpuHK9aZrd1_cYs2_4Rl1lqSMw54BIXvYys,7721
-fivetran_connector_sdk/protos/connector_sdk_pb2_grpc.py,sha256=bGlvc_vGwA9-FTqrj-BYlVcA-7jS8A9MSZ-XpZFytvY,8795
-fivetran_connector_sdk-1.7.4.dist-info/METADATA,sha256=YZNVl5viNASXjyCMbu4bZtEKw0pDJzKiwfc1OVM-Jws,3188
-fivetran_connector_sdk-1.7.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-fivetran_connector_sdk-1.7.4.dist-info/entry_points.txt,sha256=uQn0KPnFlQmXJfxlk0tifdNsSXWfVlnAFzNqjXZM_xM,57
-fivetran_connector_sdk-1.7.4.dist-info/top_level.txt,sha256=-_xk2MFY4psIh7jw1lJePMzFb5-vask8_ZtX-UzYWUI,23
-fivetran_connector_sdk-1.7.4.dist-info/RECORD,,