bizon 0.0.8__py3-none-any.whl → 0.0.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bizon/destinations/bigquery/src/config.py +11 -0
- bizon/destinations/bigquery/src/destination.py +39 -3
- bizon/destinations/config.py +1 -0
- bizon/destinations/destination.py +6 -1
- bizon/destinations/models.py +33 -0
- bizon/engine/pipeline/producer.py +11 -0
- bizon/engine/queue/adapters/kafka/queue.py +4 -0
- bizon/engine/queue/adapters/python_queue/queue.py +6 -0
- bizon/engine/queue/adapters/rabbitmq/queue.py +5 -0
- bizon/engine/queue/queue.py +6 -1
- bizon/source/discover.py +1 -1
- bizon/sources/kafka/src/source.py +6 -3
- bizon/sources/kafka/tests/kafka_pipeline.py +3 -1
- {bizon-0.0.8.dist-info → bizon-0.0.10.dist-info}/METADATA +1 -1
- {bizon-0.0.8.dist-info → bizon-0.0.10.dist-info}/RECORD +18 -18
- {bizon-0.0.8.dist-info → bizon-0.0.10.dist-info}/LICENSE +0 -0
- {bizon-0.0.8.dist-info → bizon-0.0.10.dist-info}/WHEEL +0 -0
- {bizon-0.0.8.dist-info → bizon-0.0.10.dist-info}/entry_points.txt +0 -0
bizon/destinations/bigquery/src/config.py
CHANGED
@@ -15,6 +15,13 @@ class GCSBufferFormat(str, Enum):
     CSV = "csv"


+class TimePartitioning(str, Enum):
+    DAY = "DAY"
+    HOUR = "HOUR"
+    MONTH = "MONTH"
+    YEAR = "YEAR"
+
+
 class BigQueryAuthentication(BaseModel):
     service_account_key: str = Field(
         description="Service Account Key JSON string. If empty it will be infered",
@@ -31,6 +38,10 @@ class BigQueryConfigDetails(AbstractDestinationDetailsConfig):
     )
     gcs_buffer_bucket: str
     gcs_buffer_format: Optional[GCSBufferFormat] = GCSBufferFormat.PARQUET
+
+    time_partitioning: Optional[TimePartitioning] = Field(
+        default=TimePartitioning.DAY, description="BigQuery Time partitioning type"
+    )
     authentication: Optional[BigQueryAuthentication] = None

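For context, a minimal standalone sketch (not package code) of the pattern this hunk introduces: a str-valued Enum on a Pydantic model, so a config value like "HOUR" coerces to TimePartitioning.HOUR and can later be handed to BigQuery, which expects the partitioning type as a string.

    from enum import Enum
    from typing import Optional

    from pydantic import BaseModel, Field


    class TimePartitioning(str, Enum):
        DAY = "DAY"
        HOUR = "HOUR"
        MONTH = "MONTH"
        YEAR = "YEAR"


    class PartitioningConfig(BaseModel):
        # Mirrors the new BigQueryConfigDetails field
        time_partitioning: Optional[TimePartitioning] = Field(
            default=TimePartitioning.DAY, description="BigQuery Time partitioning type"
        )


    assert PartitioningConfig().time_partitioning is TimePartitioning.DAY
    assert PartitioningConfig(time_partitioning="HOUR").time_partitioning is TimePartitioning.HOUR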
bizon/destinations/bigquery/src/destination.py
CHANGED
@@ -1,4 +1,5 @@
 import io
+import json
 import os
 import tempfile
 from typing import List, Tuple
@@ -9,7 +10,7 @@ import pyarrow as pa
 import pyarrow.parquet as pq
 from google.api_core.exceptions import NotFound
 from google.cloud import bigquery, storage
-from google.cloud.bigquery import DatasetReference
+from google.cloud.bigquery import DatasetReference, TimePartitioning
 from loguru import logger
 from pytz import UTC

@@ -70,7 +71,25 @@ class BigQueryDestination(AbstractDestination):
                 bigquery.SchemaField("_source_timestamp", "TIMESTAMP", mode="REQUIRED"),
                 bigquery.SchemaField("_source_data", "STRING", mode="NULLABLE"),
                 bigquery.SchemaField("_bizon_extracted_at", "TIMESTAMP", mode="REQUIRED"),
-                bigquery.SchemaField(
+                bigquery.SchemaField(
+                    "_bizon_loaded_at", "TIMESTAMP", mode="REQUIRED", default_value_expression="CURRENT_TIMESTAMP()"
+                ),
+                bigquery.SchemaField("_bizon_id", "STRING", mode="REQUIRED"),
+            ]
+
+        elif self.config.normalization.type == NormalizationType.DEBEZIUM:
+            assert (
+                "_bizon_message_key" in destination_records[0].source_data
+            ), "Debezium records must have a '_bizon_message_key' key"
+            message_keys = json.loads(destination_records[0].source_data["_bizon_message_key"])
+            return [bigquery.SchemaField(key, "STRING", mode="NULLABLE") for key in message_keys] + [
+                bigquery.SchemaField("_source_data", "STRING", mode="NULLABLE"),
+                bigquery.SchemaField("_source_record_id", "STRING", mode="REQUIRED"),
+                bigquery.SchemaField("_source_timestamp", "TIMESTAMP", mode="REQUIRED"),
+                bigquery.SchemaField("_bizon_extracted_at", "TIMESTAMP", mode="REQUIRED"),
+                bigquery.SchemaField(
+                    "_bizon_loaded_at", "TIMESTAMP", mode="REQUIRED", default_value_expression="CURRENT_TIMESTAMP()"
+                ),
                 bigquery.SchemaField("_bizon_id", "STRING", mode="REQUIRED"),
             ]

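To illustrate the new DEBEZIUM branch (a sketch with a made-up composite key, not package code): the JSON-encoded _bizon_message_key of the first record determines the leading key columns, followed by the fixed _source_*/_bizon_* columns listed in the diff.

    import json

    from google.cloud import bigquery

    # Hypothetical Debezium record keyed on (id, tenant)
    source_data = {"_bizon_message_key": json.dumps({"id": 42, "tenant": "acme"})}

    message_keys = json.loads(source_data["_bizon_message_key"])
    key_fields = [bigquery.SchemaField(key, "STRING", mode="NULLABLE") for key in message_keys]

    print([field.name for field in key_fields])  # ['id', 'tenant']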
@@ -81,7 +100,9 @@ class BigQueryDestination(AbstractDestination):
                 bigquery.SchemaField("_source_record_id", "STRING", mode="REQUIRED"),
                 bigquery.SchemaField("_source_timestamp", "TIMESTAMP", mode="REQUIRED"),
                 bigquery.SchemaField("_bizon_extracted_at", "TIMESTAMP", mode="REQUIRED"),
-                bigquery.SchemaField(
+                bigquery.SchemaField(
+                    "_bizon_loaded_at", "TIMESTAMP", mode="REQUIRED", default_value_expression="CURRENT_TIMESTAMP()"
+                ),
                 bigquery.SchemaField("_bizon_id", "STRING", mode="REQUIRED"),
             ]

@@ -113,6 +134,10 @@ class BigQueryDestination(AbstractDestination):
                 int(record.source_timestamp.timestamp() * 1_000_000) for record in destination_records
             ]

+        elif self.config.normalization.type == NormalizationType.DEBEZIUM:
+            df = pd.DataFrame([record.to_dict_debezium(parquet=True) for record in destination_records])
+            df["_bizon_loaded_at"] = pd.Timestamp.now(tz=UTC)
+
         else:
             raise NotImplementedError(f"Normalization type {self.config.normalization.type} is not supported")

@@ -155,15 +180,26 @@ class BigQueryDestination(AbstractDestination):
         # https://cloud.google.com/python/docs/reference/bigquery/latest/google.cloud.bigquery.dbapi.DataError

     def load_to_bigquery(self, gcs_file: str, destination_records: List[DestinationRecord]):
+
+        # We always partition by the loaded_at field
+        time_partitioning = TimePartitioning(field="_bizon_loaded_at", type_=self.config.time_partitioning)
+
         job_config = bigquery.LoadJobConfig(
             source_format=bigquery.SourceFormat.PARQUET,
             write_disposition=bigquery.WriteDisposition.WRITE_APPEND,
             schema=self.get_bigquery_schema(destination_records=destination_records),
+            time_partitioning=time_partitioning,
         )

+        if self.config.normalization.type == NormalizationType.DEBEZIUM:
+            job_config.clustering_fields = list(
+                json.loads(destination_records[0].source_data["_bizon_message_key"]).keys()
+            )
+
         load_job = self.bq_client.load_table_from_uri(
             f"gs://{self.buffer_bucket_name}/{gcs_file}", self.temp_table_id, job_config=job_config
         )
+
         load_job.result()

     def write_records(self, destination_records: List[DestinationRecord]) -> Tuple[bool, str]:
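Taken together, the resulting load job looks roughly like the sketch below (the clustering columns id/tenant are assumed for illustration): every load is time-partitioned on _bizon_loaded_at, and Debezium loads are additionally clustered on the message-key columns.

    from google.cloud import bigquery
    from google.cloud.bigquery import TimePartitioning

    job_config = bigquery.LoadJobConfig(
        source_format=bigquery.SourceFormat.PARQUET,
        write_disposition=bigquery.WriteDisposition.WRITE_APPEND,
        # type_ accepts "DAY", "HOUR", "MONTH" or "YEAR", matching the config enum
        time_partitioning=TimePartitioning(field="_bizon_loaded_at", type_="DAY"),
    )
    # Debezium only: cluster on the columns derived from _bizon_message_key
    job_config.clustering_fields = ["id", "tenant"]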
bizon/destinations/config.py
CHANGED
@@ -13,6 +13,7 @@ class DestinationTypes(str, Enum):
 class NormalizationType(str, Enum):
     TABULAR = "tabular"  # Parse key / value pairs to columns
     NONE = "none"  # No normalization, raw data is stored
+    DEBEZIUM = "debezium"  # Debezium normalization


 class NormalizationConfig(BaseModel):
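As a quick illustration (not package code), the new member makes "debezium" a valid normalization type wherever the config is parsed:

    from enum import Enum

    class NormalizationType(str, Enum):
        TABULAR = "tabular"
        NONE = "none"
        DEBEZIUM = "debezium"

    assert NormalizationType("debezium") is NormalizationType.DEBEZIUM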
bizon/destinations/destination.py
CHANGED
@@ -85,11 +85,16 @@ class AbstractDestination(ABC):
             pagination=self.buffer.pagination,
         )

+        logger.info(
+            f"Writing in destination from source iteration {self.buffer.from_iteration} to {self.buffer.to_iteration}"
+        )
+
         success, error_msg = self.write_records(destination_records=self.buffer.records)

         if success:
             # We wrote records to destination so we keep it
             destination_iteration.records_written = len(self.buffer.records)
+            logger.info(f"Successfully wrote {destination_iteration.records_written} records to destination")

         else:
             # We failed to write records to destination so we keep the error message
@@ -160,7 +165,7 @@ class AbstractDestination(ABC):
         logger.debug(f"Buffer free space {self.buffer.buffer_free_space_pct}%")
         logger.debug(f"Buffer current size {self.buffer.current_size} bytes")
         logger.info(
-            f"Buffer ripeness {self.buffer.ripeness / 60} min. Max ripeness {self.buffer.buffer_flush_timeout / 60} min."  # noqa
+            f"Buffer ripeness {round(self.buffer.ripeness / 60, 2)} min. Max ripeness {round(self.buffer.buffer_flush_timeout / 60, 2)} min."  # noqa
         )

         # Write buffer to destination if buffer is ripe and create a new buffer for the new iteration
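The change to the ripeness log line is purely cosmetic: values are now rounded to two decimals, e.g.:

    ripeness_seconds = 125
    assert round(ripeness_seconds / 60, 2) == 2.08  # previously rendered as 2.0833333333333335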
bizon/destinations/models.py
CHANGED
@@ -27,6 +27,39 @@ class DestinationRecord(BaseModel):
             source_data=source_record.data,
         )

+    def to_dict_debezium(self, parquet: bool = False) -> dict:
+        """Return the record as a dict with Debezium data"""
+
+        # Extract keys from Debezium message key and unnest
+        parsed_debezium_keys = json.loads(self.source_data["_bizon_message_key"])
+
+        # Parse Debezium Operation and deleted record
+        if self.source_data.get("op") == "d":
+            parsed_source_data = {"__deleted": True, **self.source_data["before"]}
+        else:
+            parsed_source_data = {"__deleted": False, **self.source_data["after"]}
+
+        if parquet:
+            return {
+                **{k: str(v) for k, v in parsed_debezium_keys.items()},
+                "_bizon_id": self.bizon_id,
+                "_bizon_extracted_at": int(self.bizon_extracted_at.timestamp() * 1_000_000),
+                "_bizon_loaded_at": self.bizon_loaded_at.timestamp(),
+                "_source_record_id": self.source_record_id,
+                "_source_timestamp": int(self.source_timestamp.timestamp() * 1_000_000),
+                "_source_data": json.dumps(parsed_source_data),
+            }
+
+        return {
+            **{k: str(v) for k, v in parsed_debezium_keys.items()},
+            "_bizon_id": self.bizon_id,
+            "_bizon_extracted_at": self.bizon_extracted_at,
+            "_bizon_loaded_at": self.bizon_loaded_at,
+            "_source_record_id": self.source_record_id,
+            "_source_timestamp": self.source_timestamp,
+            "_source_data": json.dumps(parsed_source_data),
+        }
+
     def to_dict_raw_json_data(self, parquet: bool = False) -> str:
         """Return the record as a dict with raw JSON data"""
bizon/engine/pipeline/producer.py
CHANGED
@@ -104,6 +104,8 @@ class Producer:

         while not cursor.is_finished:

+            timestamp_start_iteration = datetime.now(tz=UTC)
+
             # Handle the case where last cursor already reach max_iterations
             terminate = self.handle_max_iterations(cursor)
             if terminate:
@@ -178,6 +180,15 @@ class Producer:
                 return_value = PipelineReturnStatus.SOURCE_ERROR
                 break

+            # Items in queue
+            items_in_queue = f"{self.queue.get_size()} items in queue." if self.queue.get_size() else ""
+
+            logger.info(
+                (
+                    f"Iteration {cursor.iteration} finished in {datetime.now(tz=UTC) - timestamp_start_iteration}. {items_in_queue}"
+                )
+            )
+
         logger.info("Terminating destination ...")

         try:
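Note the conditional suffix: the queue size only appears in the log when get_size() returns a truthy value, so both None (backends that cannot report a size) and 0 produce an empty string. A tiny check of that behavior:

    def items_suffix(size):
        return f"{size} items in queue." if size else ""

    assert items_suffix(None) == ""
    assert items_suffix(0) == ""
    assert items_suffix(3) == "3 items in queue."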
bizon/engine/queue/adapters/kafka/queue.py
CHANGED
@@ -1,4 +1,5 @@
 import json
+from typing import Union

 from kafka import KafkaProducer
 from loguru import logger
@@ -36,6 +37,9 @@ class KafkaQueue(AbstractQueue):
         def on_error(e):
             logger.error(f"Error sending message: {e}")

+    def get_size(self) -> Union[int, None]:
+        return None
+
     def put_queue_message(self, queue_message: QueueMessage):
         future = self.producer.send(
             topic=self.config.queue.topic,
bizon/engine/queue/adapters/python_queue/queue.py
CHANGED
@@ -1,6 +1,7 @@
 import random
 import time
 from multiprocessing import Queue
+from typing import Union

 from loguru import logger

@@ -52,6 +53,11 @@ class PythonQueue(AbstractQueue):
             time.sleep(random.random())
             return self.get()

+    def get_size(self) -> Union[int, None]:
+        if hasattr(self.queue, "qsize"):
+            return self.queue.qsize()
+        return None
+
     def terminate(self, iteration: int) -> bool:
         self.put(source_records=[], iteration=iteration, signal=QUEUE_TERMINATION)
         logger.info("Sent termination signal to destination.")
bizon/engine/queue/adapters/rabbitmq/queue.py
CHANGED
@@ -1,3 +1,5 @@
+from typing import Union
+
 import pika
 from loguru import logger

@@ -31,6 +33,9 @@ class RabbitMQ(AbstractQueue):
             body=queue_message.model_dump_json(),
         )

+    def get_size(self) -> Union[int, None]:
+        return None
+
     def get(self) -> QueueMessage:
         raise NotImplementedError(
             "RabbitMQ does not support getting messages from the queue, directly use callback in consumer."
bizon/engine/queue/queue.py
CHANGED
@@ -1,6 +1,6 @@
 from abc import ABC, abstractmethod
 from datetime import datetime
-from typing import List, Optional
+from typing import List, Optional, Union

 from pydantic import BaseModel
 from pytz import UTC
@@ -45,6 +45,11 @@ class AbstractQueue(ABC):
         """Get a QueueMessage object from the queue system"""
         pass

+    @abstractmethod
+    def get_size(self) -> Union[int, None]:
+        """If queue is compatible, return size of the queue"""
+        pass
+
     @abstractmethod
     def terminate(self, iteration: int) -> bool:
         """Send a termination signal in the queue system"""
bizon/source/discover.py
CHANGED
@@ -143,7 +143,7 @@ def parse_streams_from_filepath(source_name: str, filepath: str, skip_unavailabl

     # Transform the relative path to a python import path and import the module
     python_import_path = get_python_import_path(relative_path)
-    logger.
+    logger.debug(f"Importing {python_import_path}")

     try:
         source_module = importlib.import_module(python_import_path, package="sources")
bizon/sources/kafka/src/source.py
CHANGED
@@ -45,6 +45,8 @@ class KafkaSourceConfig(SourceConfig):
     consumer_timeout: int = Field(10, description="Kafka consumer timeout in seconds")
     group_id: str = Field("bizon", description="Kafka group id")

+    max_consumer_threads: int = Field(16, description="Maximum number of threads for the consumer")
+
     nb_bytes_schema_id: Literal[4, 8] = Field(
         4, description="Number of bytes for the schema id. 4 is the default for majority of the cases"
     )
@@ -96,7 +98,7 @@ class KafkaSource(AbstractSource):
         }

         # Consumer instance
-        self.consumer = Consumer(self.kafka_consumer_conf)
+        self.consumer = Consumer(self.kafka_consumer_conf, logger=logger)

     @staticmethod
     def streams() -> List[str]:
@@ -200,7 +202,7 @@ class KafkaSource(AbstractSource):
         source_timestamp = datetime.now(tz=timezone.utc)

         # Set consumer offset params
-        consumer = Consumer(self.kafka_consumer_conf)
+        consumer = Consumer(self.kafka_consumer_conf, logger=logger)
         consumer.assign([TopicPartition(self.config.topic, partition, topic_offsets.get_partition_offset(partition))])
         consumer.seek(TopicPartition(self.config.topic, partition, topic_offsets.get_partition_offset(partition)))

@@ -223,6 +225,7 @@ class KafkaSource(AbstractSource):
             raise ValueError(f"Number of bytes for schema id {self.config.nb_bytes_schema_id} not supported")

         data = self.decode(message.value(), schema)
+        data["_bizon_message_key"] = message.key().decode("utf-8")

         # Get the source timestamp
         if self.parse_timestamp:
@@ -261,7 +264,7 @@ class KafkaSource(AbstractSource):

         # Use ThreadPoolExecutor to parallelize reading partitions
         records = []
-        with ThreadPoolExecutor(max_workers=nb_partitions) as executor:
+        with ThreadPoolExecutor(max_workers=min(nb_partitions, self.config.max_consumer_threads)) as executor:
             futures = {executor.submit(self.read_partition, i, topic_offsets): i for i in range(nb_partitions)}
             for future in as_completed(futures):
                 partition_records = future.result()
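The effect of the new cap, using the default max_consumer_threads of 16: topics with many partitions no longer spawn one consumer thread per partition.

    max_consumer_threads = 16  # default from KafkaSourceConfig

    assert min(4, max_consumer_threads) == 4    # small topics: unchanged
    assert min(64, max_consumer_threads) == 16  # large topics: capped at 16 threads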
bizon/sources/kafka/tests/kafka_pipeline.py
CHANGED
@@ -3,5 +3,7 @@ import os
 from bizon.engine.engine import RunnerFactory

 if __name__ == "__main__":
-    runner = RunnerFactory.create_from_yaml(
+    runner = RunnerFactory.create_from_yaml(
+        filepath=os.path.abspath("bizon/sources/kafka/config/kafka_teams_users_eu_west1_c511.yml")
+    )
     runner.run()
{bizon-0.0.8.dist-info → bizon-0.0.10.dist-info}/RECORD
CHANGED
@@ -6,16 +6,16 @@ bizon/common/errors/backoff.py,sha256=z7RkQt1Npdh0sfD3hBDaiWQKe4iqS6ewvT1Q4Fds5a
 bizon/common/errors/errors.py,sha256=mrYx1uE2kOuR2pEaB7ztK1l2m0E4V-_-hxq-DuILerY,682
 bizon/common/models.py,sha256=7_HKAxOyN9eK8hmqahzHhmK-TYVAuRtGOgf4iadE7FI,1751
 bizon/destinations/bigquery/config/bigquery.example.yml,sha256=mvKtFS_PUuekyMh9xssuwRfFwLtR-rVvpIy5xmF5__k,1261
-bizon/destinations/bigquery/src/config.py,sha256=
-bizon/destinations/bigquery/src/destination.py,sha256=
+bizon/destinations/bigquery/src/config.py,sha256=QlD-FdBJ8Q6nKPrOf5q28lHnyFE8khT41dSR1s2meeM,1378
+bizon/destinations/bigquery/src/destination.py,sha256=tPxE0IpHbR4zDkW5HaiHkgeDRDY2AibIPzY9iftZ2Uc,11079
 bizon/destinations/buffer.py,sha256=bFYkaoge-3AyKfGolqsuB3PWWtdPt65Fllrz-3X_uMI,2594
-bizon/destinations/config.py,sha256=
-bizon/destinations/destination.py,sha256=
+bizon/destinations/config.py,sha256=jD4nkG-sg7mzJMFKLErQBkJu7ri0PMbCRVU3xIvFT7E,1686
+bizon/destinations/destination.py,sha256=qY2VIr_vEi7p589WPN6YeSRQcgehuDJtyLrvPDIC71Q,11276
 bizon/destinations/file/src/config.py,sha256=C4BBIKzBH5343iLGR3aCubAGjPo0b2LegsCLjb77uFA,513
 bizon/destinations/file/src/destination.py,sha256=1VCrVdtzAzwSKgYq0JUOc3r2cM7314dV-eIoAFhM_64,1003
 bizon/destinations/logger/src/config.py,sha256=AWY3R9q3ZjD3uQ_KBq8VcW60deKSIHe3qtgCKjdywKk,433
 bizon/destinations/logger/src/destination.py,sha256=xTt03F3AMI9KhQno2tGoCr3eacrO62qjnOlpeEHk6tQ,868
-bizon/destinations/models.py,sha256=
+bizon/destinations/models.py,sha256=hK7yXMoOArLJ5sUS9kgljXMBaq2vqu1l_7u707yS1KM,3630
 bizon/engine/backend/adapters/sqlalchemy/backend.py,sha256=R0CztRGc3_6PdIIgbbrDYD2OJRNhq9PPmD6PYK7-fjk,15567
 bizon/engine/backend/adapters/sqlalchemy/config.py,sha256=K-FpE_-VHnTSAQOduouhXFVy43EkrKbeZLqr9_OfeMw,1846
 bizon/engine/backend/backend.py,sha256=Bodqoo5qJHV0H2zJJeGytaHGiNZmBjnLBxiRgq6M3kE,5844
@@ -25,18 +25,18 @@ bizon/engine/config.py,sha256=cKgI1IfzDncoxG3FsKUz-Aa3fU41ucQPaafjjhKeU90,2039
 bizon/engine/engine.py,sha256=bdQksSQfxkeAHbbe52_MbqTJieOURjlMGYtkCCaDtuc,990
 bizon/engine/pipeline/consumer.py,sha256=HU3G2_h5ZUM217mnKSktdvib2nRc9r8OzvqWodRdFk0,424
 bizon/engine/pipeline/models.py,sha256=kfr_kqkJMEVlWX35rJiYMCuEBCrNhsx9R0a19E39i14,216
-bizon/engine/pipeline/producer.py,sha256=
+bizon/engine/pipeline/producer.py,sha256=k0dzSa6_7PiTJF0UtX0BAfvSTS0h7XFVZ3JN_3_ZvQQ,8330
 bizon/engine/queue/adapters/kafka/config.py,sha256=o7GAb_ls9N0nQV04B6Y4XjLo-Q57x28r63gjFG9LvVg,1091
 bizon/engine/queue/adapters/kafka/consumer.py,sha256=mh25mTjO7w6CGwJDWtxHVocwZi6DbTIVncm81rmhKrw,2576
-bizon/engine/queue/adapters/kafka/queue.py,sha256=
+bizon/engine/queue/adapters/kafka/queue.py,sha256=IS6akN7F81lkAajQdgqSqlqAg3r8uXbw6SdByDgvdMM,1965
 bizon/engine/queue/adapters/python_queue/config.py,sha256=D_CAuWJtdMQmQcm9gq9YBrkeFHAxZKRc7kIISliyp_4,847
 bizon/engine/queue/adapters/python_queue/consumer.py,sha256=yEoDF6QEmr9gjNGxXRqypdIHIJ50lQh_fFDhDXk6_g8,1566
-bizon/engine/queue/adapters/python_queue/queue.py,sha256=
+bizon/engine/queue/adapters/python_queue/queue.py,sha256=VVc5A7qU2wgWEeeG6UOmgkmoIiwZ7GZGjSiBThloFzk,2259
 bizon/engine/queue/adapters/rabbitmq/config.py,sha256=9N_7WREvNjJgcNTC3Y2kHII-iId2MZa3ssHHks6PyAs,987
 bizon/engine/queue/adapters/rabbitmq/consumer.py,sha256=cN6K8wSBIQUSuRD7VsNltS6ElZ32PW92ZXiugzIDPJU,2019
-bizon/engine/queue/adapters/rabbitmq/queue.py,sha256=
+bizon/engine/queue/adapters/rabbitmq/queue.py,sha256=gaTCIY_mCfWt8LCjfEymZuIiwqPkYQoVvaOacRYgLJo,1709
 bizon/engine/queue/config.py,sha256=PN9Je_Q9Sxo-3fI8lI6rZPQ9oeWatnD4rzUTWj3NhnA,792
-bizon/engine/queue/queue.py,sha256=
+bizon/engine/queue/queue.py,sha256=_pOhmDZs79V7XgjthrKsFxZvae_4_cvol97jcZ-YR3g,2926
 bizon/engine/runner/adapters/process.py,sha256=idyknLADcmhCS4614WtyO-FqaYChV243gvjzPWvk0KE,2525
 bizon/engine/runner/adapters/thread.py,sha256=HUIJm5xg_yKdD2JOndvrod6x3qQ3uC6uakfc4m3XMso,2609
 bizon/engine/runner/config.py,sha256=QPgfy6YnS-EW8nhpTg1aRHshbGz5QTrQ5R3pDmLkIE0,1272
@@ -51,7 +51,7 @@ bizon/source/auth/builder.py,sha256=hc4zBNj31LZc-QqgIyx1VQEYTm9Xv81vY5pJiwQroJo,
 bizon/source/auth/config.py,sha256=2jjcBLP95XsCkfKxdUei4X2yHI2WX92lJb8D8Txw86g,750
 bizon/source/config.py,sha256=DPwJsBfU48yMvCw-pQCEha4X-IUjvmnQzjTwgsaCxAA,2307
 bizon/source/cursor.py,sha256=TSgWe1T9b4x7EEsbk22hwTWwVXCk5vdrs9eaHNhrevo,3983
-bizon/source/discover.py,sha256=
+bizon/source/discover.py,sha256=C0_SnFxeHpz4VernxAfu2gbnQuoqv0cWX9z5J3WlCKw,11120
 bizon/source/models.py,sha256=iVp0H4muOWGst1W5DuxEVtHIY6lewOV8zDZUqvPTcBk,1337
 bizon/source/session.py,sha256=z4dZlKC_PD8w_utTuAqs1vsfGuRkxHh5WQZhVKamNd0,1979
 bizon/source/source.py,sha256=NhxMU1yXgi7mL64RyeymOYNqRk6fad9v_S8lhvXYUI0,3390
@@ -76,16 +76,16 @@ bizon/sources/hubspot/src/hubspot_objects.py,sha256=EmABx9XD8q6g4Uc5mHLv5YYl5KcI
 bizon/sources/hubspot/src/models/hs_object.py,sha256=-Y20H3-nenJyySMlvM4TPttPz4O8qm3ArKP_I8pxsuo,1235
 bizon/sources/hubspot/tests/hubspot_pipeline.py,sha256=e6dCF5_MHMySkeiF6kKrSAuCa_48J22-ZeSCZSjrfUI,216
 bizon/sources/kafka/config/kafka.example.yml,sha256=ZyHBmSWZ_5WQaBr9WzD05PuE6vi3hhYgHh2VZ-IU-Iw,755
-bizon/sources/kafka/src/source.py,sha256=
-bizon/sources/kafka/tests/kafka_pipeline.py,sha256=
+bizon/sources/kafka/src/source.py,sha256=0_LJKJBAy8LeW0AV0T6wGecwP-1Uid-ri9dDyU9KxKQ,11373
+bizon/sources/kafka/tests/kafka_pipeline.py,sha256=DrMHq96ZDiQ2lWmxEf_aX7HmBg_qNOsSFGTuGmuhly8,252
 bizon/sources/periscope/config/periscope_charts.example.yml,sha256=rpFDAWeU5oZ3UOiX0sSAgd1X5lv6t-s3iqiDPnRqutU,477
 bizon/sources/periscope/config/periscope_dashboards.example.yml,sha256=sN2iGGqCQCvrMXcwxNGq_dR7-KZ1KtYdXmNYKXlfEpg,481
 bizon/sources/periscope/src/source.py,sha256=AZM-HDDjdTWj8akeeofQ_-G8YlnNHEKi2mjEQSYwOvE,7638
 bizon/sources/periscope/tests/periscope_pipeline_charts.py,sha256=mU0JtfhS1KmWsS3iovGhGxK7iPVWiYzjBM_QfRL3ZQI,275
 bizon/sources/periscope/tests/periscope_pipeline_dashboard.py,sha256=vZKN7UfH-lQIWrnfjPqQFjZm28UIw2m9OSg4yS-Wckk,279
 bizon/utils.py,sha256=HXaPiyxpWKoy3XN5vSYOve1ezlFeOYin3aFqTjcabUQ,81
-bizon-0.0.
-bizon-0.0.
-bizon-0.0.
-bizon-0.0.
-bizon-0.0.
+bizon-0.0.10.dist-info/LICENSE,sha256=AW7SjYVT2bBnXOxgDxqy_e_JF8jDCFlMCaPCF11wFDI,1072
+bizon-0.0.10.dist-info/METADATA,sha256=uG50AwUmj9iTn541sPTxfdXHpZ57NHX_rTo9cG-28jw,5647
+bizon-0.0.10.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+bizon-0.0.10.dist-info/entry_points.txt,sha256=wtCd-6JswSY8lPWYSvOf7ASX1zfKgmgXtgg5XQS5274,44
+bizon-0.0.10.dist-info/RECORD,,
{bizon-0.0.8.dist-info → bizon-0.0.10.dist-info}/LICENSE
File without changes

{bizon-0.0.8.dist-info → bizon-0.0.10.dist-info}/WHEEL
File without changes

{bizon-0.0.8.dist-info → bizon-0.0.10.dist-info}/entry_points.txt
File without changes