ingestr 0.6.5__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

ingestr/src/sql_database/helpers.py CHANGED

@@ -1,12 +1,12 @@
 """SQL database source helpers"""
 
 import operator
+import warnings
 from typing import (
     Any,
     Callable,
     Dict,
     Iterator,
-    List,
     Literal,
     Optional,
     Union,
@@ -16,16 +16,19 @@ import dlt
 from dlt.common.configuration.specs import BaseConfiguration, configspec
 from dlt.common.exceptions import MissingDependencyException
 from dlt.common.schema import TTableSchemaColumns
-from dlt.common.typing import TDataItem
-from dlt.sources.credentials import ConnectionStringCredentials
-from sqlalchemy import Table, create_engine
+from dlt.common.typing import TDataItem, TSortOrder
+from sqlalchemy import create_engine
 from sqlalchemy.engine import Engine
+from sqlalchemy.exc import CompileError
 
-from ingestr.src.sql_database.override import IngestrConnectionStringCredentials
-
+from .arrow_helpers import row_tuples_to_arrow
+from .override import IngestrConnectionStringCredentials as ConnectionStringCredentials
 from .schema_types import (
+    ReflectionLevel,
     SelectAny,
-    row_tuples_to_arrow,
+    Table,
+    TTypeAdapter,
+    get_primary_key,
     table_to_columns,
 )
 
@@ -57,7 +60,7 @@ class TableLoader:
                 ) from e
             self.last_value = incremental.last_value
             self.end_value = incremental.end_value
-            self.row_order = getattr(self.incremental, "row_order", None)
+            self.row_order: TSortOrder = self.incremental.row_order
         else:
             self.cursor_column = None
             self.last_value = None
@@ -69,6 +72,7 @@ class TableLoader:
         query = table.select()
         if not self.incremental:
             return query
+
         last_value_func = self.incremental.last_value_func
 
         # generate where
@@ -90,9 +94,13 @@ class TableLoader:
 
         # generate order by from declared row order
         order_by = None
-        if self.row_order == "asc":
+        if (self.row_order == "asc" and last_value_func is max) or (
+            self.row_order == "desc" and last_value_func is min
+        ):
             order_by = self.cursor_column.asc()
-        elif self.row_order == "desc":
+        elif (self.row_order == "asc" and last_value_func is min) or (
+            self.row_order == "desc" and last_value_func is max
+        ):
             order_by = self.cursor_column.desc()
         if order_by is not None:
             query = query.order_by(order_by)
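
The reworked ordering block pairs the declared `row_order` with the scan direction implied by `last_value_func`: with `max` the declared order is applied literally, with `min` it is inverted. A minimal standalone sketch of the same decision table (the function name `effective_direction` is illustrative, not part of the package):

    from typing import Any, Callable, Optional

    def effective_direction(
        row_order: Optional[str], last_value_func: Callable[..., Any]
    ) -> Optional[str]:
        # Mirrors the order_by branch in TableLoader above.
        if (row_order == "asc" and last_value_func is max) or (
            row_order == "desc" and last_value_func is min
        ):
            return "asc"
        if (row_order == "asc" and last_value_func is min) or (
            row_order == "desc" and last_value_func is max
        ):
            return "desc"
        return None

    assert effective_direction("asc", max) == "asc"
    assert effective_direction("asc", min) == "desc"  # a min-cursor flips the scan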
@@ -121,14 +129,15 @@ class TableLoader:
             elif self.backend == "pandas":
                 from dlt.common.libs.pandas_sql import _wrap_result
 
-                yield _wrap_result(
+                df = _wrap_result(
                     partition,
                     columns,
                     **{"dtype_backend": "pyarrow", **backend_kwargs},
                 )
+                yield df
             elif self.backend == "pyarrow":
                 yield row_tuples_to_arrow(
-                    partition, self.columns, tz=backend_kwargs.get("tz")
+                    partition, self.columns, tz=backend_kwargs.get("tz", "UTC")
                 )
 
     def _load_rows_connectorx(
@@ -153,11 +162,15 @@ class TableLoader:
                 drivername=self.engine.url.get_backend_name()
             ).render_as_string(hide_password=False),
         )
-        df = cx.read_sql(
-            conn,
-            str(query.compile(self.engine, compile_kwargs={"literal_binds": True})),
-            **backend_kwargs,
-        )
+        try:
+            query_str = str(
+                query.compile(self.engine, compile_kwargs={"literal_binds": True})
+            )
+        except CompileError as ex:
+            raise NotImplementedError(
+                f"Query for table {self.table.name} could not be compiled to string to execute it on ConnectorX. If you are on SQLAlchemy 1.4.x the causing exception is due to literals that cannot be rendered, upgrade to 2.x: {str(ex)}"
+            ) from ex
+        df = cx.read_sql(conn, query_str, **backend_kwargs)
        yield df
 
 
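The new guard surfaces SQLAlchemy 1.4's inability to render some literal types inline, which ConnectorX requires since it only accepts a plain SQL string. The compile step can be reproduced standalone; a minimal sketch with made-up table and column names:

    from sqlalchemy import column, select, table
    from sqlalchemy.exc import CompileError

    events = table("events", column("id"), column("created_at"))
    query = select(events).where(column("id") > 100)

    try:
        # Render bound parameters inline so the SQL string can be handed to ConnectorX.
        sql = str(query.compile(compile_kwargs={"literal_binds": True}))
        print(sql)
    except CompileError as ex:
        # On SQLAlchemy 1.4.x some literals cannot be rendered inline.
        print(f"cannot render literals: {ex}")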
@@ -167,10 +180,11 @@ def table_rows(
     chunk_size: int,
     backend: TableBackend,
     incremental: Optional[dlt.sources.incremental[Any]] = None,
-    detect_precision_hints: bool = False,
     defer_table_reflect: bool = False,
     table_adapter_callback: Callable[[Table], None] = None,
+    reflection_level: ReflectionLevel = "minimal",
     backend_kwargs: Dict[str, Any] = None,
+    type_adapter_callback: Optional[TTypeAdapter] = None,
 ) -> Iterator[TDataItem]:
     columns: TTableSchemaColumns = None
     if defer_table_reflect:
@@ -179,7 +193,7 @@ def table_rows(
         )
         if table_adapter_callback:
             table_adapter_callback(table)
-        columns = table_to_columns(table, detect_precision_hints)
+        columns = table_to_columns(table, reflection_level, type_adapter_callback)
 
         # set the primary_key in the incremental
         if incremental and incremental.primary_key is None:
@@ -196,7 +210,7 @@ def table_rows(
         )
     else:
         # table was already reflected
-        columns = table_to_columns(table, detect_precision_hints)
+        columns = table_to_columns(table, reflection_level, type_adapter_callback)
 
     loader = TableLoader(
         engine, backend, table, columns, incremental=incremental, chunk_size=chunk_size
@@ -205,19 +219,13 @@ def table_rows(
 
 
 def engine_from_credentials(
-    credentials: Union[ConnectionStringCredentials, Engine, str],
+    credentials: Union[ConnectionStringCredentials, Engine, str], **backend_kwargs: Any
 ) -> Engine:
     if isinstance(credentials, Engine):
         return credentials
     if isinstance(credentials, ConnectionStringCredentials):
         credentials = credentials.to_native_representation()
-    return create_engine(credentials)
-
-
-def get_primary_key(table: Table) -> List[str]:
-    """Create primary key or return None if no key defined"""
-    primary_key = [c.name for c in table.primary_key]
-    return primary_key if len(primary_key) > 0 else None
+    return create_engine(credentials, **backend_kwargs)
 
 
 def unwrap_json_connector_x(field: str) -> TDataItem:
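
Since `engine_from_credentials` now forwards `**backend_kwargs` to `create_engine`, standard engine options ride along unchanged. A usage sketch (the DSN is a placeholder):

    from ingestr.src.sql_database.helpers import engine_from_credentials

    engine = engine_from_credentials(
        "postgresql://user:password@localhost:5432/db",  # placeholder DSN
        echo=True,            # built-in create_engine() options pass through
        pool_pre_ping=True,
    )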
@@ -242,6 +250,20 @@ def unwrap_json_connector_x(field: str) -> TDataItem:
     return _unwrap
 
 
+def _detect_precision_hints_deprecated(value: Optional[bool]) -> None:
+    if value is None:
+        return
+
+    msg = "`detect_precision_hints` argument is deprecated and will be removed in a future release. "
+    if value:
+        msg += "Use `reflection_level='full_with_precision'` which has the same effect instead."
+
+    warnings.warn(
+        msg,
+        DeprecationWarning,
+    )
+
+
 @configspec
 class SqlDatabaseTableConfiguration(BaseConfiguration):
     incremental: Optional[dlt.sources.incremental] = None  # type: ignore[type-arg]
@@ -249,10 +271,12 @@ class SqlDatabaseTableConfiguration(BaseConfiguration):
 
 @configspec
 class SqlTableResourceConfiguration(BaseConfiguration):
-    credentials: IngestrConnectionStringCredentials = None
+    credentials: Union[ConnectionStringCredentials, Engine, str] = None
     table: str = None
-    incremental: Optional[dlt.sources.incremental] = None  # type: ignore[type-arg]
     schema: Optional[str] = None
-
-
-__source_name__ = "sql_database"
+    incremental: Optional[dlt.sources.incremental] = None  # type: ignore[type-arg]
+    chunk_size: int = 50000
+    backend: TableBackend = "sqlalchemy"
+    detect_precision_hints: Optional[bool] = None
+    defer_table_reflect: Optional[bool] = False
+    reflection_level: Optional[ReflectionLevel] = "full"
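
The expanded `SqlTableResourceConfiguration` keeps `detect_precision_hints` only as a deprecated alias for the new `reflection_level`. A hedged migration sketch, assuming the package's `sql_table` source (its `ingestr/src/sql_database/__init__.py` is listed in RECORD) forwards these fields the way the configuration above suggests:

    from ingestr.src.sql_database import sql_table  # assumed entry point

    # Old style: triggers _detect_precision_hints_deprecated() -> DeprecationWarning.
    rows = sql_table(
        credentials="duckdb:///local.db",  # placeholder connection string
        table="events",
        detect_precision_hints=True,
    )

    # New style: same effect, no warning.
    rows = sql_table(
        credentials="duckdb:///local.db",
        table="events",
        reflection_level="full_with_precision",
    )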

ingestr/src/sql_database/schema_types.py CHANGED

@@ -1,39 +1,73 @@
-from typing import TYPE_CHECKING, Any, Optional, Sequence, Type
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    List,
+    Literal,
+    Optional,
+    Type,
+    Union,
+)
 
 from dlt.common import logger
-from dlt.common.configuration import with_config
-from dlt.common.destination import DestinationCapabilitiesContext
 from dlt.common.schema.typing import TColumnSchema, TTableSchemaColumns
 from sqlalchemy import Column, Table
 from sqlalchemy.engine import Row
 from sqlalchemy.sql import Select, sqltypes
+from sqlalchemy.sql.sqltypes import TypeEngine
 from typing_extensions import TypeAlias
 
+ReflectionLevel = Literal["minimal", "full", "full_with_precision"]
+
+
 # optionally create generics with any so they can be imported by dlt importer
 if TYPE_CHECKING:
     SelectAny: TypeAlias = Select[Any]
     ColumnAny: TypeAlias = Column[Any]
     RowAny: TypeAlias = Row[Any]
+    TypeEngineAny = TypeEngine[Any]
 else:
     SelectAny: TypeAlias = Type[Any]
     ColumnAny: TypeAlias = Type[Any]
     RowAny: TypeAlias = Type[Any]
+    TypeEngineAny = Type[Any]
+
+
+TTypeAdapter = Callable[
+    [TypeEngineAny], Optional[Union[TypeEngineAny, Type[TypeEngineAny]]]
+]
 
 
 def sqla_col_to_column_schema(
-    sql_col: ColumnAny, add_precision: bool = False
+    sql_col: ColumnAny,
+    reflection_level: ReflectionLevel,
+    type_adapter_callback: Optional[TTypeAdapter] = None,
 ) -> Optional[TColumnSchema]:
     """Infer dlt schema column type from an sqlalchemy type.
 
     If `add_precision` is set, precision and scale is inferred from that types that support it,
     such as numeric, varchar, int, bigint. Numeric (decimal) types have always precision added.
     """
-    sql_t = sql_col.type
     col: TColumnSchema = {
         "name": sql_col.name,
-        "data_type": None,  # set that later
         "nullable": sql_col.nullable,
     }
+    if reflection_level == "minimal":
+        return col
+
+    sql_t = sql_col.type
+
+    if type_adapter_callback:
+        sql_t = type_adapter_callback(sql_t)  # type: ignore[assignment]
+        # Check if sqla type class rather than instance is returned
+        if sql_t is not None and isinstance(sql_t, type):
+            sql_t = sql_t()
+
+    if sql_t is None:
+        # Column ignored by callback
+        return col
+
+    add_precision = reflection_level == "full_with_precision"
 
     if isinstance(sql_t, sqltypes.SmallInteger):
         col["data_type"] = "bigint"
@@ -77,86 +111,29 @@ def sqla_col_to_column_schema(
         col["data_type"] = "bool"
     else:
         logger.warning(
-            f"A column with name {sql_col.name} contains unknown data type {sql_t} which cannot be mapped to `dlt` data type. When using sqlalchemy backend such data will be passed to the normalizer. In case of `pyarrow` backend such data will be ignored. In case of other backends, the behavior is backend-specific."
+            f"A column with name {sql_col.name} contains unknown data type {sql_t} which cannot be mapped to `dlt` data type. When using sqlalchemy backend such data will be passed to the normalizer. In case of `pyarrow` and `pandas` backend, data types are detected from numpy ndarrays. In case of other backends, the behavior is backend-specific."
         )
-        col = None
-    if col:
-        return {key: value for key, value in col.items() if value is not None}  # type: ignore[return-value]
-    return None
 
+    return {key: value for key, value in col.items() if value is not None}  # type: ignore[return-value]
 
-def table_to_columns(table: Table, add_precision: bool = False) -> TTableSchemaColumns:
-    """Convert an sqlalchemy table to a dlt table schema.
 
-    Adds precision to columns when `add_precision` is set.
-    """
-    return {
-        col["name"]: col
-        for col in (sqla_col_to_column_schema(c, add_precision) for c in table.columns)
-        if col is not None
-    }
+def get_primary_key(table: Table) -> Optional[List[str]]:
+    """Create primary key or return None if no key defined"""
+    primary_key = [c.name for c in table.primary_key]
+    return primary_key if len(primary_key) > 0 else None
 
 
-@with_config
-def columns_to_arrow(
-    columns_schema: TTableSchemaColumns,
-    caps: DestinationCapabilitiesContext = None,
-    tz: str = "UTC",
-) -> Any:
-    """Converts `column_schema` to arrow schema using `caps` and `tz`. `caps` are injected from the container - which
-    is always the case if run within the pipeline. This will generate arrow schema compatible with the destination.
-    Otherwise generic capabilities are used
-    """
-    from dlt.common.destination.capabilities import DestinationCapabilitiesContext
-    from dlt.common.libs.pyarrow import get_py_arrow_datatype
-    from dlt.common.libs.pyarrow import pyarrow as pa
-
-    return pa.schema(
-        [
-            pa.field(
-                name,
-                get_py_arrow_datatype(
-                    schema_item,
-                    caps or DestinationCapabilitiesContext.generic_capabilities(),
-                    tz,
-                ),
-                nullable=schema_item.get("nullable", True),
-            )
-            for name, schema_item in columns_schema.items()
-        ]
-    )
-
-
-def row_tuples_to_arrow(
-    rows: Sequence[RowAny], columns: TTableSchemaColumns, tz: str
-) -> Any:
-    import numpy as np
-    from dlt.common.libs.pyarrow import pyarrow as pa
-
-    arrow_schema = columns_to_arrow(columns, tz=tz)
-
-    try:
-        from pandas._libs import lib
-
-        pivoted_rows = lib.to_object_array_tuples(rows).T  # type: ignore[attr-defined]
-    except ImportError:
-        logger.info(
-            "Pandas not installed, reverting to numpy.asarray to create a table which is slower"
+def table_to_columns(
+    table: Table,
+    reflection_level: ReflectionLevel = "full",
+    type_conversion_fallback: Optional[TTypeAdapter] = None,
+) -> TTableSchemaColumns:
+    """Convert an sqlalchemy table to a dlt table schema."""
+    return {
+        col["name"]: col
+        for col in (
+            sqla_col_to_column_schema(c, reflection_level, type_conversion_fallback)
+            for c in table.columns
         )
-        pivoted_rows = np.asarray(rows, dtype="object", order="k").T  # type: ignore[call-overload]
-
-    columnar = {
-        col: dat.ravel()
-        for col, dat in zip(columns, np.vsplit(pivoted_rows, len(columns)))
+        if col is not None
     }
-    for idx in range(0, len(arrow_schema.names)):
-        field = arrow_schema.field(idx)
-        py_type = type(rows[0][idx])
-        # cast double / float ndarrays to decimals if type mismatch, looks like decimals and floats are often mixed up in dialects
-        if pa.types.is_decimal(field.type) and issubclass(py_type, (str, float)):
-            logger.warning(
-                f"Field {field.name} was reflected as decimal type, but rows contains {py_type.__name__}. Additional cast is required which may slow down arrow table generation."
-            )
-            float_array = pa.array(columnar[field.name], type=pa.float64())
-            columnar[field.name] = float_array.cast(field.type, safe=False)
-    return pa.Table.from_pydict(columnar, schema=arrow_schema)

ingestr/src/table_definition.py ADDED

@@ -0,0 +1,15 @@
+from dataclasses import dataclass
+
+
+@dataclass
+class TableDefinition:
+    dataset: str
+    table: str
+
+
+def table_string_to_dataclass(table: str) -> TableDefinition:
+    table_fields = table.split(".", 1)
+    if len(table_fields) != 2:
+        raise ValueError("Table name must be in the format <schema>.<table>")
+
+    return TableDefinition(dataset=table_fields[0], table=table_fields[1])
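
A quick usage sketch of the new helper:

    from ingestr.src.table_definition import table_string_to_dataclass

    td = table_string_to_dataclass("public.users")
    print(td.dataset, td.table)  # public users

    # Only the first dot splits, so dotted table names survive intact:
    print(table_string_to_dataclass("analytics.events.v2").table)  # events.v2

    table_string_to_dataclass("users")  # raises ValueError
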
ingestr/src/version.py CHANGED
@@ -1 +1 @@
-__version__ = "0.6.5"
+__version__ = "0.7.0"
ingestr-0.6.5.dist-info/METADATA → ingestr-0.7.0.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: ingestr
-Version: 0.6.5
+Version: 0.7.0
 Summary: ingestr is a command-line application that ingests data from various sources and stores them in any database.
 Project-URL: Homepage, https://github.com/bruin-data/ingestr
 Project-URL: Issues, https://github.com/bruin-data/ingestr/issues
@@ -16,11 +16,12 @@ Classifier: Topic :: Database
 Requires-Python: >=3.9
 Requires-Dist: cx-oracle==8.3.0
 Requires-Dist: databricks-sql-connector==2.9.3
-Requires-Dist: dlt==0.4.12
+Requires-Dist: dlt==0.5.1
 Requires-Dist: duckdb-engine==0.11.5
 Requires-Dist: duckdb==0.10.2
 Requires-Dist: google-api-python-client==2.130.0
 Requires-Dist: google-cloud-bigquery-storage==2.24.0
+Requires-Dist: mysql-connector-python==9.0.0
 Requires-Dist: pendulum==3.0.0
 Requires-Dist: psycopg2-binary==2.9.9
 Requires-Dist: py-machineid==0.5.1
@@ -171,6 +172,11 @@ Join our Slack community [here](https://join.slack.com/t/bruindatacommunity/shar
 <tr>
 <td colspan="3" style='text-align:center;'><strong>Platforms</strong></td>
 </tr>
+<tr>
+<td>Gorgias</td>
+<td>✅</td>
+<td>❌</td>
+</tr>
 <tr>
 <td>Google Sheets</td>
 <td>✅</td>
ingestr-0.6.5.dist-info/RECORD → ingestr-0.7.0.dist-info/RECORD

@@ -1,12 +1,9 @@
-ingestr/main.py,sha256=PvZtqrlHO3aSFtdhIdLAhzsOLG1hP_ql24vof76SFaI,14862
-ingestr/main_test.py,sha256=MDV2Eo86W_CcxGgEkYYoBc6xIXjVMER4hMhgAdxXYMc,28464
+ingestr/main.py,sha256=JYgh3rZSO9n_Ko2D_9BLaF_cGQHrLSywK1WH6XioefQ,15961
 ingestr/src/destinations.py,sha256=2SfPMjtTelPmzQmc3zNs8xGcKIPuGn_hoZFIBUuhjXI,6338
-ingestr/src/destinations_test.py,sha256=rgEk8EpAntFbSOwXovC4prv3RA22mwq8pIO6sZ_rYzg,4212
-ingestr/src/factory.py,sha256=7skwetBXFIwAuKyTFfffGPgSo_PRRZ5uEnxOHUv28yQ,3517
-ingestr/src/factory_test.py,sha256=X9sFkvNByWChIcyeDt1QiIPMIzGNKb7M5A_GUE0-nnI,664
-ingestr/src/sources.py,sha256=v0oFu-Pt_Zv06FypBb9_rHKP_iIMpkgm9MMaVpOUjPg,10313
-ingestr/src/sources_test.py,sha256=t94u1lYAspxzfe-DkxVtq5vw6xrLWphipvwntrwrzqg,3930
-ingestr/src/version.py,sha256=KDgkBrBsBSUzbLgrOZ89YsNN06fU4j5bmcuEwo6q5pg,22
+ingestr/src/factory.py,sha256=XuT_8LvWd7gBxOjoD_NiG-jtPvHNQ9nqOeoCJzhRb6Y,3630
+ingestr/src/sources.py,sha256=QbSvECvGbHJKOpE9_dbq11343pA5ajsS9BPPPab1ivw,10007
+ingestr/src/table_definition.py,sha256=REbAbqdlmUMUuRh8nEQRreWjPVOQ5ZcfqGkScKdCrmk,390
+ingestr/src/version.py,sha256=RaANGbRu5e-vehwXI1-Qe2ggPPfs1TQaZj072JdbLk4,22
 ingestr/src/google_sheets/README.md,sha256=wFQhvmGpRA38Ba2N_WIax6duyD4c7c_pwvvprRfQDnw,5470
 ingestr/src/google_sheets/__init__.py,sha256=5qlX-6ilx5MW7klC7B_0jGSxloQSLkSESTh4nlY3Aos,6643
 ingestr/src/google_sheets/helpers/__init__.py,sha256=5hXZrZK8cMO3UOuL-s4OKOpdACdihQD0hYYlSEu-iQ8,35
@@ -14,7 +11,6 @@ ingestr/src/google_sheets/helpers/api_calls.py,sha256=RiVfdacbaneszhmuhYilkJnkc9
 ingestr/src/google_sheets/helpers/data_processing.py,sha256=WYO6z4XjGcG0Hat2J2enb-eLX5mSNVb2vaqRE83FBWU,11000
 ingestr/src/gorgias/__init__.py,sha256=BzX9X1Yc_1Mch6NP1pn26hjRIiaadErgHxkdJHw4P3o,21227
 ingestr/src/gorgias/helpers.py,sha256=DamuijnvhGY9hysQO4txrVMf4izkGbh5qfBKImdOINE,5427
-ingestr/src/gorgias/helpers_test.py,sha256=kSR2nhB8U8HZ8pgDnd7HvXlzojmBnpOm8fTKHJvvKGY,1580
 ingestr/src/mongodb/__init__.py,sha256=E7SDeCyYNkYZZ_RFhjCRDZUGpKtaxpPG5sFSmKJV62U,4336
 ingestr/src/mongodb/helpers.py,sha256=80vtAeNyUn1iMN0CeLrTlKqYN6I6fHF81Kd2UuE8Kns,5653
 ingestr/src/notion/__init__.py,sha256=36wUui8finbc85ObkRMq8boMraXMUehdABN_AMe_hzA,1834
@@ -26,10 +22,11 @@ ingestr/src/shopify/__init__.py,sha256=EWjpvZz7K6Pms7uUoqqkM4Wj0XeE2NrDvVp4BNM8d
 ingestr/src/shopify/exceptions.py,sha256=BhV3lIVWeBt8Eh4CWGW_REFJpGCzvW6-62yZrBWa3nQ,50
 ingestr/src/shopify/helpers.py,sha256=OO_Tw-HwVLnRhwT3vqUWEQEEcWIS9KWE6VDDe8BCC2w,4972
 ingestr/src/shopify/settings.py,sha256=StY0EPr7wFJ7KzRRDN4TKxV0_gkIS1wPj2eR4AYSsDk,141
-ingestr/src/sql_database/__init__.py,sha256=S5MVJr8juPSs61C2D7pInsTwNEHetChK6RjjhPAD0Lg,8845
-ingestr/src/sql_database/helpers.py,sha256=tbn-GjjBIVu3hVVh5vrUSiZqQ32_Tp0oBP5Fvv_wY4E,8986
+ingestr/src/sql_database/__init__.py,sha256=HEqY6U-YzzbeZ8avIthj-Fatm2C3i3jqYs5DAIAu4Ss,11511
+ingestr/src/sql_database/arrow_helpers.py,sha256=yze1X3A9nUQA4HeuFDDWrfJVkCq8Uo5UyDo_zhJtI60,5699
+ingestr/src/sql_database/helpers.py,sha256=6o8e2_8MIuj3qlo40a2E6ns3gyK18ei1jCePONrMUjI,10191
 ingestr/src/sql_database/override.py,sha256=xbKGDztCzvrhJ5kJTXERal3LA56bEeVug4_rrTs8DgA,333
-ingestr/src/sql_database/schema_types.py,sha256=foGHh4iGagGLfS7nF3uGYhBjqgX0jlrjj0XYE1T3nSs,6592
+ingestr/src/sql_database/schema_types.py,sha256=qXTanvFPE8wMCSDzQWPDi5yqaO-llfrFXjiGJALI4NA,5013
 ingestr/src/telemetry/event.py,sha256=MpWc5tt0lSJ1pWKe9HQ11BHrcPBxSH40l4wjZi9u0tI,924
 ingestr/src/testdata/fakebqcredentials.json,sha256=scc6TUc963KAbKTLZCfcmqVzbtzDCW1_8JNRnyAXyy8,628
 ingestr/testdata/.gitignore,sha256=DFzYYOpqdTiT7S1HjCT-jffZSmEvFZge295_upAB0FY,13
@@ -40,8 +37,8 @@ ingestr/testdata/delete_insert_part2.csv,sha256=B_KUzpzbNdDY_n7wWop1mT2cz36TmayS
 ingestr/testdata/merge_expected.csv,sha256=DReHqWGnQMsf2PBv_Q2pfjsgvikYFnf1zYcQZ7ZqYN0,276
 ingestr/testdata/merge_part1.csv,sha256=Pw8Z9IDKcNU0qQHx1z6BUf4rF_-SxKGFOvymCt4OY9I,185
 ingestr/testdata/merge_part2.csv,sha256=T_GiWxA81SN63_tMOIuemcvboEFeAmbKc7xRXvL9esw,287
-ingestr-0.6.5.dist-info/METADATA,sha256=WmTysf7rKdSEh5XQILpzOVrF43VsoRtgxzL0XaAEoYA,5699
-ingestr-0.6.5.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
-ingestr-0.6.5.dist-info/entry_points.txt,sha256=oPJy0KBnPWYjDtP1k8qwAihcTLHSZokSQvRAw_wtfJM,46
-ingestr-0.6.5.dist-info/licenses/LICENSE.md,sha256=cW8wIhn8HFE-KLStDF9jHQ1O_ARWP3kTpk_-eOccL24,1075
-ingestr-0.6.5.dist-info/RECORD,,
+ingestr-0.7.0.dist-info/METADATA,sha256=MmQ_futv2ZZbVg4hdbCErSAgs2AobplIUr4vMErTXEI,5829
+ingestr-0.7.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+ingestr-0.7.0.dist-info/entry_points.txt,sha256=oPJy0KBnPWYjDtP1k8qwAihcTLHSZokSQvRAw_wtfJM,46
+ingestr-0.7.0.dist-info/licenses/LICENSE.md,sha256=cW8wIhn8HFE-KLStDF9jHQ1O_ARWP3kTpk_-eOccL24,1075
+ingestr-0.7.0.dist-info/RECORD,,