thds.tabularasa 0.14.3__py3-none-any.whl → 0.14.4__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
@@ -44,7 +44,6 @@ from thds.tabularasa.schema.util import all_predecessors, all_successors
44
44
  try:
45
45
  from bourbaki.application.cli import CommandLineInterface, cli_spec
46
46
  except ImportError:
47
-
48
47
  # stand-in decorators
49
48
  def noop_decorator(obj):
50
49
  return obj
@@ -84,7 +83,6 @@ else:
84
83
  try:
85
84
  from ruamel.yaml import YAML
86
85
  except ImportError:
87
-
88
86
  import yaml
89
87
 
90
88
  load_yaml = yaml.safe_load
@@ -578,9 +576,9 @@ class ReferenceDataManager:
578
576
  package=self.package,
579
577
  )
580
578
  elif target == CompilationTarget.attrs_sqlite:
581
- assert (
582
- self.sqlite_db_path is not None
583
- ), "Must specify sqlite db path in build options to generate sqlite interface"
579
+ assert self.sqlite_db_path is not None, (
580
+ "Must specify sqlite db path in build options to generate sqlite interface"
581
+ )
584
582
  renderer = partial(
585
583
  render_attrs_sqlite_schema,
586
584
  package=self.package,
@@ -595,9 +593,9 @@ class ReferenceDataManager:
595
593
  @output_handler(print_file_hashes_status)
596
594
  def check_hashes(self) -> Dict[str, DataFileHashes]:
597
595
  """Check actual hashes of on-disk built data files against those documented in the schema"""
598
- assert (
599
- self.package_data_dir is not None and self.transient_data_dir is not None
600
- ), "Can't check hashes without package data dirs"
596
+ assert self.package_data_dir is not None and self.transient_data_dir is not None, (
597
+ "Can't check hashes without package data dirs"
598
+ )
601
599
  hashes = {}
602
600
  for table in self.schema.build_time_package_tables:
603
601
  name = table.name
@@ -750,9 +748,9 @@ class ReferenceDataManager:
750
748
  ensure hashes embedded in source code are up-to-date. By default, this runs when any hashes are
751
749
  updated in the config file.
752
750
  """
753
- assert (
754
- self.package_data_dir is not None and self.transient_data_dir is not None
755
- ), "Can't update hashes without package data dirs"
751
+ assert self.package_data_dir is not None and self.transient_data_dir is not None, (
752
+ "Can't update hashes without package data dirs"
753
+ )
756
754
  hashes_updated = []
757
755
  tables_to_update = (
758
756
  [self.schema.tables[t] for t in tables] if tables else self.schema.build_time_package_tables
@@ -181,15 +181,15 @@ class ReferenceDataBuildCommand(setuptools.command.build_py.build_py):
181
181
 
182
182
  if self.options.sqlite_data:
183
183
  # now initialize database and load reference data into database
184
- assert (
185
- self.options.sqlite_db_path is not None
186
- ), "No sqlite_db_path specified; can't populate db"
187
- assert (
188
- self.options.package_data_dir is not None
189
- ), "No package_data_dir specified; can't populate db"
190
- assert (
191
- self.options.transient_data_dir is not None
192
- ), "No transient_data_dir specified; can't populate db"
184
+ assert self.options.sqlite_db_path is not None, (
185
+ "No sqlite_db_path specified; can't populate db"
186
+ )
187
+ assert self.options.package_data_dir is not None, (
188
+ "No package_data_dir specified; can't populate db"
189
+ )
190
+ assert self.options.transient_data_dir is not None, (
191
+ "No transient_data_dir specified; can't populate db"
192
+ )
193
193
  populate_sqlite_db(
194
194
  self.schema,
195
195
  db_package=self.package_name,
@@ -28,7 +28,9 @@ class NullabilityDiff(enum.IntEnum):
28
28
  return (
29
29
  NullabilityDiff.NO_CHANGE
30
30
  if nullable_before == nullable_after
31
- else NullabilityDiff.NOT_NULL if nullable_before else NullabilityDiff.NULL
31
+ else NullabilityDiff.NOT_NULL
32
+ if nullable_before
33
+ else NullabilityDiff.NULL
32
34
  )
33
35
 
34
36
 
@@ -47,7 +49,9 @@ class OrderedDiff(enum.IntEnum):
47
49
  return (
48
50
  OrderedDiff.NO_CHANGE
49
51
  if ordered_before == ordered_after
50
- else OrderedDiff.UNORDERED if ordered_before else OrderedDiff.ORDERED
52
+ else OrderedDiff.UNORDERED
53
+ if ordered_before
54
+ else OrderedDiff.ORDERED
51
55
  )
52
56
 
53
57
 
@@ -244,6 +244,8 @@ def markdown_dataframe_diff_summary(
244
244
  )
245
245
  col_heading = True
246
246
  yield markdown_heading(heading_level + 3, f"{kind}:")
247
- yield counts.to_frame("count").reset_index().to_markdown(
248
- index=False, tablefmt=tablefmt
247
+ yield (
248
+ counts.to_frame("count")
249
+ .reset_index()
250
+ .to_markdown(index=False, tablefmt=tablefmt)
249
251
  )
@@ -44,8 +44,10 @@ from .parquet_util import (
44
44
  # used here but they're imported for backward compatibility with
45
45
  # existing generated code, which expects it to be importable from
46
46
  # here. They were moved to sqlite_util to reduce the size of this file.
47
- from .sqlite_util import AttrsSQLiteDatabase # noqa: F401
48
- from .sqlite_util import sqlite_constructor_for_record_type # noqa: F401
47
+ from .sqlite_util import ( # noqa: F401
48
+ AttrsSQLiteDatabase,
49
+ sqlite_constructor_for_record_type,
50
+ )
49
51
 
50
52
  T = TypeVar("T")
51
53
  K = TypeVar("K", bound=PyType)
@@ -197,12 +199,12 @@ class _PackageDataOrFileInterface:
197
199
  f"with local path {self.data_path}"
198
200
  )
199
201
  else:
200
- assert (
201
- self.md5 is not None
202
- ), f"No md5 defined for {self.data_path}; can't safely sync blob"
202
+ assert self.md5 is not None, (
203
+ f"No md5 defined for {self.data_path}; can't safely sync blob"
204
+ )
203
205
  target_local_path = self.file_path(sync=False)
204
206
  getLogger(__name__).info(
205
- f"Syncing blob with hash {self.md5}" f" to {target_local_path}" if link else ""
207
+ f"Syncing blob with hash {self.md5} to {target_local_path}" if link else ""
206
208
  )
207
209
  remote_data_spec = self.blob_store.data_spec(self.md5)
208
210
  local_files = sync_adls_data(remote_data_spec)
@@ -1,48 +1,48 @@
1
1
  import logging
2
2
  import os.path
3
+ import subprocess
3
4
  import tempfile
4
5
  from functools import lru_cache
5
- from typing import Any, Callable, List, Optional, Tuple
6
6
  from warnings import warn
7
7
 
8
8
 
9
9
  @lru_cache
10
- def __autoformat_imports() -> Tuple[Optional[Any], Optional[Callable[[List[str]], int]]]:
10
+ def _ruff_available() -> bool:
11
11
  try:
12
- import black
13
- except ImportError:
12
+ subprocess.run(["ruff", "--version"], capture_output=True, check=True)
13
+ return True
14
+ except (FileNotFoundError, subprocess.CalledProcessError):
14
15
  warn(
15
- "`black` is unavailable; generated python code will not be auto-formatted. "
16
+ "`ruff` is unavailable; generated python code will not be auto-formatted. "
16
17
  "Specify the 'cli' extra to ensure this dependency is present."
17
18
  )
18
- black = None # type: ignore
19
- try:
20
- from isort.main import main as isort_main # type: ignore
21
- except ImportError:
22
- warn(
23
- "`isort` is unavailable; imports in generated python code will not be automatically sorted. "
24
- "Specify the 'cli' extra to ensure this dependency is present."
25
- )
26
- isort_main = None # type: ignore
27
- return black, isort_main # type: ignore
19
+ return False
28
20
 
29
21
 
30
22
  def autoformat(py_code: str) -> str:
31
23
  _LOGGER = logging.getLogger(__name__)
24
+ if not _ruff_available():
25
+ return py_code
26
+
32
27
  try:
33
- black, isort_main = __autoformat_imports()
34
- if black is not None:
35
- _LOGGER.info("Applying `black` formatting to auto-generated code")
36
- py_code = black.format_str(py_code, mode=black.FileMode())
37
- if isort_main is not None:
38
- _LOGGER.info("Applying `isort` formatting to auto-generated code")
39
- with tempfile.TemporaryDirectory() as d:
40
- outfile = os.path.join(d, "tmp.py")
41
- with open(outfile, "w") as f:
42
- f.write(py_code)
43
- isort_main([outfile, "--profile", "black"])
44
- with open(outfile, "r") as f_:
45
- py_code = f_.read()
28
+ with tempfile.TemporaryDirectory() as d:
29
+ outfile = os.path.join(d, "tmp.py")
30
+ with open(outfile, "w") as f:
31
+ f.write(py_code)
32
+
33
+ _LOGGER.info("Applying `ruff` import sorting to auto-generated code")
34
+ subprocess.run(
35
+ ["ruff", "check", "--select", "I", "--fix", outfile],
36
+ capture_output=True,
37
+ check=False,
38
+ )
39
+
40
+ _LOGGER.info("Applying `ruff` formatting to auto-generated code")
41
+ subprocess.run(["ruff", "format", outfile], capture_output=True, check=True)
42
+
43
+ with open(outfile, "r") as f_:
44
+ py_code = f_.read()
45
+
46
46
  return py_code
47
47
  except Exception as ex:
48
48
  print(f"{repr(ex)} when attempting to format code:")
@@ -63,7 +63,7 @@ def pyarrow_field_literal(field: pa.Field) -> str:
63
63
  else:
64
64
  return (
65
65
  f'{pa.__name__}.field(\n "{field.name}",\n'
66
- f'{indent(pyarrow_type_literal(field.type), " ")},\n nullable={field.nullable!r},\n)'
66
+ f"{indent(pyarrow_type_literal(field.type), ' ')},\n nullable={field.nullable!r},\n)"
67
67
  )
68
68
 
69
69
 
@@ -31,7 +31,7 @@ def render_sql_index_schema(table: metaschema.Table) -> Optional[str]:
31
31
  if table.primary_key:
32
32
  table_constraints = (
33
33
  f"CREATE UNIQUE INDEX {index_name(table.snake_case_name, *table.primary_key)} ON "
34
- f'{table.snake_case_name}({", ".join(table.primary_key)});'
34
+ f"{table.snake_case_name}({', '.join(table.primary_key)});"
35
35
  )
36
36
  index_defs.append(table_constraints)
37
37
 
@@ -39,7 +39,7 @@ def render_sql_index_schema(table: metaschema.Table) -> Optional[str]:
39
39
  unique = "UNIQUE " if frozenset(index) in unique_constraints else ""
40
40
  index_def = (
41
41
  f"CREATE {unique}INDEX {index_name(table.snake_case_name, *index)} "
42
- f'ON {table.snake_case_name}({", ".join(index)});'
42
+ f"ON {table.snake_case_name}({', '.join(index)});"
43
43
  )
44
44
  index_defs.append(index_def)
45
45
 
@@ -42,7 +42,7 @@ def _dict_literal(named_exprs: Iterable[Tuple[str, str]], linebreak: bool = True
42
42
  except StopIteration:
43
43
  return "{}"
44
44
  else:
45
- return f"dict({start}{sep.join(map(keyval, itertools.chain((peek,), named_exprs)))}{end})" ""
45
+ return f"dict({start}{sep.join(map(keyval, itertools.chain((peek,), named_exprs)))}{end})"
46
46
 
47
47
 
48
48
  def _indent(expr: str, level: int = 1, first_line: bool = False) -> str:
@@ -255,7 +255,9 @@ class MatchesRegex(StrConstraint):
255
255
 
256
256
  def check_fn(s: pd.Series):
257
257
  return s.str.fullmatch(
258
- re.compile(self.matches.pattern), case=self.case_sensitive, na=True # type: ignore[arg-type]
258
+ re.compile(self.matches.pattern), # type: ignore[arg-type]
259
+ case=self.case_sensitive,
260
+ na=True,
259
261
  )
260
262
 
261
263
  else:
@@ -301,7 +303,7 @@ class EnumConstraint(ColumnConstraint):
301
303
  return pa.Check.isin(self.enum)
302
304
 
303
305
  def sqlite_check_expr(self, colname: str) -> str:
304
- return f'{colname} IN ({", ".join(map(repr, self.enum))})'
306
+ return f"{colname} IN ({', '.join(map(repr, self.enum))})"
305
307
 
306
308
  def applies_to(self, dtype: DType) -> bool:
307
309
  if not self.enum:
@@ -297,7 +297,7 @@ class UniqueColumnsConstraint(BaseModel, extra=Extra.forbid):
297
297
 
298
298
  @property
299
299
  def sqlite(self) -> str:
300
- return f'UNIQUE ({", ".join(self.unique)})'
300
+ return f"UNIQUE ({', '.join(self.unique)})"
301
301
 
302
302
  @staticmethod
303
303
  def make_pandera_check_expr(unique: IdTuple) -> str:
@@ -1,6 +1,7 @@
1
1
  import io
2
2
  import itertools
3
3
  import os
4
+ from _warnings import warn
4
5
  from collections import Counter
5
6
  from functools import lru_cache
6
7
  from pathlib import Path
@@ -9,7 +10,6 @@ from typing import Any, Collection, Dict, List, Mapping, Optional, Set, Tuple, T
9
10
  import networkx as nx
10
11
  import pkg_resources
11
12
  import yaml
12
- from _warnings import warn
13
13
 
14
14
  from .. import git_util
15
15
  from .constraints import AnyColumnConstraint, EnumConstraint
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: thds.tabularasa
3
- Version: 0.14.3
3
+ Version: 0.14.4
4
4
  Summary: Trilliant Health reference data build system.
5
5
  Author-email: Trilliant Health <info@trillianthealth.com>
6
6
  Project-URL: Repository, https://github.com/TrilliantHealth/ds-monorepo
@@ -1,5 +1,5 @@
1
1
  thds/tabularasa/__init__.py,sha256=jc6w1WD868MQ2t4wkRNYvRssojwXvPDcNyC8V5gwbl0,169
2
- thds/tabularasa/__main__.py,sha256=DlaUfXu03tbBVucRuMw7354LeBs8d5tRCuAprZs0XYs,47778
2
+ thds/tabularasa/__main__.py,sha256=ooLtYWjH30oGHhOg1UFTpwTICG30_9h88JnRE0j0QBs,47776
3
3
  thds/tabularasa/compat.py,sha256=j0313TPIXtkbfvRI0AH4if8GLrjQSrDJ9heayCIl9w8,1037
4
4
  thds/tabularasa/git_util.py,sha256=fBFhaCPi_5W2BpG2B3WiPcAWJvuVI_pG47rt73wLO6E,1388
5
5
  thds/tabularasa/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -8,41 +8,41 @@ thds/tabularasa/sqlite_from_parquet.py,sha256=yJatUIAbgErHUOL5dhchWJwzKZCrDrx93S
8
8
  thds/tabularasa/to_sqlite.py,sha256=5lcEUh38MNebxAJdLp2XGWOP_WQDIADtL1fyhOvi9UU,1715
9
9
  thds/tabularasa/data_dependencies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
10
  thds/tabularasa/data_dependencies/adls.py,sha256=vJAuc5Key-vO1N6DGo5dj9fIx_4hMALAVC17qhvkT7Y,3257
11
- thds/tabularasa/data_dependencies/build.py,sha256=6iYgw93sOF2Nlnb6WSmA9NvPNwOf_Yyi2wXUQpRVkJM,23382
11
+ thds/tabularasa/data_dependencies/build.py,sha256=sFxlMtjluje5t2TP3oYVV4TdMOv85gdJSBQ4E7R0JuE,23382
12
12
  thds/tabularasa/data_dependencies/sqlite.py,sha256=sMP_NInBEDoH5SScIRYxtOvcPUi9WXfE3_jCoOBduGo,12825
13
13
  thds/tabularasa/data_dependencies/tabular.py,sha256=oq9wFse235ikLEv8Zvol59ptRRojZbkbzXJyQeFfC9o,6529
14
14
  thds/tabularasa/data_dependencies/util.py,sha256=FQ9G1nIpqKh00z2lXOt0Y2R1mLQsEb-BC6Tka1z2egc,8489
15
15
  thds/tabularasa/diff/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
16
16
  thds/tabularasa/diff/data.py,sha256=J1YK9thlXRn4WYQjCKG5kgkGPPRp2wu9rPjkLbyIi3E,11680
17
- thds/tabularasa/diff/schema.py,sha256=I-RC7yQuLWj5DqxWMWzQgMecT6LanJ2k1bH9-IJELcg,8855
18
- thds/tabularasa/diff/summary.py,sha256=gENtDwhSrDYeN-8fWr6Ug2zgdp584b0pZF9UBYzKFlc,10039
17
+ thds/tabularasa/diff/schema.py,sha256=obd3HwjMB8P6WXk9ETui7HSvJ7xb4r5sOGXEMCl_PZk,8903
18
+ thds/tabularasa/diff/summary.py,sha256=5D5ZsLiJpsiz7spqOjqFJEzG8C-6DD8e-KjJnfzKrPU,10099
19
19
  thds/tabularasa/loaders/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
20
20
  thds/tabularasa/loaders/lazy_adls.py,sha256=jrWy5tTKDQfWEv6aHQ3UJhFzLrDPOlSGsArv9zcl1g8,1375
21
21
  thds/tabularasa/loaders/parquet_util.py,sha256=u75j3PkMSakO2zfq4zksWzXLYnaO--WizAgXTcSpXRY,13354
22
22
  thds/tabularasa/loaders/sqlite_util.py,sha256=etLiV0h1l7NQHaEdNjz0QtqYaRBSm5L6EVWOwnCRv2w,11185
23
- thds/tabularasa/loaders/util.py,sha256=XmsGkDdL6O8R6B4667Iqi5HoRgq0YMs6LP3VvPIqPVU,21369
23
+ thds/tabularasa/loaders/util.py,sha256=2hBUEtforxBxwhpb_xqFRZI2u4zcAYnYbFnkqPWTZTM,21340
24
24
  thds/tabularasa/schema/__init__.py,sha256=bowvNXrrDrWB3TAmwDxCeEAvVEe9z7iRfqRaNg1Qmo4,440
25
- thds/tabularasa/schema/constraints.py,sha256=V2vh01BhYR8OVQvgdujqSi0l_fMJvFKYSlBvWExZFG0,9744
25
+ thds/tabularasa/schema/constraints.py,sha256=hdRpdWdzIZxS7XVKjyA69BPwWgWRxFE6XsO-2oSxfCY,9785
26
26
  thds/tabularasa/schema/dtypes.py,sha256=wHoDXiQoZ5_wC5K6b_TnPkmhtkrSMYmjZQ8TNault0I,4816
27
27
  thds/tabularasa/schema/extract_from_parquet.py,sha256=WBOr0Vr2BFShfIyE3q7yXzgGhtTTyFYi3WV4lqr4MME,4730
28
28
  thds/tabularasa/schema/files.py,sha256=gvDaeM9a0W7SpOBICw2WvWzepaRumu6c5ijAiXFeY_8,7604
29
- thds/tabularasa/schema/metaschema.py,sha256=dPcyfVoeFOooRdOm-oliznU9NUkgkbNhwgFUXd2--eU,36997
29
+ thds/tabularasa/schema/metaschema.py,sha256=yt4ByweXfqHa2dYF8izG3BAhiBw8iQ1Dmmu_UrTdxhM,36997
30
30
  thds/tabularasa/schema/util.py,sha256=3Rv3Mf1-Tw0obmq9vWoc2bQr9sy0viqn48a41c-RK84,4265
31
- thds/tabularasa/schema/validation.py,sha256=5ESI1SCj8wgYiRaDP3oxtLgdbyhOShN_wwD5zSng7js,32193
31
+ thds/tabularasa/schema/validation.py,sha256=Ek3L2r2aSm-X4KiLuLT9RFPaVU_p_iNbLCQJIy4q2zU,32193
32
32
  thds/tabularasa/schema/compilation/__init__.py,sha256=kOX2fFyPsWfidxW1lYs1-oiKH_DsA1nPKalppf7gLWQ,636
33
- thds/tabularasa/schema/compilation/_format.py,sha256=RayqDj-SXTtEFfPLyYvTRC1sL82LNVcOol40w5s8Qz8,1854
33
+ thds/tabularasa/schema/compilation/_format.py,sha256=ibAszFBpHESb1YjiUAYyDrEvUe4lLRKXr5yXKEf1U8k,1521
34
34
  thds/tabularasa/schema/compilation/attrs.py,sha256=u6sTIajs2uEfbsRJcst05C-ekBS_-bsvHwJK8l0Q_ZQ,7749
35
35
  thds/tabularasa/schema/compilation/attrs_sqlite.py,sha256=ayWOXgw7iuZtxkS9CHwjjtjUr4Wn8Vuus3tuiEbBiOk,10148
36
36
  thds/tabularasa/schema/compilation/io.py,sha256=ehhVP0On0XwIm6qWSCO5uo7r5CjpThpBRLYKAqrb40I,3199
37
37
  thds/tabularasa/schema/compilation/pandas.py,sha256=6yxXTTIlcTXsttVh9uRMccetty6OrvdbmRXgbvvHdyA,8176
38
- thds/tabularasa/schema/compilation/pyarrow.py,sha256=pcNQ3a6UPJT1PBj6xHOl99UvZftp1hqV5OFdFbT0OHU,3030
38
+ thds/tabularasa/schema/compilation/pyarrow.py,sha256=G3ItYcK2R2bkBdHaIkml6zT9z72FtR9OJQAK8YnCwyE,3030
39
39
  thds/tabularasa/schema/compilation/sphinx.py,sha256=we5X-ZpCk6WH-8KCXAv6Nklg1JZmnkGPT3V2EHa2_rg,17491
40
- thds/tabularasa/schema/compilation/sqlite.py,sha256=wSrSlVCYeuTpOf9AOHAnp6gJHkjHZhx8UkgkYgfoQVw,2368
41
- thds/tabularasa/schema/compilation/util.py,sha256=YXFe1_yoBobED010hstKIoq-dwLHo6SBv1v1IAw6AYU,3886
40
+ thds/tabularasa/schema/compilation/sqlite.py,sha256=-_xdz8cUkOTOczu3OYoIoL8TNqi6JJ4rAkrEdu6QR2w,2368
41
+ thds/tabularasa/schema/compilation/util.py,sha256=kCJQxjOe_VnQtKFG7p3162HOZFzTTc1eVBhdrRsvYqY,3883
42
42
  thds/tabularasa/testing/__init__.py,sha256=XoLzB-DotxFw9KHt2vfH72k7pyAAFI2bW-qqq6nww1g,85
43
43
  thds/tabularasa/testing/mock_sqlite.py,sha256=xoV4w_GaDgtZf17iUux2-LA6Va1XRJdC2FU34dysh0o,4769
44
- thds_tabularasa-0.14.3.dist-info/METADATA,sha256=WfrgVRGa1Te2GYA4qFHRogd9-H8ZPJRsTNVOwhKbL1Q,26786
45
- thds_tabularasa-0.14.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
46
- thds_tabularasa-0.14.3.dist-info/entry_points.txt,sha256=PX4ShRonjv6lMsVjrGu8RkFzpyyvgM9EnZlNfMomd9k,61
47
- thds_tabularasa-0.14.3.dist-info/top_level.txt,sha256=LTZaE5SkWJwv9bwOlMbIhiS-JWQEEIcjVYnJrt-CriY,5
48
- thds_tabularasa-0.14.3.dist-info/RECORD,,
44
+ thds_tabularasa-0.14.4.dist-info/METADATA,sha256=8ZjGMMWzgAC0V37yG_EwzXE9qb3_pvScmkNXOQ4xZ4I,26786
45
+ thds_tabularasa-0.14.4.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
46
+ thds_tabularasa-0.14.4.dist-info/entry_points.txt,sha256=PX4ShRonjv6lMsVjrGu8RkFzpyyvgM9EnZlNfMomd9k,61
47
+ thds_tabularasa-0.14.4.dist-info/top_level.txt,sha256=LTZaE5SkWJwv9bwOlMbIhiS-JWQEEIcjVYnJrt-CriY,5
48
+ thds_tabularasa-0.14.4.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (80.9.0)
2
+ Generator: setuptools (80.10.2)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5