TypeDAL 2.4.0__py3-none-any.whl → 3.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of TypeDAL might be problematic; consult the registry's release page for more details.

typedal/__about__.py CHANGED
@@ -5,4 +5,4 @@ This file contains the Version info for this package.
5
5
  # SPDX-FileCopyrightText: 2023-present Robin van der Noord <robinvandernoord@gmail.com>
6
6
  #
7
7
  # SPDX-License-Identifier: MIT
8
- __version__ = "2.4.0"
8
+ __version__ = "3.0.0"
typedal/caching.py CHANGED
@@ -13,6 +13,7 @@ import dill # nosec
13
13
  from pydal.objects import Field, Rows, Set
14
14
 
15
15
  from .core import TypedField, TypedRows, TypedTable
16
+ from .types import Query
16
17
 
17
18
  if typing.TYPE_CHECKING: # pragma: no cover
18
19
  from .core import TypeDAL
@@ -172,7 +173,7 @@ def clear_expired() -> int:
172
173
  By default, expired items are only removed when trying to access them.
173
174
  """
174
175
  now = get_now()
175
- return len(_TypedalCache.where(_TypedalCache.expires_at > now).delete())
176
+ return len(_TypedalCache.where(_TypedalCache.expires_at != None).where(_TypedalCache.expires_at < now).delete())
176
177
 
177
178
 
178
179
  def _remove_cache(s: Set, tablename: str) -> None:
@@ -270,3 +271,137 @@ def load_from_cache(key: str, db: "TypeDAL") -> Any | None:
270
271
  return _load_from_cache(key, db)
271
272
 
272
273
  return None # pragma: no cover
274
+
275
+
276
+ def humanize_bytes(size: int | float) -> str:
277
+ """
278
+ Turn a number of bytes into a human-readable version (e.g. 124 GB).
279
+ """
280
+ if not size:
281
+ return "0"
282
+
283
+ suffixes = ["B", "KB", "MB", "GB", "TB", "PB"] # List of suffixes for different magnitudes
284
+ suffix_index = 0
285
+
286
+ while size > 1024 and suffix_index < len(suffixes) - 1:
287
+ suffix_index += 1
288
+ size /= 1024.0
289
+
290
+ return f"{size:.2f} {suffixes[suffix_index]}"
291
+
292
+
293
def _expired_and_valid_query() -> tuple[str, str]:
    """
    Build two SQL subqueries over the cache table.

    Returns:
        A tuple ``(expired_sql, valid_sql)``: the first selects the ids of
        entries whose (non-null) expiry lies in the past, the second selects
        the ids of every entry not in that expired set.
    """
    # Entries with an expiry timestamp that has already passed:
    expired_sql = (
        _TypedalCache.where(lambda row: (row.expires_at < get_now()) & (row.expires_at != None))
        .select(_TypedalCache.id)
        .to_sql()
    )

    # Everything else is still valid:
    valid_sql = (
        _TypedalCache.where(~_TypedalCache.id.belongs(expired_sql))
        .select(_TypedalCache.id)
        .to_sql()
    )

    return expired_sql, valid_sql
303
+
304
+
305
# Type variable for the per-bucket payload of a `Stats` report
# (e.g. Stats[RowStats], Stats[TableStats], Stats[GenericStats]).
T = typing.TypeVar("T")
# Every stats report has the same three buckets of identical shape.
Stats = typing.TypedDict("Stats", {"total": T, "valid": T, "expired": T})

# Stats for a single table row; the key is deliberately human-readable
# because it ends up as a column header in the CLI's tabulated output.
RowStats = typing.TypedDict(
    "RowStats",
    {
        "Dependent Cache Entries": int,
    },
)
314
+
315
+
316
def _row_stats(db: "TypeDAL", table: str, query: Query) -> RowStats:
    """
    Count the cache entries that depend on rows of `table` matched by `query`.

    Groups the dependency records per cache entry, so the length of the
    result set equals the number of distinct dependent cache entries.
    """
    count_field = _TypedalCacheDependency.entry.count()

    grouped: TypedRows[_TypedalCacheDependency] = db(query & (_TypedalCacheDependency.table == table)).select(
        _TypedalCacheDependency.entry, count_field, groupby=_TypedalCacheDependency.entry
    )

    return {"Dependent Cache Entries": len(grouped)}
324
+
325
+
326
def row_stats(db: "TypeDAL", table: str, row_id: str) -> Stats[RowStats]:
    """
    Collect caching stats for a specific table row (by ID).

    Returns totals plus a breakdown into still-valid and already-expired
    dependent cache entries.
    """
    expired_items, valid_items = _expired_and_valid_query()

    base_query = _TypedalCacheDependency.idx == row_id

    valid_query = _TypedalCacheDependency.entry.belongs(valid_items) & base_query
    expired_query = _TypedalCacheDependency.entry.belongs(expired_items) & base_query

    return {
        "total": _row_stats(db, table, base_query),
        "valid": _row_stats(db, table, valid_query),
        "expired": _row_stats(db, table, expired_query),
    }
339
+
340
+
341
# Stats for a whole table; keys are deliberately human-readable because
# they end up as column headers in the CLI's tabulated output.
TableStats = typing.TypedDict(
    "TableStats",
    {
        "Dependent Cache Entries": int,
        "Associated Table IDs": int,
    },
)
348
+
349
+
350
def _table_stats(db: "TypeDAL", table: str, query: Query) -> TableStats:
    """
    Count the cache entries depending on `table` (filtered by `query`),
    plus the total number of table-row references those entries hold.
    """
    count_field = _TypedalCacheDependency.entry.count()

    grouped: TypedRows[_TypedalCacheDependency] = db(query & (_TypedalCacheDependency.table == table)).select(
        _TypedalCacheDependency.entry, count_field, groupby=_TypedalCacheDependency.entry
    )

    # One group per dependent cache entry; the per-group count sums to the
    # number of associated table ids.
    return {
        "Dependent Cache Entries": len(grouped),
        "Associated Table IDs": sum(grouped.column(count_field)),
    }
359
+
360
+
361
def table_stats(db: "TypeDAL", table: str) -> Stats[TableStats]:
    """
    Collect caching stats for a table.

    Returns totals plus a breakdown into still-valid and already-expired
    dependent cache entries.
    """
    expired_items, valid_items = _expired_and_valid_query()

    total = _table_stats(db, table, _TypedalCacheDependency.id > 0)
    valid = _table_stats(db, table, _TypedalCacheDependency.entry.belongs(valid_items))
    expired = _table_stats(db, table, _TypedalCacheDependency.entry.belongs(expired_items))

    return {"total": total, "valid": valid, "expired": expired}
372
+
373
+
374
# Whole-cache statistics: number of cache entries, number of dependency
# links, and the human-readable total size of the stored data blobs.
GenericStats = typing.TypedDict(
    "GenericStats",
    {
        "entries": int,
        "dependencies": int,
        "size": str,
    },
)
382
+
383
+
384
def _calculate_stats(db: "TypeDAL", query: Query) -> GenericStats:
    """
    Compute entry count, dependency count and total data size for the
    cache rows matched by `query`.
    """
    # Sum of the (string) lengths of the stored data blobs:
    sum_len_field = _TypedalCache.data.len().sum()
    size_row = db(query).select(sum_len_field).first()

    total_size = size_row[sum_len_field] if size_row else 0  # type: ignore

    entries = _TypedalCache.where(query).count()
    dependencies = db(_TypedalCacheDependency.entry.belongs(query)).count()

    return {
        "entries": entries,
        "dependencies": dependencies,
        "size": humanize_bytes(total_size),
    }
395
+
396
+
397
def calculate_stats(db: "TypeDAL") -> Stats[GenericStats]:
    """
    Collect generic caching stats.

    Returns totals plus a breakdown into still-valid and already-expired
    cache entries.
    """
    expired_items, valid_items = _expired_and_valid_query()

    total = _calculate_stats(db, _TypedalCache.id > 0)
    valid = _calculate_stats(db, _TypedalCache.id.belongs(valid_items))
    expired = _calculate_stats(db, _TypedalCache.id.belongs(expired_items))

    return {"total": total, "valid": valid, "expired": expired}
typedal/cli.py CHANGED
@@ -2,6 +2,7 @@
2
2
  Typer CLI for TypeDAL.
3
3
  """
4
4
 
5
+ import fnmatch
5
6
  import sys
6
7
  import typing
7
8
  import warnings
@@ -13,6 +14,8 @@ from configuraptor import asdict
13
14
  from configuraptor.alias import is_alias
14
15
  from configuraptor.helpers import is_optional
15
16
 
17
+ from .types import AnyDict
18
+
16
19
  try:
17
20
  import edwh_migrate
18
21
  import pydal2sql # noqa: F401
@@ -20,6 +23,7 @@ try:
20
23
  import rich
21
24
  import tomlkit
22
25
  import typer
26
+ from tabulate import tabulate
23
27
  except ImportError as e: # pragma: no cover
24
28
  # ImportWarning is hidden by default
25
29
  warnings.warn(
@@ -39,14 +43,16 @@ from pydal2sql.types import (
39
43
  from pydal2sql_core import core_alter, core_create
40
44
  from typing_extensions import Never
41
45
 
46
+ from . import caching
42
47
  from .__about__ import __version__
43
48
  from .config import TypeDALConfig, _fill_defaults, load_config, transform
49
+ from .core import TypeDAL
44
50
 
45
51
  app = typer.Typer(
46
52
  no_args_is_help=True,
47
53
  )
48
54
 
49
- questionary_types: dict[typing.Hashable, Optional[dict[str, typing.Any]]] = {
55
+ questionary_types: dict[typing.Hashable, Optional[AnyDict]] = {
50
56
  str: {
51
57
  "type": "text",
52
58
  "validate": lambda text: True if len(text) > 0 else "Please enter a value",
@@ -93,7 +99,7 @@ T = typing.TypeVar("T")
93
99
  notfound = object()
94
100
 
95
101
 
96
- def _get_question(prop: str, annotation: typing.Type[T]) -> Optional[dict[str, typing.Any]]: # pragma: no cover
102
+ def _get_question(prop: str, annotation: typing.Type[T]) -> Optional[AnyDict]: # pragma: no cover
97
103
  question = questionary_types.get(prop, notfound)
98
104
  if question is notfound:
99
105
  # None means skip the question, notfound means use the type default!
@@ -148,11 +154,7 @@ def setup(
148
154
 
149
155
  toml_contents = toml_path.read_text()
150
156
  # tomli has native Python types, tomlkit doesn't but preserves comments
151
- toml_obj: dict[str, typing.Any] = tomli.loads(toml_contents)
152
-
153
- if "[tool.typedal]" in toml_contents:
154
- section = toml_obj["tool"]["typedal"]
155
- config.update(**section, _overwrite=True)
157
+ toml_obj: AnyDict = tomli.loads(toml_contents)
156
158
 
157
159
  if "[tool.pydal2sql]" in toml_contents:
158
160
  mapping = {"": ""} # <- placeholder
@@ -170,6 +172,10 @@ def setup(
170
172
 
171
173
  config.update(**extra_config)
172
174
 
175
+ if "[tool.typedal]" in toml_contents:
176
+ section = toml_obj["tool"]["typedal"]
177
+ config.update(**section, _overwrite=True)
178
+
173
179
  data = asdict(config, with_top_level_key=False)
174
180
  data["migrate"] = None # determined based on existence of input/output file.
175
181
 
@@ -203,7 +209,7 @@ def setup(
203
209
  transform(data, prop)
204
210
 
205
211
  with toml_path.open("r") as f:
206
- old_contents: dict[str, typing.Any] = tomlkit.load(f)
212
+ old_contents: AnyDict = tomlkit.load(f)
207
213
 
208
214
  if "tool" not in old_contents:
209
215
  old_contents["tool"] = {}
@@ -220,9 +226,10 @@ def setup(
220
226
  rich.print(f"[green]Wrote updated config to {toml_path}![/green]")
221
227
 
222
228
 
223
- @app.command()
229
+ @app.command(name="migrations.generate")
224
230
  @with_exit_code(hide_tb=IS_DEBUG)
225
231
  def generate_migrations(
232
+ connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
226
233
  filename_before: OptionalArgument[str] = None,
227
234
  filename_after: OptionalArgument[str] = None,
228
235
  dialect: DBType_Option = None,
@@ -239,7 +246,7 @@ def generate_migrations(
239
246
  """
240
247
  # 1. choose CREATE or ALTER based on whether 'output' exists?
241
248
  # 2. pass right args based on 'config' to function chosen in 1.
242
- generic_config = load_config()
249
+ generic_config = load_config(connection)
243
250
  pydal2sql_config = generic_config.to_pydal2sql()
244
251
  pydal2sql_config.update(
245
252
  magic=magic,
@@ -250,6 +257,7 @@ def generate_migrations(
250
257
  format=output_format,
251
258
  input=filename_before,
252
259
  output=output_file,
260
+ _skip_none=True,
253
261
  )
254
262
 
255
263
  if pydal2sql_config.output and Path(pydal2sql_config.output).exists():
@@ -289,9 +297,10 @@ def generate_migrations(
289
297
  )
290
298
 
291
299
 
292
- @app.command()
300
+ @app.command(name="migrations.run")
293
301
  @with_exit_code(hide_tb=IS_DEBUG)
294
302
  def run_migrations(
303
+ connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
295
304
  migrations_file: OptionalArgument[str] = None,
296
305
  db_uri: Optional[str] = None,
297
306
  db_folder: Optional[str] = None,
@@ -311,7 +320,7 @@ def run_migrations(
311
320
  # 1. build migrate Config from TypeDAL config
312
321
  # 2. import right file
313
322
  # 3. `activate_migrations`
314
- generic_config = load_config()
323
+ generic_config = load_config(connection)
315
324
  migrate_config = generic_config.to_migrate()
316
325
 
317
326
  migrate_config.update(
@@ -326,6 +335,7 @@ def run_migrations(
326
335
  create_flag_location=create_flag_location,
327
336
  db_folder=db_folder,
328
337
  migrations_file=migrations_file,
338
+ _skip_none=True,
329
339
  )
330
340
 
331
341
  if dry_run:
@@ -335,6 +345,220 @@ def run_migrations(
335
345
  return True
336
346
 
337
347
 
348
+ def match_strings(patterns: list[str] | str, string_list: list[str]) -> list[str]:
349
+ """
350
+ Glob but on a list of strings.
351
+ """
352
+ if isinstance(patterns, str):
353
+ patterns = [patterns]
354
+
355
+ matches = []
356
+ for pattern in patterns:
357
+ matches.extend([s for s in string_list if fnmatch.fnmatch(s, pattern)])
358
+
359
+ return matches
360
+
361
+
362
@app.command(name="migrations.fake")
@with_exit_code(hide_tb=IS_DEBUG)
def fake_migrations(
    names: typing.Annotated[list[str], typer.Argument()] = None,
    all: bool = False,  # noqa: A002
    connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
    migrations_file: Optional[str] = None,
    db_uri: Optional[str] = None,
    db_folder: Optional[str] = None,
    migrate_table: Optional[str] = None,
    dry_run: bool = False,
) -> int:
    """
    Mark one or more migrations as completed in the database, without executing the SQL code.

    glob is supported in 'names'

    Args:
        names: migration names (glob patterns allowed); required unless --all is given.
        all: fake every known migration instead of a named subset.
        connection: config section to load ([tool.typedal.<connection>]).
        migrations_file: override for the edwh-migrate migrations file.
        db_uri: override for the database uri.
        db_folder: override for the database folder.
        migrate_table: override for the migration bookkeeping table name.
        dry_run: only print which migrations would be faked; change nothing.

    Returns:
        Exit code: 1 when neither names nor --all was provided, 0 otherwise.
    """
    if not (names or all):
        rich.print("Please provide one or more migration names, or pass --all to fake all.")
        return 1

    generic_config = load_config(connection)
    migrate_config = generic_config.to_migrate()

    # _skip_none: only overwrite config values the user actually passed on the CLI
    migrate_config.update(
        migrate_uri=db_uri,
        migrate_table=migrate_table,
        db_folder=db_folder,
        migrations_file=migrations_file,
        _skip_none=True,
    )

    migrations = edwh_migrate.list_migrations(migrate_config)

    migration_names = list(migrations.keys())

    # Resolve user-provided glob patterns against the known migration names.
    to_fake = migration_names if all else match_strings(names or [], migration_names)

    try:
        db = edwh_migrate.setup_db(config=migrate_config)
    except edwh_migrate.migrate.DatabaseNotYetInitialized:
        # Fresh database: let edwh-migrate create its bookkeeping table first.
        db = edwh_migrate.setup_db(
            config=migrate_config, migrate=True, migrate_enabled=True, remove_migrate_tablefile=True
        )

    # Names in `to_fake` that are already marked as installed in the database.
    previously_migrated = (
        db(
            db.ewh_implemented_features.name.belongs(to_fake)
            & (db.ewh_implemented_features.installed == True)  # noqa E712
        )
        .select(db.ewh_implemented_features.name)
        .column("name")
    )

    if dry_run:
        rich.print("Would migrate these:", [_ for _ in to_fake if _ not in previously_migrated])
        return 0

    n = len(to_fake)
    print(f"{len(previously_migrated)} / {n} were already installed.")

    for name in to_fake:
        if name in previously_migrated:
            continue

        edwh_migrate.mark_migration(db, name=name, installed=True)

    db.commit()
    # NOTE(review): `n` counts all selected migrations, including those already
    # installed, so this message may overstate the number of newly faked entries.
    rich.print(f"Faked {n} new migrations.")
    return 0
432
+
433
+
434
# Mapping of row labels to their column dicts.
AnyNestedDict: typing.TypeAlias = dict[str, AnyDict]


def tabulate_data(data: AnyNestedDict) -> None:
    """
    Print a nested dict of data in a nice, human-readable table.

    The outer keys become the first (unnamed) column and each inner dict
    supplies the remaining columns of its row.
    """
    rows = [{"": label, **columns} for label, columns in data.items()]

    # tabulate derives the headers from the dict keys of each row.
    print(tabulate(rows, headers="keys"))
449
+
450
+
451
# The output formats the cache CLI commands understand.
FormatOptions: typing.TypeAlias = typing.Literal["plaintext", "json", "yaml", "toml"]


def get_output_format(fmt: FormatOptions) -> typing.Callable[[AnyNestedDict], None]:
    """
    Map a format name to a printer function for (nested) dicts of data.

    Raises:
        ValueError: if `fmt` is not one of the supported format options.
    """
    if fmt == "plaintext":
        return tabulate_data

    if fmt == "json":

        def output(_data: AnyDict | AnyNestedDict) -> None:
            # Imported lazily so the dependency is only needed for this format.
            import json

            print(json.dumps(_data, indent=2))

        return output

    if fmt == "yaml":

        def output(_data: AnyDict | AnyNestedDict) -> None:
            import yaml

            print(yaml.dump(_data))

        return output

    if fmt == "toml":

        def output(_data: AnyDict | AnyNestedDict) -> None:
            import tomli_w

            print(tomli_w.dumps(_data))

        return output

    options = typing.get_args(FormatOptions)
    raise ValueError(f"Invalid format '{fmt}'. Please choose one of {options}.")
488
+
489
+
490
@app.command(name="cache.stats")
@with_exit_code(hide_tb=IS_DEBUG)
def cache_stats(
    identifier: typing.Annotated[str, typer.Argument()] = "",
    connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
    fmt: typing.Annotated[
        str, typer.Option("--format", "--fmt", "-f", help="plaintext (default) or json")
    ] = "plaintext",
) -> None:
    """
    Collect caching stats.

    Without an identifier, global stats are shown; `table` narrows to one
    table and `table.id` to one row of that table.

    Examples:
        typedal cache.stats
        typedal cache.stats user
        typedal cache.stats user.3
    """
    # NOTE(review): the --format help text omits 'yaml' and 'toml', which
    # get_output_format also accepts — confirm whether that is intentional.
    config = load_config(connection)
    # migrate=False: only read from the existing cache tables.
    db = TypeDAL(config=config, migrate=False, fake_migrate=False)

    output = get_output_format(typing.cast(FormatOptions, fmt))

    data: AnyDict
    parts = identifier.split(".")
    match parts:
        case [] | [""]:
            # generic stats
            data = caching.calculate_stats(db)  # type: ignore
        case [table]:
            # table stats
            data = caching.table_stats(db, table)  # type: ignore
        case [table, row_id]:
            # row stats
            data = caching.row_stats(db, table, row_id)  # type: ignore
        case _:
            raise ValueError("Please use the format `table` or `table.id` for this command.")

    output(data)

    # todo:
    # - sort by most dependencies
    # - sort by biggest data
    # - include size for table_stats, row_stats
    # - group by table
535
+
536
@app.command(name="cache.clear")
@with_exit_code(hide_tb=IS_DEBUG)
def cache_clear(
    connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
    purge: typing.Annotated[bool, typer.Option("--all", "--purge", "-p")] = False,
) -> None:
    """
    Clear (expired) items from the cache.

    Args:
        connection (optional): [tool.typedal.<connection>]
        purge (default: no): remove all items, not only expired
    """
    config = load_config(connection)
    # migrate=False: only deleting from the existing cache tables.
    db = TypeDAL(config=config, migrate=False, fake_migrate=False)

    if purge:
        # Remove every cache entry, regardless of expiry.
        caching.clear_cache()
        print("Emptied cache")
    else:
        # Remove only entries whose expiry timestamp lies in the past.
        n = caching.clear_expired()
        print(f"Removed {n} expired from cache")

    # Persist the deletions.
    db.commit()
560
+
561
+
338
562
  def version_callback() -> Never:
339
563
  """
340
564
  --version requested!
typedal/config.py CHANGED
@@ -10,11 +10,13 @@ from collections import defaultdict
10
10
  from pathlib import Path
11
11
  from typing import Any, Optional
12
12
 
13
- import black.files
14
13
  import tomli
15
14
  from configuraptor import TypedConfig, alias
15
+ from configuraptor.helpers import find_pyproject_toml
16
16
  from dotenv import dotenv_values, find_dotenv
17
17
 
18
+ from .types import AnyDict
19
+
18
20
  if typing.TYPE_CHECKING: # pragma: no cover
19
21
  from edwh_migrate import Config as MigrateConfig
20
22
  from pydal2sql.typer_support import Config as P2SConfig
@@ -62,11 +64,7 @@ class TypeDALConfig(TypedConfig):
62
64
  db_type: str = alias("dialect")
63
65
  db_folder: str = alias("folder")
64
66
 
65
- def __repr__(self) -> str:
66
- """
67
- Dump the config to a (fancy) string.
68
- """
69
- return f"<TypeDAL {self.__dict__}>"
67
+ # repr set by @beautify (by inheriting from TypedConfig)
70
68
 
71
69
  def to_pydal2sql(self) -> "P2SConfig":
72
70
  """
@@ -124,14 +122,8 @@ class TypeDALConfig(TypedConfig):
124
122
  )
125
123
 
126
124
 
127
- def find_pyproject_toml(directory: str | None = None) -> typing.Optional[str]:
128
- """
129
- Find the project's config toml, looks up until it finds the project root (black's logic).
130
- """
131
- return black.files.find_pyproject_toml((directory or os.getcwd(),))
132
-
133
125
 
134
- def _load_toml(path: str | bool | None = True) -> tuple[str, dict[str, Any]]:
126
+ def _load_toml(path: str | bool | None = True) -> tuple[str, AnyDict]:
135
127
  """
136
128
  Path can be a file, a directory, a bool or None.
137
129
 
@@ -147,7 +139,7 @@ def _load_toml(path: str | bool | None = True) -> tuple[str, dict[str, Any]]:
147
139
  elif Path(str(path)).is_file():
148
140
  toml_path = str(path)
149
141
  else:
150
- toml_path = find_pyproject_toml(str(path))
142
+ toml_path = find_pyproject_toml(path)
151
143
 
152
144
  if not toml_path:
153
145
  # nothing to load
@@ -157,13 +149,13 @@ def _load_toml(path: str | bool | None = True) -> tuple[str, dict[str, Any]]:
157
149
  with open(toml_path, "rb") as f:
158
150
  data = tomli.load(f)
159
151
 
160
- return toml_path or "", typing.cast(dict[str, Any], data["tool"]["typedal"])
152
+ return str(toml_path) or "", typing.cast(AnyDict, data["tool"]["typedal"])
161
153
  except Exception as e:
162
154
  warnings.warn(f"Could not load typedal config toml: {e}", source=e)
163
- return toml_path or "", {}
155
+ return str(toml_path) or "", {}
164
156
 
165
157
 
166
- def _load_dotenv(path: str | bool | None = True) -> tuple[str, dict[str, Any]]:
158
+ def _load_dotenv(path: str | bool | None = True) -> tuple[str, AnyDict]:
167
159
  fallback_data = {k.lower().removeprefix("typedal_"): v for k, v in os.environ.items()}
168
160
  if path is False:
169
161
  dotenv_path = None
@@ -203,7 +195,7 @@ def get_db_for_alias(db_name: str) -> str:
203
195
  return DB_ALIASES.get(db_name, db_name)
204
196
 
205
197
 
206
- DEFAULTS: dict[str, Any | typing.Callable[[dict[str, Any]], Any]] = {
198
+ DEFAULTS: dict[str, Any | typing.Callable[[AnyDict], Any]] = {
207
199
  "database": lambda data: data.get("db_uri") or "sqlite:memory",
208
200
  "dialect": lambda data: (
209
201
  get_db_for_alias(data["database"].split(":")[0]) if ":" in data["database"] else data.get("db_type")
@@ -217,14 +209,14 @@ DEFAULTS: dict[str, Any | typing.Callable[[dict[str, Any]], Any]] = {
217
209
  }
218
210
 
219
211
 
220
- def _fill_defaults(data: dict[str, Any], prop: str, fallback: Any = None) -> None:
212
+ def _fill_defaults(data: AnyDict, prop: str, fallback: Any = None) -> None:
221
213
  default = DEFAULTS.get(prop, fallback)
222
214
  if callable(default):
223
215
  default = default(data)
224
216
  data[prop] = default
225
217
 
226
218
 
227
- def fill_defaults(data: dict[str, Any], prop: str) -> None:
219
+ def fill_defaults(data: AnyDict, prop: str) -> None:
228
220
  """
229
221
  Fill missing property defaults with (calculated) sane defaults.
230
222
  """
@@ -232,7 +224,7 @@ def fill_defaults(data: dict[str, Any], prop: str) -> None:
232
224
  _fill_defaults(data, prop)
233
225
 
234
226
 
235
- TRANSFORMS: dict[str, typing.Callable[[dict[str, Any]], Any]] = {
227
+ TRANSFORMS: dict[str, typing.Callable[[AnyDict], Any]] = {
236
228
  "database": lambda data: (
237
229
  data["database"]
238
230
  if (":" in data["database"] or not data.get("dialect"))
@@ -241,7 +233,7 @@ TRANSFORMS: dict[str, typing.Callable[[dict[str, Any]], Any]] = {
241
233
  }
242
234
 
243
235
 
244
- def transform(data: dict[str, Any], prop: str) -> bool:
236
+ def transform(data: AnyDict, prop: str) -> bool:
245
237
  """
246
238
  After the user has chosen a value, possibly transform it.
247
239
  """
@@ -281,7 +273,7 @@ def expand_posix_vars(posix_expr: str, context: dict[str, str]) -> str:
281
273
  return re.sub(pattern, replace_var, posix_expr)
282
274
 
283
275
 
284
- def expand_env_vars_into_toml_values(toml: dict[str, Any], env: dict[str, Any]) -> None:
276
+ def expand_env_vars_into_toml_values(toml: AnyDict, env: AnyDict) -> None:
285
277
  """
286
278
  Recursively expands POSIX/Docker Compose-like environment variables in a TOML dictionary.
287
279
 
@@ -334,7 +326,10 @@ def expand_env_vars_into_toml_values(toml: dict[str, Any], env: dict[str, Any])
334
326
 
335
327
 
336
328
  def load_config(
337
- _use_pyproject: bool | str | None = True, _use_env: bool | str | None = True, **fallback: Any
329
+ connection_name: Optional[str] = None,
330
+ _use_pyproject: bool | str | None = True,
331
+ _use_env: bool | str | None = True,
332
+ **fallback: Any,
338
333
  ) -> TypeDALConfig:
339
334
  """
340
335
  Combines multiple sources of config into one config instance.
@@ -348,8 +343,8 @@ def load_config(
348
343
 
349
344
  expand_env_vars_into_toml_values(toml, dotenv)
350
345
 
351
- connection_name = dotenv.get("connection", "") or toml.get("default", "")
352
- connection: dict[str, Any] = (toml.get(connection_name) if connection_name else toml) or {}
346
+ connection_name = connection_name or dotenv.get("connection", "") or toml.get("default", "")
347
+ connection: AnyDict = (toml.get(connection_name) if connection_name else toml) or {}
353
348
 
354
349
  combined = connection | dotenv | fallback
355
350
  combined = {k.replace("-", "_"): v for k, v in combined.items()}
typedal/core.py CHANGED
@@ -45,6 +45,7 @@ from .types import (
45
45
  AfterDeleteCallable,
46
46
  AfterInsertCallable,
47
47
  AfterUpdateCallable,
48
+ AnyDict,
48
49
  BeforeDeleteCallable,
49
50
  BeforeInsertCallable,
50
51
  BeforeUpdateCallable,
@@ -353,8 +354,8 @@ class TypeDAL(pydal.DAL): # type: ignore
353
354
  migrate_enabled: bool = True,
354
355
  fake_migrate_all: bool = False,
355
356
  decode_credentials: bool = False,
356
- driver_args: Optional[dict[str, Any]] = None,
357
- adapter_args: Optional[dict[str, Any]] = None,
357
+ driver_args: Optional[AnyDict] = None,
358
+ adapter_args: Optional[AnyDict] = None,
358
359
  attempts: int = 5,
359
360
  auto_import: bool = False,
360
361
  bigint_id: bool = False,
@@ -369,13 +370,15 @@ class TypeDAL(pydal.DAL): # type: ignore
369
370
  enable_typedal_caching: bool = None,
370
371
  use_pyproject: bool | str = True,
371
372
  use_env: bool | str = True,
373
+ connection: Optional[str] = None,
374
+ config: Optional[TypeDALConfig] = None,
372
375
  ) -> None:
373
376
  """
374
377
  Adds some internal tables after calling pydal's default init.
375
378
 
376
379
  Set enable_typedal_caching to False to disable this behavior.
377
380
  """
378
- config = load_config(_use_pyproject=use_pyproject, _use_env=use_env)
381
+ config = config or load_config(connection, _use_pyproject=use_pyproject, _use_env=use_env)
379
382
  config.update(
380
383
  database=uri,
381
384
  dialect=uri.split(":")[0] if uri and ":" in uri else None,
@@ -439,7 +442,7 @@ class TypeDAL(pydal.DAL): # type: ignore
439
442
  # try again:
440
443
  return self.define(model, migrate=True, fake_migrate=True, redefine=True)
441
444
 
442
- default_kwargs: typing.ClassVar[typing.Dict[str, Any]] = {
445
+ default_kwargs: typing.ClassVar[AnyDict] = {
443
446
  # fields are 'required' (notnull) by default:
444
447
  "notnull": True,
445
448
  }
@@ -887,7 +890,7 @@ class TableMeta(type):
887
890
 
888
891
  return str(table._insert(**fields))
889
892
 
890
- def bulk_insert(self: typing.Type[T_MetaInstance], items: list[dict[str, Any]]) -> "TypedRows[T_MetaInstance]":
893
+ def bulk_insert(self: typing.Type[T_MetaInstance], items: list[AnyDict]) -> "TypedRows[T_MetaInstance]":
891
894
  """
892
895
  Insert multiple rows, returns a TypedRows set of new instances.
893
896
  """
@@ -896,7 +899,7 @@ class TableMeta(type):
896
899
  return self.where(lambda row: row.id.belongs(result)).collect()
897
900
 
898
901
  def update_or_insert(
899
- self: typing.Type[T_MetaInstance], query: T_Query | dict[str, Any] = DEFAULT, **values: Any
902
+ self: typing.Type[T_MetaInstance], query: T_Query | AnyDict = DEFAULT, **values: Any
900
903
  ) -> T_MetaInstance:
901
904
  """
902
905
  Update a row if query matches, else insert a new one.
@@ -1472,7 +1475,7 @@ class TypedTable(metaclass=TableMeta):
1472
1475
  # underscore variants work for class instances (set up by _setup_instance_methods)
1473
1476
 
1474
1477
  @classmethod
1475
- def as_dict(cls, flat: bool = False, sanitize: bool = True) -> dict[str, Any]:
1478
+ def as_dict(cls, flat: bool = False, sanitize: bool = True) -> AnyDict:
1476
1479
  """
1477
1480
  Dump the object to a plain dict.
1478
1481
 
@@ -1482,7 +1485,7 @@ class TypedTable(metaclass=TableMeta):
1482
1485
  """
1483
1486
  table = cls._ensure_table_defined()
1484
1487
  result = table.as_dict(flat, sanitize)
1485
- return typing.cast(dict[str, Any], result)
1488
+ return typing.cast(AnyDict, result)
1486
1489
 
1487
1490
  @classmethod
1488
1491
  def as_json(cls, sanitize: bool = True, indent: Optional[int] = None, **kwargs: Any) -> str:
@@ -1522,7 +1525,7 @@ class TypedTable(metaclass=TableMeta):
1522
1525
 
1523
1526
  def _as_dict(
1524
1527
  self, datetime_to_str: bool = False, custom_types: typing.Iterable[type] | type | None = None
1525
- ) -> dict[str, Any]:
1528
+ ) -> AnyDict:
1526
1529
  row = self._ensure_matching_row()
1527
1530
 
1528
1531
  result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types)
@@ -1546,7 +1549,7 @@ class TypedTable(metaclass=TableMeta):
1546
1549
 
1547
1550
  result[relationship] = data
1548
1551
 
1549
- return typing.cast(dict[str, Any], result)
1552
+ return typing.cast(AnyDict, result)
1550
1553
 
1551
1554
  def _as_json(
1552
1555
  self,
@@ -1634,7 +1637,7 @@ class TypedTable(metaclass=TableMeta):
1634
1637
 
1635
1638
  # pickling:
1636
1639
 
1637
- def __getstate__(self) -> dict[str, Any]:
1640
+ def __getstate__(self) -> AnyDict:
1638
1641
  """
1639
1642
  State to save when pickling.
1640
1643
 
@@ -1642,7 +1645,7 @@ class TypedTable(metaclass=TableMeta):
1642
1645
  Similar to as_dict but without changing the data of the relationships (dill does that recursively)
1643
1646
  """
1644
1647
  row = self._ensure_matching_row()
1645
- result: dict[str, Any] = row.as_dict()
1648
+ result: AnyDict = row.as_dict()
1646
1649
 
1647
1650
  if _with := getattr(self, "_with", None):
1648
1651
  result["_with"] = _with
@@ -1654,7 +1657,7 @@ class TypedTable(metaclass=TableMeta):
1654
1657
  result["_row"] = self._row.as_json() if self._row else ""
1655
1658
  return result
1656
1659
 
1657
- def __setstate__(self, state: dict[str, Any]) -> None:
1660
+ def __setstate__(self, state: AnyDict) -> None:
1658
1661
  """
1659
1662
  Used by dill when loading from a bytestring.
1660
1663
  """
@@ -1843,14 +1846,14 @@ class TypedRows(typing.Collection[T_MetaInstance], Rows):
1843
1846
  storage_to_dict: bool = False,
1844
1847
  datetime_to_str: bool = False,
1845
1848
  custom_types: list[type] = None,
1846
- ) -> dict[int, dict[str, Any]]:
1849
+ ) -> dict[int, AnyDict]:
1847
1850
  """
1848
1851
  Get the data in a dict of dicts.
1849
1852
  """
1850
1853
  if any([key, compact, storage_to_dict, datetime_to_str, custom_types]):
1851
1854
  # functionality not guaranteed
1852
1855
  return typing.cast(
1853
- dict[int, dict[str, Any]],
1856
+ dict[int, AnyDict],
1854
1857
  super().as_dict(
1855
1858
  key or "id",
1856
1859
  compact,
@@ -1882,14 +1885,12 @@ class TypedRows(typing.Collection[T_MetaInstance], Rows):
1882
1885
  storage_to_dict: bool = False,
1883
1886
  datetime_to_str: bool = False,
1884
1887
  custom_types: list[type] = None,
1885
- ) -> list[dict[str, Any]]:
1888
+ ) -> list[AnyDict]:
1886
1889
  """
1887
1890
  Get the data in a list of dicts.
1888
1891
  """
1889
1892
  if any([compact, storage_to_dict, datetime_to_str, custom_types]):
1890
- return typing.cast(
1891
- list[dict[str, Any]], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types)
1892
- )
1893
+ return typing.cast(list[AnyDict], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types))
1893
1894
 
1894
1895
  return [_.as_dict() for _ in self.records.values()]
1895
1896
 
@@ -1993,7 +1994,7 @@ class TypedRows(typing.Collection[T_MetaInstance], Rows):
1993
1994
  """
1994
1995
  return cls(rows, model, metadata=metadata)
1995
1996
 
1996
- def __getstate__(self) -> dict[str, Any]:
1997
+ def __getstate__(self) -> AnyDict:
1997
1998
  """
1998
1999
  Used by dill to dump to bytes (exclude db connection etc).
1999
2000
  """
@@ -2004,7 +2005,7 @@ class TypedRows(typing.Collection[T_MetaInstance], Rows):
2004
2005
  "colnames": self.colnames,
2005
2006
  }
2006
2007
 
2007
- def __setstate__(self, state: dict[str, Any]) -> None:
2008
+ def __setstate__(self, state: AnyDict) -> None:
2008
2009
  """
2009
2010
  Used by dill when loading from a bytestring.
2010
2011
  """
@@ -2032,7 +2033,7 @@ class QueryBuilder(typing.Generic[T_MetaInstance]):
2032
2033
  model: typing.Type[T_MetaInstance]
2033
2034
  query: Query
2034
2035
  select_args: list[Any]
2035
- select_kwargs: dict[str, Any]
2036
+ select_kwargs: AnyDict
2036
2037
  relationships: dict[str, Relationship[Any]]
2037
2038
  metadata: Metadata
2038
2039
 
@@ -2041,7 +2042,7 @@ class QueryBuilder(typing.Generic[T_MetaInstance]):
2041
2042
  model: typing.Type[T_MetaInstance],
2042
2043
  add_query: Optional[Query] = None,
2043
2044
  select_args: Optional[list[Any]] = None,
2044
- select_kwargs: Optional[dict[str, Any]] = None,
2045
+ select_kwargs: Optional[AnyDict] = None,
2045
2046
  relationships: dict[str, Relationship[Any]] = None,
2046
2047
  metadata: Metadata = None,
2047
2048
  ):
@@ -2091,7 +2092,7 @@ class QueryBuilder(typing.Generic[T_MetaInstance]):
2091
2092
  add_query: Optional[Query] = None,
2092
2093
  overwrite_query: Optional[Query] = None,
2093
2094
  select_args: Optional[list[Any]] = None,
2094
- select_kwargs: Optional[dict[str, Any]] = None,
2095
+ select_kwargs: Optional[AnyDict] = None,
2095
2096
  relationships: dict[str, Relationship[Any]] = None,
2096
2097
  metadata: Metadata = None,
2097
2098
  ) -> "QueryBuilder[T_MetaInstance]":
@@ -2293,7 +2294,7 @@ class QueryBuilder(typing.Generic[T_MetaInstance]):
2293
2294
  db = self._get_db()
2294
2295
  return str(db(self.query)._update(**fields))
2295
2296
 
2296
- def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], dict[str, Any]]:
2297
+ def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], AnyDict]:
2297
2298
  select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL]
2298
2299
  select_kwargs = self.select_kwargs.copy()
2299
2300
  query = self.query
@@ -2412,7 +2413,7 @@ class QueryBuilder(typing.Generic[T_MetaInstance]):
2412
2413
  self,
2413
2414
  query: Query,
2414
2415
  select_args: list[Any],
2415
- select_kwargs: dict[str, Any],
2416
+ select_kwargs: AnyDict,
2416
2417
  metadata: Metadata,
2417
2418
  ) -> tuple[Query, list[Any]]:
2418
2419
  db = self._get_db()
@@ -2766,7 +2767,7 @@ class TypedSet(pydal.objects.Set): # type: ignore # pragma: no cover
2766
2767
  This class is not actually used, only 'cast' by TypeDAL.__call__
2767
2768
  """
2768
2769
 
2769
- def count(self, distinct: bool = None, cache: dict[str, Any] = None) -> int:
2770
+ def count(self, distinct: bool = None, cache: AnyDict = None) -> int:
2770
2771
  """
2771
2772
  Count returns an int.
2772
2773
  """
typedal/for_py4web.py CHANGED
@@ -2,13 +2,12 @@
2
2
  ONLY USE IN COMBINATION WITH PY4WEB!
3
3
  """
4
4
 
5
- from typing import Any
6
-
7
5
  import threadsafevariable
8
6
  from py4web.core import ICECUBE
9
7
  from py4web.core import Fixture as _Fixture
10
8
 
11
9
  from .core import TypeDAL
10
+ from .types import AnyDict
12
11
  from .web2py_py4web_shared import AuthUser
13
12
 
14
13
 
@@ -23,20 +22,20 @@ class DAL(TypeDAL, Fixture): # pragma: no cover
23
22
  Fixture similar to the py4web pydal fixture, but for typedal.
24
23
  """
25
24
 
26
- def on_request(self, _: dict[str, Any]) -> None:
25
+ def on_request(self, _: AnyDict) -> None:
27
26
  """
28
27
  Make sure there is a database connection when a request comes in.
29
28
  """
30
29
  self.get_connection_from_pool_or_new()
31
30
  threadsafevariable.ThreadSafeVariable.restore(ICECUBE)
32
31
 
33
- def on_error(self, _: dict[str, Any]) -> None:
32
+ def on_error(self, _: AnyDict) -> None:
34
33
  """
35
34
  Rollback db on error.
36
35
  """
37
36
  self.recycle_connection_in_pool_or_close("rollback")
38
37
 
39
- def on_success(self, _: dict[str, Any]) -> None:
38
+ def on_success(self, _: AnyDict) -> None:
40
39
  """
41
40
  Commit db on success.
42
41
  """
typedal/helpers.py CHANGED
@@ -8,6 +8,8 @@ import typing
8
8
  from collections import ChainMap
9
9
  from typing import Any
10
10
 
11
+ from .types import AnyDict
12
+
11
13
  T = typing.TypeVar("T")
12
14
 
13
15
 
@@ -30,7 +32,7 @@ def _all_annotations(cls: type) -> ChainMap[str, type]:
30
32
  return ChainMap(*(c.__annotations__ for c in getattr(cls, "__mro__", []) if "__annotations__" in c.__dict__))
31
33
 
32
34
 
33
- def all_dict(cls: type) -> dict[str, Any]:
35
+ def all_dict(cls: type) -> AnyDict:
34
36
  """
35
37
  Get the internal data of a class and all it's parents.
36
38
  """
typedal/types.py CHANGED
@@ -16,6 +16,8 @@ from pydal.objects import Set as _Set
16
16
  from pydal.validators import Validator as _Validator
17
17
  from typing_extensions import NotRequired
18
18
 
19
+ AnyDict: typing.TypeAlias = dict[str, Any]
20
+
19
21
 
20
22
  class Query(_Query): # type: ignore
21
23
  """
@@ -117,7 +119,7 @@ class PaginateDict(TypedDict):
117
119
  Result of PaginatedRows.as_dict().
118
120
  """
119
121
 
120
- data: dict[int, dict[str, Any]]
122
+ data: dict[int, AnyDict]
121
123
  pagination: Pagination
122
124
 
123
125
 
@@ -159,7 +161,7 @@ class Metadata(TypedDict):
159
161
 
160
162
  final_query: NotRequired[Query | str | None]
161
163
  final_args: NotRequired[list[Any]]
162
- final_kwargs: NotRequired[dict[str, Any]]
164
+ final_kwargs: NotRequired[AnyDict]
163
165
  relationships: NotRequired[set[str]]
164
166
 
165
167
  sql: NotRequired[str]
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.3
2
2
  Name: TypeDAL
3
- Version: 2.4.0
3
+ Version: 3.0.0
4
4
  Summary: Typing support for PyDAL
5
5
  Project-URL: Documentation, https://typedal.readthedocs.io/
6
6
  Project-URL: Issues, https://github.com/trialandsuccess/TypeDAL/issues
@@ -16,14 +16,15 @@ Classifier: Programming Language :: Python :: Implementation :: CPython
16
16
  Classifier: Programming Language :: Python :: Implementation :: PyPy
17
17
  Requires-Python: >=3.10
18
18
  Requires-Dist: configurable-json
19
- Requires-Dist: configuraptor>=1.24.0
19
+ Requires-Dist: configuraptor>=1.26.2
20
20
  Requires-Dist: dill
21
21
  Requires-Dist: pydal
22
22
  Provides-Extra: all
23
- Requires-Dist: edwh-migrate; extra == 'all'
23
+ Requires-Dist: edwh-migrate>=0.8.0b1; extra == 'all'
24
24
  Requires-Dist: py4web; extra == 'all'
25
- Requires-Dist: pydal2sql[all]; extra == 'all'
25
+ Requires-Dist: pydal2sql[all]>=1.1.3; extra == 'all'
26
26
  Requires-Dist: questionary; extra == 'all'
27
+ Requires-Dist: tabulate; extra == 'all'
27
28
  Requires-Dist: tomlkit; extra == 'all'
28
29
  Requires-Dist: typer; extra == 'all'
29
30
  Provides-Extra: dev
@@ -33,10 +34,13 @@ Requires-Dist: mkdocs-dracula-theme; extra == 'dev'
33
34
  Requires-Dist: pytest-mypy-testing; extra == 'dev'
34
35
  Requires-Dist: python-semantic-release<8; extra == 'dev'
35
36
  Requires-Dist: su6[all]; extra == 'dev'
37
+ Requires-Dist: types-pyyaml; extra == 'dev'
38
+ Requires-Dist: types-tabulate; extra == 'dev'
36
39
  Provides-Extra: migrations
37
- Requires-Dist: edwh-migrate; extra == 'migrations'
38
- Requires-Dist: pydal2sql; extra == 'migrations'
40
+ Requires-Dist: edwh-migrate>=0.8.0b1; extra == 'migrations'
41
+ Requires-Dist: pydal2sql>=1.1.3; extra == 'migrations'
39
42
  Requires-Dist: questionary; extra == 'migrations'
43
+ Requires-Dist: tabulate; extra == 'migrations'
40
44
  Requires-Dist: tomlkit; extra == 'migrations'
41
45
  Requires-Dist: typer; extra == 'migrations'
42
46
  Provides-Extra: py4web
@@ -0,0 +1,18 @@
1
+ typedal/__about__.py,sha256=q5Cn2Nax8QEhpWVeFUyO2PApUwXIgvp5_OADI5f0kMc,206
2
+ typedal/__init__.py,sha256=QQpLiVl9w9hm2LBxey49Y_tCF_VB2bScVaS_mCjYy54,366
3
+ typedal/caching.py,sha256=8UABVAhOlBpL96ykmqhxLaFYOe-XeAh7JoGh57OkxP8,11818
4
+ typedal/cli.py,sha256=5-2U_pQOZNKHmhefiYtkd7g6B0DAXzjf4A1Jh7D37io,18427
5
+ typedal/config.py,sha256=KDJXRsIQuFpSZy5XpSJiC_9WGLlmaOexACW0sWdCw54,11626
6
+ typedal/core.py,sha256=qgJPvlcQYCujsjiiD6SOhWbIr1lxoUDpZUkMnK-mcDQ,95038
7
+ typedal/fields.py,sha256=z2PD9vLWqBR_zXtiY0DthqTG4AeF3yxKoeuVfGXnSdg,5197
8
+ typedal/for_py4web.py,sha256=d07b8hL_PvNDUS26Z5fDH2OxWb-IETBuAFPSzrRwm04,1285
9
+ typedal/for_web2py.py,sha256=zvd5xC-SmuKc0JLDqT3hMIs6COaYnwTFXD_BIeC1vug,1832
10
+ typedal/helpers.py,sha256=BFuGd-1tBA1-QS91C9PEvNY5z5KFHd3gTplxxDWdwSo,6509
11
+ typedal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
12
+ typedal/types.py,sha256=1kGkNX6vfGg6ln84AG558C4Zx5ACRz-emrUTnuy-rRY,3410
13
+ typedal/web2py_py4web_shared.py,sha256=cEbjkK0WOS9Q0nTyZuQaJWffeP4bjrL79Bx0xGy_UOs,1504
14
+ typedal/serializers/as_json.py,sha256=ffo152W-sARYXym4BzwX709rrO2-QwKk2KunWY8RNl4,2229
15
+ typedal-3.0.0.dist-info/METADATA,sha256=32JYzSrTHFkQomZ2v2iEAv0MSLx0VOnY-2w2pV36XPQ,7782
16
+ typedal-3.0.0.dist-info/WHEEL,sha256=uNdcs2TADwSd5pVaP0Z_kcjcvvTUklh2S7bxZMF8Uj0,87
17
+ typedal-3.0.0.dist-info/entry_points.txt,sha256=m1wqcc_10rHWPdlQ71zEkmJDADUAnZtn7Jac_6mbyUc,44
18
+ typedal-3.0.0.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: hatchling 1.17.1
2
+ Generator: hatchling 1.22.4
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
@@ -1,18 +0,0 @@
1
- typedal/__about__.py,sha256=ld9eMafpOoWREP5zR3MpQlKT_eergtY1xSbMOYaWfZ4,206
2
- typedal/__init__.py,sha256=QQpLiVl9w9hm2LBxey49Y_tCF_VB2bScVaS_mCjYy54,366
3
- typedal/caching.py,sha256=GCeU3m6O9VUOs9zDYlWILrjIEUcbfKJBjSlenkUYaoA,7710
4
- typedal/cli.py,sha256=--FH_WbjOiMFTqPtCwiWT8JHs2FArkFfWePG11w5WL0,11801
5
- typedal/config.py,sha256=wVfBZRFstPN6sC5E2JHYSKL_pj6LPoK5WxCzKJCULGc,11909
6
- typedal/core.py,sha256=DMc9071l1FiKnS7kHZNLdnlGCTb7p0VCJaLVJKGEMhg,95118
7
- typedal/fields.py,sha256=z2PD9vLWqBR_zXtiY0DthqTG4AeF3yxKoeuVfGXnSdg,5197
8
- typedal/for_py4web.py,sha256=xOm-ypTQTmTSzInvVQAf7f7c4XQ1il-fnT7kZoPoKP8,1303
9
- typedal/for_web2py.py,sha256=zvd5xC-SmuKc0JLDqT3hMIs6COaYnwTFXD_BIeC1vug,1832
10
- typedal/helpers.py,sha256=uwO9dl2hGPtSJzDDaK5dXmubX9WLSh2i9pkZRxuOSs0,6488
11
- typedal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
12
- typedal/types.py,sha256=R6XU_tjd6zb8aI6nAXKepi3OEFwwGD8uig6Mzco9-jU,3380
13
- typedal/web2py_py4web_shared.py,sha256=cEbjkK0WOS9Q0nTyZuQaJWffeP4bjrL79Bx0xGy_UOs,1504
14
- typedal/serializers/as_json.py,sha256=ffo152W-sARYXym4BzwX709rrO2-QwKk2KunWY8RNl4,2229
15
- typedal-2.4.0.dist-info/METADATA,sha256=rqEP-7PeHPJ9W33FEP1J4x7el_Fza_w1nobTHBtqZe4,7573
16
- typedal-2.4.0.dist-info/WHEEL,sha256=KGYbc1zXlYddvwxnNty23BeaKzh7YuoSIvIMO4jEhvw,87
17
- typedal-2.4.0.dist-info/entry_points.txt,sha256=m1wqcc_10rHWPdlQ71zEkmJDADUAnZtn7Jac_6mbyUc,44
18
- typedal-2.4.0.dist-info/RECORD,,