TypeDAL 2.3.6__py3-none-any.whl → 3.0.0__py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.

Potentially problematic release.

typedal/__about__.py CHANGED
@@ -5,4 +5,4 @@ This file contains the Version info for this package.
  # SPDX-FileCopyrightText: 2023-present Robin van der Noord <robinvandernoord@gmail.com>
  #
  # SPDX-License-Identifier: MIT
- __version__ = "2.3.6"
+ __version__ = "3.0.0"
typedal/caching.py CHANGED
@@ -1,6 +1,7 @@
  """
  Helpers to facilitate db-based caching.
  """
+
  import contextlib
  import hashlib
  import json
@@ -12,6 +13,7 @@ import dill # nosec
  from pydal.objects import Field, Rows, Set

  from .core import TypedField, TypedRows, TypedTable
+ from .types import Query

  if typing.TYPE_CHECKING: # pragma: no cover
      from .core import TypeDAL
@@ -171,7 +173,7 @@ def clear_expired() -> int:
      By default, expired items are only removed when trying to access them.
      """
      now = get_now()
-     return len(_TypedalCache.where(_TypedalCache.expires_at > now).delete())
+     return len(_TypedalCache.where(_TypedalCache.expires_at != None).where(_TypedalCache.expires_at < now).delete())


  def _remove_cache(s: Set, tablename: str) -> None:
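The change above fixes an inverted comparison: the old `expires_at > now` matched entries that had not yet expired, so clear_expired() deleted still-valid cache entries and left the stale ones in place. The new chained .where() calls skip entries with no expiry set and remove only those whose timestamp is already in the past. A minimal sketch of the corrected behaviour (assuming a cache holding one entry that expired a minute ago and one that never expires):

    from typedal.caching import clear_expired

    removed = clear_expired()  # only the already-expired entry is deleted
    assert removed == 1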
@@ -269,3 +271,137 @@ def load_from_cache(key: str, db: "TypeDAL") -> Any | None:
          return _load_from_cache(key, db)

      return None # pragma: no cover
+
+
+ def humanize_bytes(size: int | float) -> str:
+     """
+     Turn a number of bytes into a human-readable version (e.g. 124 GB).
+     """
+     if not size:
+         return "0"
+
+     suffixes = ["B", "KB", "MB", "GB", "TB", "PB"] # List of suffixes for different magnitudes
+     suffix_index = 0
+
+     while size > 1024 and suffix_index < len(suffixes) - 1:
+         suffix_index += 1
+         size /= 1024.0
+
+     return f"{size:.2f} {suffixes[suffix_index]}"
+
+
+ def _expired_and_valid_query() -> tuple[str, str]:
+     expired_items = (
+         _TypedalCache.where(lambda row: (row.expires_at < get_now()) & (row.expires_at != None))
+         .select(_TypedalCache.id)
+         .to_sql()
+     )
+
+     valid_items = _TypedalCache.where(~_TypedalCache.id.belongs(expired_items)).select(_TypedalCache.id).to_sql()
+
+     return expired_items, valid_items
+
+
+ T = typing.TypeVar("T")
+ Stats = typing.TypedDict("Stats", {"total": T, "valid": T, "expired": T})
+
+ RowStats = typing.TypedDict(
+     "RowStats",
+     {
+         "Dependent Cache Entries": int,
+     },
+ )
+
+
+ def _row_stats(db: "TypeDAL", table: str, query: Query) -> RowStats:
+     count_field = _TypedalCacheDependency.entry.count()
+     stats: TypedRows[_TypedalCacheDependency] = db(query & (_TypedalCacheDependency.table == table)).select(
+         _TypedalCacheDependency.entry, count_field, groupby=_TypedalCacheDependency.entry
+     )
+     return {
+         "Dependent Cache Entries": len(stats),
+     }
+
+
+ def row_stats(db: "TypeDAL", table: str, row_id: str) -> Stats[RowStats]:
+     """
+     Collect caching stats for a specific table row (by ID).
+     """
+     expired_items, valid_items = _expired_and_valid_query()
+
+     query = _TypedalCacheDependency.idx == row_id
+
+     return {
+         "total": _row_stats(db, table, query),
+         "valid": _row_stats(db, table, _TypedalCacheDependency.entry.belongs(valid_items) & query),
+         "expired": _row_stats(db, table, _TypedalCacheDependency.entry.belongs(expired_items) & query),
+     }
+
+
+ TableStats = typing.TypedDict(
+     "TableStats",
+     {
+         "Dependent Cache Entries": int,
+         "Associated Table IDs": int,
+     },
+ )
+
+
+ def _table_stats(db: "TypeDAL", table: str, query: Query) -> TableStats:
+     count_field = _TypedalCacheDependency.entry.count()
+     stats: TypedRows[_TypedalCacheDependency] = db(query & (_TypedalCacheDependency.table == table)).select(
+         _TypedalCacheDependency.entry, count_field, groupby=_TypedalCacheDependency.entry
+     )
+     return {
+         "Dependent Cache Entries": len(stats),
+         "Associated Table IDs": sum(stats.column(count_field)),
+     }
+
+
+ def table_stats(db: "TypeDAL", table: str) -> Stats[TableStats]:
+     """
+     Collect caching stats for a table.
+     """
+     expired_items, valid_items = _expired_and_valid_query()
+
+     return {
+         "total": _table_stats(db, table, _TypedalCacheDependency.id > 0),
+         "valid": _table_stats(db, table, _TypedalCacheDependency.entry.belongs(valid_items)),
+         "expired": _table_stats(db, table, _TypedalCacheDependency.entry.belongs(expired_items)),
+     }
+
+
+ GenericStats = typing.TypedDict(
+     "GenericStats",
+     {
+         "entries": int,
+         "dependencies": int,
+         "size": str,
+     },
+ )
+
+
+ def _calculate_stats(db: "TypeDAL", query: Query) -> GenericStats:
+     sum_len_field = _TypedalCache.data.len().sum()
+     size_row = db(query).select(sum_len_field).first()
+
+     size = size_row[sum_len_field] if size_row else 0 # type: ignore
+
+     return {
+         "entries": _TypedalCache.where(query).count(),
+         "dependencies": db(_TypedalCacheDependency.entry.belongs(query)).count(),
+         "size": humanize_bytes(size),
+     }
+
+
+ def calculate_stats(db: "TypeDAL") -> Stats[GenericStats]:
+     """
+     Collect generic caching stats.
+     """
+     expired_items, valid_items = _expired_and_valid_query()
+
+     return {
+         "total": _calculate_stats(db, _TypedalCache.id > 0),
+         "valid": _calculate_stats(db, _TypedalCache.id.belongs(valid_items)),
+         "expired": _calculate_stats(db, _TypedalCache.id.belongs(expired_items)),
+     }
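The new stats helpers report cache usage at three levels of detail, each split into "total", "valid" and "expired" sections, with humanize_bytes() formatting the summed blob size (e.g. "1.20 MB"). A minimal usage sketch, assuming a database configured via [tool.typedal] (the "user" table and the row id are illustrative):

    from typedal import TypeDAL
    from typedal import caching

    db = TypeDAL()
    print(caching.calculate_stats(db))         # entries, dependencies, humanized size
    print(caching.table_stats(db, "user"))     # dependent cache entries per table
    print(caching.row_stats(db, "user", "3"))  # dependent cache entries for one row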
typedal/cli.py CHANGED
@@ -1,6 +1,8 @@
  """
  Typer CLI for TypeDAL.
  """
+
+ import fnmatch
  import sys
  import typing
  import warnings
@@ -12,6 +14,8 @@ from configuraptor import asdict
  from configuraptor.alias import is_alias
  from configuraptor.helpers import is_optional

+ from .types import AnyDict
+
  try:
      import edwh_migrate
      import pydal2sql # noqa: F401
@@ -19,6 +23,7 @@ try:
      import rich
      import tomlkit
      import typer
+     from tabulate import tabulate
  except ImportError as e: # pragma: no cover
      # ImportWarning is hidden by default
      warnings.warn(
@@ -38,14 +43,16 @@ from pydal2sql.types import (
  from pydal2sql_core import core_alter, core_create
  from typing_extensions import Never

+ from . import caching
  from .__about__ import __version__
  from .config import TypeDALConfig, _fill_defaults, load_config, transform
+ from .core import TypeDAL

  app = typer.Typer(
      no_args_is_help=True,
  )

- questionary_types: dict[typing.Hashable, Optional[dict[str, typing.Any]]] = {
+ questionary_types: dict[typing.Hashable, Optional[AnyDict]] = {
      str: {
          "type": "text",
          "validate": lambda text: True if len(text) > 0 else "Please enter a value",
@@ -92,7 +99,7 @@ T = typing.TypeVar("T")
  notfound = object()


- def _get_question(prop: str, annotation: typing.Type[T]) -> Optional[dict[str, typing.Any]]: # pragma: no cover
+ def _get_question(prop: str, annotation: typing.Type[T]) -> Optional[AnyDict]: # pragma: no cover
      question = questionary_types.get(prop, notfound)
      if question is notfound:
          # None means skip the question, notfound means use the type default!
@@ -147,11 +154,7 @@ def setup(

      toml_contents = toml_path.read_text()
      # tomli has native Python types, tomlkit doesn't but preserves comments
-     toml_obj: dict[str, typing.Any] = tomli.loads(toml_contents)
-
-     if "[tool.typedal]" in toml_contents:
-         section = toml_obj["tool"]["typedal"]
-         config.update(**section, _overwrite=True)
+     toml_obj: AnyDict = tomli.loads(toml_contents)

      if "[tool.pydal2sql]" in toml_contents:
          mapping = {"": ""} # <- placeholder
@@ -169,6 +172,10 @@ def setup(

      config.update(**extra_config)

+     if "[tool.typedal]" in toml_contents:
+         section = toml_obj["tool"]["typedal"]
+         config.update(**section, _overwrite=True)
+
      data = asdict(config, with_top_level_key=False)
      data["migrate"] = None # determined based on existence of input/output file.
@@ -202,7 +209,7 @@ def setup(
          transform(data, prop)

      with toml_path.open("r") as f:
-         old_contents: dict[str, typing.Any] = tomlkit.load(f)
+         old_contents: AnyDict = tomlkit.load(f)

      if "tool" not in old_contents:
          old_contents["tool"] = {}
@@ -219,9 +226,10 @@ def setup(
      rich.print(f"[green]Wrote updated config to {toml_path}![/green]")


- @app.command()
+ @app.command(name="migrations.generate")
  @with_exit_code(hide_tb=IS_DEBUG)
  def generate_migrations(
+     connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
      filename_before: OptionalArgument[str] = None,
      filename_after: OptionalArgument[str] = None,
      dialect: DBType_Option = None,
@@ -238,7 +246,7 @@ def generate_migrations(
      """
      # 1. choose CREATE or ALTER based on whether 'output' exists?
      # 2. pass right args based on 'config' to function chosen in 1.
-     generic_config = load_config()
+     generic_config = load_config(connection)
      pydal2sql_config = generic_config.to_pydal2sql()
      pydal2sql_config.update(
          magic=magic,
@@ -249,6 +257,7 @@ def generate_migrations(
          format=output_format,
          input=filename_before,
          output=output_file,
+         _skip_none=True,
      )

      if pydal2sql_config.output and Path(pydal2sql_config.output).exists():
@@ -288,9 +297,10 @@ def generate_migrations(
      )


- @app.command()
+ @app.command(name="migrations.run")
  @with_exit_code(hide_tb=IS_DEBUG)
  def run_migrations(
+     connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
      migrations_file: OptionalArgument[str] = None,
      db_uri: Optional[str] = None,
      db_folder: Optional[str] = None,
@@ -310,7 +320,7 @@ def run_migrations(
      # 1. build migrate Config from TypeDAL config
      # 2. import right file
      # 3. `activate_migrations`
-     generic_config = load_config()
+     generic_config = load_config(connection)
      migrate_config = generic_config.to_migrate()

      migrate_config.update(
@@ -325,6 +335,7 @@ def run_migrations(
          create_flag_location=create_flag_location,
          db_folder=db_folder,
          migrations_file=migrations_file,
+         _skip_none=True,
      )

      if dry_run:
@@ -334,6 +345,220 @@ def run_migrations(
      return True


+ def match_strings(patterns: list[str] | str, string_list: list[str]) -> list[str]:
+     """
+     Glob but on a list of strings.
+     """
+     if isinstance(patterns, str):
+         patterns = [patterns]
+
+     matches = []
+     for pattern in patterns:
+         matches.extend([s for s in string_list if fnmatch.fnmatch(s, pattern)])
+
+     return matches
+
+
+ @app.command(name="migrations.fake")
+ @with_exit_code(hide_tb=IS_DEBUG)
+ def fake_migrations(
+     names: typing.Annotated[list[str], typer.Argument()] = None,
+     all: bool = False, # noqa: A002
+     connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
+     migrations_file: Optional[str] = None,
+     db_uri: Optional[str] = None,
+     db_folder: Optional[str] = None,
+     migrate_table: Optional[str] = None,
+     dry_run: bool = False,
+ ) -> int:
+     """
+     Mark one or more migrations as completed in the database, without executing the SQL code.
+
+     glob is supported in 'names'
+     """
+     if not (names or all):
+         rich.print("Please provide one or more migration names, or pass --all to fake all.")
+         return 1
+
+     generic_config = load_config(connection)
+     migrate_config = generic_config.to_migrate()
+
+     migrate_config.update(
+         migrate_uri=db_uri,
+         migrate_table=migrate_table,
+         db_folder=db_folder,
+         migrations_file=migrations_file,
+         _skip_none=True,
+     )
+
+     migrations = edwh_migrate.list_migrations(migrate_config)
+
+     migration_names = list(migrations.keys())
+
+     to_fake = migration_names if all else match_strings(names or [], migration_names)
+
+     try:
+         db = edwh_migrate.setup_db(config=migrate_config)
+     except edwh_migrate.migrate.DatabaseNotYetInitialized:
+         db = edwh_migrate.setup_db(
+             config=migrate_config, migrate=True, migrate_enabled=True, remove_migrate_tablefile=True
+         )
+
+     previously_migrated = (
+         db(
+             db.ewh_implemented_features.name.belongs(to_fake)
+             & (db.ewh_implemented_features.installed == True) # noqa E712
+         )
+         .select(db.ewh_implemented_features.name)
+         .column("name")
+     )
+
+     if dry_run:
+         rich.print("Would migrate these:", [_ for _ in to_fake if _ not in previously_migrated])
+         return 0
+
+     n = len(to_fake)
+     print(f"{len(previously_migrated)} / {n} were already installed.")
+
+     for name in to_fake:
+         if name in previously_migrated:
+             continue
+
+         edwh_migrate.mark_migration(db, name=name, installed=True)
+
+     db.commit()
+     rich.print(f"Faked {n} new migrations.")
+     return 0
+
+
+ AnyNestedDict: typing.TypeAlias = dict[str, AnyDict]
+
+
+ def tabulate_data(data: AnyNestedDict) -> None:
+     """
+     Print a nested dict of data in a nice, human-readable table.
+     """
+     flattened_data = []
+     for key, inner_dict in data.items():
+         temp_dict = {"": key}
+         temp_dict.update(inner_dict)
+         flattened_data.append(temp_dict)
+
+     # Display the tabulated data from the transposed dictionary
+     print(tabulate(flattened_data, headers="keys"))
+
+
+ FormatOptions: typing.TypeAlias = typing.Literal["plaintext", "json", "yaml", "toml"]
+
+
+ def get_output_format(fmt: FormatOptions) -> typing.Callable[[AnyNestedDict], None]:
+     """
+     This function takes a format option as input and \
+     returns a function that can be used to output data in the specified format.
+     """
+     match fmt:
+         case "plaintext":
+             output = tabulate_data
+         case "json":
+
+             def output(_data: AnyDict | AnyNestedDict) -> None:
+                 import json
+
+                 print(json.dumps(_data, indent=2))
+
+         case "yaml":
+
+             def output(_data: AnyDict | AnyNestedDict) -> None:
+                 import yaml
+
+                 print(yaml.dump(_data))
+
+         case "toml":
+
+             def output(_data: AnyDict | AnyNestedDict) -> None:
+                 import tomli_w
+
+                 print(tomli_w.dumps(_data))
+
+         case _:
+             options = typing.get_args(FormatOptions)
+             raise ValueError(f"Invalid format '{fmt}'. Please choose one of {options}.")
+
+     return output
+
+
+ @app.command(name="cache.stats")
+ @with_exit_code(hide_tb=IS_DEBUG)
+ def cache_stats(
+     identifier: typing.Annotated[str, typer.Argument()] = "",
+     connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
+     fmt: typing.Annotated[
+         str, typer.Option("--format", "--fmt", "-f", help="plaintext (default) or json")
+     ] = "plaintext",
+ ) -> None:
+     """
+     Collect caching stats.
+
+     Examples:
+         typedal cache.stats
+         typedal cache.stats user
+         typedal cache.stats user.3
+     """
+     config = load_config(connection)
+     db = TypeDAL(config=config, migrate=False, fake_migrate=False)
+
+     output = get_output_format(typing.cast(FormatOptions, fmt))
+
+     data: AnyDict
+     parts = identifier.split(".")
+     match parts:
+         case [] | [""]:
+             # generic stats
+             data = caching.calculate_stats(db) # type: ignore
+         case [table]:
+             # table stats
+             data = caching.table_stats(db, table) # type: ignore
+         case [table, row_id]:
+             # row stats
+             data = caching.row_stats(db, table, row_id) # type: ignore
+         case _:
+             raise ValueError("Please use the format `table` or `table.id` for this command.")
+
+     output(data)
+
+     # todo:
+     # - sort by most dependencies
+     # - sort by biggest data
+     # - include size for table_stats, row_stats
+     # - group by table
+
+
+ @app.command(name="cache.clear")
+ @with_exit_code(hide_tb=IS_DEBUG)
+ def cache_clear(
+     connection: typing.Annotated[str, typer.Option("--connection", "-c")] = None,
+     purge: typing.Annotated[bool, typer.Option("--all", "--purge", "-p")] = False,
+ ) -> None:
+     """
+     Clear (expired) items from the cache.
+
+     Args:
+         connection (optional): [tool.typedal.<connection>]
+         purge (default: no): remove all items, not only expired
+     """
+     config = load_config(connection)
+     db = TypeDAL(config=config, migrate=False, fake_migrate=False)
+
+     if purge:
+         caching.clear_cache()
+         print("Emptied cache")
+     else:
+         n = caching.clear_expired()
+         print(f"Removed {n} expired from cache")
+
+     db.commit()
+
+
  def version_callback() -> Never:
      """
      --version requested!
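Taken together, these hunks rename the migration commands to dotted names (migrations.generate, migrations.run), add migrations.fake, and introduce cache.stats and cache.clear; every command now accepts a --connection/-c option that is forwarded to load_config(). Illustrative invocations (the connection name "production" and the glob pattern are made-up examples):

    typedal migrations.generate --connection production
    typedal migrations.run -c production
    typedal migrations.fake "2024_*" --dry-run
    typedal cache.stats user.3 --format json
    typedal cache.clear --purge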
typedal/config.py CHANGED
@@ -1,6 +1,7 @@
  """
  TypeDAL can be configured by a combination of pyproject.toml (static), env (dynamic) and code (programmic).
  """
+
  import os
  import re
  import typing
@@ -9,11 +10,13 @@ from collections import defaultdict
  from pathlib import Path
  from typing import Any, Optional

- import black.files
  import tomli
  from configuraptor import TypedConfig, alias
+ from configuraptor.helpers import find_pyproject_toml
  from dotenv import dotenv_values, find_dotenv

+ from .types import AnyDict
+
  if typing.TYPE_CHECKING: # pragma: no cover
      from edwh_migrate import Config as MigrateConfig
      from pydal2sql.typer_support import Config as P2SConfig
@@ -34,7 +37,7 @@ class TypeDALConfig(TypedConfig):
      connection: str = "default"

      # pydal2sql:
-     input: str = "" # noqa: A003
+     input: str = ""
      output: str = ""
      noop: bool = False
      magic: bool = True
@@ -61,11 +64,7 @@ class TypeDALConfig(TypedConfig):
      db_type: str = alias("dialect")
      db_folder: str = alias("folder")

-     def __repr__(self) -> str:
-         """
-         Dump the config to a (fancy) string.
-         """
-         return f"<TypeDAL {self.__dict__}>"
+     # repr set by @beautify (by inheriting from TypedConfig)

      def to_pydal2sql(self) -> "P2SConfig":
          """
@@ -123,14 +122,8 @@ class TypeDALConfig(TypedConfig):
      )


- def find_pyproject_toml(directory: str | None = None) -> typing.Optional[str]:
-     """
-     Find the project's config toml, looks up until it finds the project root (black's logic).
-     """
-     return black.files.find_pyproject_toml((directory or os.getcwd(),))
-

- def _load_toml(path: str | bool | None = True) -> tuple[str, dict[str, Any]]:
+ def _load_toml(path: str | bool | None = True) -> tuple[str, AnyDict]:
      """
      Path can be a file, a directory, a bool or None.

@@ -146,7 +139,7 @@ def _load_toml(path: str | bool | None = True) -> tuple[str, dict[str, Any]]:
      elif Path(str(path)).is_file():
          toml_path = str(path)
      else:
-         toml_path = find_pyproject_toml(str(path))
+         toml_path = find_pyproject_toml(path)

      if not toml_path:
          # nothing to load
@@ -156,13 +149,13 @@ def _load_toml(path: str | bool | None = True) -> tuple[str, dict[str, Any]]:
          with open(toml_path, "rb") as f:
              data = tomli.load(f)

-         return toml_path or "", typing.cast(dict[str, Any], data["tool"]["typedal"])
+         return str(toml_path) or "", typing.cast(AnyDict, data["tool"]["typedal"])
      except Exception as e:
          warnings.warn(f"Could not load typedal config toml: {e}", source=e)
-         return toml_path or "", {}
+         return str(toml_path) or "", {}


- def _load_dotenv(path: str | bool | None = True) -> tuple[str, dict[str, Any]]:
+ def _load_dotenv(path: str | bool | None = True) -> tuple[str, AnyDict]:
      fallback_data = {k.lower().removeprefix("typedal_"): v for k, v in os.environ.items()}
      if path is False:
          dotenv_path = None
@@ -202,28 +195,28 @@ def get_db_for_alias(db_name: str) -> str:
      return DB_ALIASES.get(db_name, db_name)


- DEFAULTS: dict[str, Any | typing.Callable[[dict[str, Any]], Any]] = {
+ DEFAULTS: dict[str, Any | typing.Callable[[AnyDict], Any]] = {
      "database": lambda data: data.get("db_uri") or "sqlite:memory",
-     "dialect": lambda data: get_db_for_alias(data["database"].split(":")[0])
-     if ":" in data["database"]
-     else data.get("db_type"),
+     "dialect": lambda data: (
+         get_db_for_alias(data["database"].split(":")[0]) if ":" in data["database"] else data.get("db_type")
+     ),
      "migrate": lambda data: not (data.get("input") or data.get("output")),
      "folder": lambda data: data.get("db_folder"),
-     "flag_location": lambda data: f"{db_folder}/flags"
-     if (db_folder := (data.get("folder") or data.get("db_folder")))
-     else "/flags",
+     "flag_location": lambda data: (
+         f"{db_folder}/flags" if (db_folder := (data.get("folder") or data.get("db_folder"))) else "/flags"
+     ),
      "pool_size": lambda data: 1 if data.get("dialect", "sqlite") == "sqlite" else 3,
  }


- def _fill_defaults(data: dict[str, Any], prop: str, fallback: Any = None) -> None:
+ def _fill_defaults(data: AnyDict, prop: str, fallback: Any = None) -> None:
      default = DEFAULTS.get(prop, fallback)
      if callable(default):
          default = default(data)
      data[prop] = default


- def fill_defaults(data: dict[str, Any], prop: str) -> None:
+ def fill_defaults(data: AnyDict, prop: str) -> None:
      """
      Fill missing property defaults with (calculated) sane defaults.
      """
@@ -231,14 +224,16 @@ def fill_defaults(data: dict[str, Any], prop: str) -> None:
      _fill_defaults(data, prop)


- TRANSFORMS: dict[str, typing.Callable[[dict[str, Any]], Any]] = {
-     "database": lambda data: data["database"]
-     if (":" in data["database"] or not data.get("dialect"))
-     else (data["dialect"] + "://" + data["database"])
+ TRANSFORMS: dict[str, typing.Callable[[AnyDict], Any]] = {
+     "database": lambda data: (
+         data["database"]
+         if (":" in data["database"] or not data.get("dialect"))
+         else (data["dialect"] + "://" + data["database"])
+     )
  }


- def transform(data: dict[str, Any], prop: str) -> bool:
+ def transform(data: AnyDict, prop: str) -> bool:
      """
      After the user has chosen a value, possibly transform it.
      """
@@ -278,7 +273,7 @@ def expand_posix_vars(posix_expr: str, context: dict[str, str]) -> str:
      return re.sub(pattern, replace_var, posix_expr)


- def expand_env_vars_into_toml_values(toml: dict[str, Any], env: dict[str, Any]) -> None:
+ def expand_env_vars_into_toml_values(toml: AnyDict, env: AnyDict) -> None:
      """
      Recursively expands POSIX/Docker Compose-like environment variables in a TOML dictionary.

@@ -331,7 +326,10 @@ def expand_env_vars_into_toml_values(toml: dict[str, Any], env: dict[str, Any])


  def load_config(
-     _use_pyproject: bool | str | None = True, _use_env: bool | str | None = True, **fallback: Any
+     connection_name: Optional[str] = None,
+     _use_pyproject: bool | str | None = True,
+     _use_env: bool | str | None = True,
+     **fallback: Any,
  ) -> TypeDALConfig:
      """
      Combines multiple sources of config into one config instance.
@@ -345,8 +343,8 @@ def load_config(

      expand_env_vars_into_toml_values(toml, dotenv)

-     connection_name = dotenv.get("connection", "") or toml.get("default", "")
-     connection: dict[str, Any] = (toml.get(connection_name) if connection_name else toml) or {}
+     connection_name = connection_name or dotenv.get("connection", "") or toml.get("default", "")
+     connection: AnyDict = (toml.get(connection_name) if connection_name else toml) or {}

      combined = connection | dotenv | fallback
      combined = {k.replace("-", "_"): v for k, v in combined.items()}
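load_config() now takes an explicit connection name, which wins over the `connection` value from dotenv and the `default` key in the toml. A sketch of a multi-connection pyproject.toml and how a section might be selected (the section names and URIs are illustrative):

    # pyproject.toml:
    #
    # [tool.typedal]
    # default = "dev"
    #
    # [tool.typedal.dev]
    # database = "sqlite:storage.db"
    #
    # [tool.typedal.production]
    # database = "postgres://user:password@localhost:5432/mydb"

    from typedal.config import load_config

    config = load_config("production")  # overrides default = "dev"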
typedal/core.py CHANGED
@@ -1,6 +1,7 @@
  """
  Core functionality of TypeDAL.
  """
+
  import contextlib
  import csv
  import datetime as dt
@@ -44,6 +45,7 @@ from .types import (
      AfterDeleteCallable,
      AfterInsertCallable,
      AfterUpdateCallable,
+     AnyDict,
      BeforeDeleteCallable,
      BeforeInsertCallable,
      BeforeUpdateCallable,
@@ -352,8 +354,8 @@ class TypeDAL(pydal.DAL): # type: ignore
          migrate_enabled: bool = True,
          fake_migrate_all: bool = False,
          decode_credentials: bool = False,
-         driver_args: Optional[dict[str, Any]] = None,
-         adapter_args: Optional[dict[str, Any]] = None,
+         driver_args: Optional[AnyDict] = None,
+         adapter_args: Optional[AnyDict] = None,
          attempts: int = 5,
          auto_import: bool = False,
          bigint_id: bool = False,
@@ -368,13 +370,15 @@ class TypeDAL(pydal.DAL): # type: ignore
          enable_typedal_caching: bool = None,
          use_pyproject: bool | str = True,
          use_env: bool | str = True,
+         connection: Optional[str] = None,
+         config: Optional[TypeDALConfig] = None,
      ) -> None:
          """
          Adds some internal tables after calling pydal's default init.

          Set enable_typedal_caching to False to disable this behavior.
          """
-         config = load_config(_use_pyproject=use_pyproject, _use_env=use_env)
+         config = config or load_config(connection, _use_pyproject=use_pyproject, _use_env=use_env)
          config.update(
              database=uri,
              dialect=uri.split(":")[0] if uri and ":" in uri else None,
@@ -438,7 +442,7 @@ class TypeDAL(pydal.DAL): # type: ignore
              # try again:
              return self.define(model, migrate=True, fake_migrate=True, redefine=True)

-     default_kwargs: typing.ClassVar[typing.Dict[str, Any]] = {
+     default_kwargs: typing.ClassVar[AnyDict] = {
          # fields are 'required' (notnull) by default:
          "notnull": True,
      }
@@ -747,7 +751,7 @@ class TableProtocol(typing.Protocol): # pragma: no cover
      Make mypy happy.
      """

-     id: "TypedField[int]" # noqa: A003
+     id: "TypedField[int]"

      def __getitem__(self, item: str) -> Field:
          """
@@ -847,7 +851,7 @@ class TableMeta(type):
          """
          return self(row)

-     def all(self: typing.Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]": # noqa: A003
+     def all(self: typing.Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]":
          """
          Return all rows for this model.
          """
@@ -886,7 +890,7 @@ class TableMeta(type):

          return str(table._insert(**fields))

-     def bulk_insert(self: typing.Type[T_MetaInstance], items: list[dict[str, Any]]) -> "TypedRows[T_MetaInstance]":
+     def bulk_insert(self: typing.Type[T_MetaInstance], items: list[AnyDict]) -> "TypedRows[T_MetaInstance]":
          """
          Insert multiple rows, returns a TypedRows set of new instances.
          """
@@ -895,7 +899,7 @@ class TableMeta(type):
          return self.where(lambda row: row.id.belongs(result)).collect()

      def update_or_insert(
-         self: typing.Type[T_MetaInstance], query: T_Query | dict[str, Any] = DEFAULT, **values: Any
+         self: typing.Type[T_MetaInstance], query: T_Query | AnyDict = DEFAULT, **values: Any
      ) -> T_MetaInstance:
          """
          Update a row if query matches, else insert a new one.
@@ -1323,7 +1327,7 @@ class TypedTable(metaclass=TableMeta):

      _with: list[str]

-     id: "TypedField[int]" # noqa: A003
+     id: "TypedField[int]"

      _before_insert: list[BeforeInsertCallable]
      _after_insert: list[AfterInsertCallable]
@@ -1471,7 +1475,7 @@ class TypedTable(metaclass=TableMeta):
      # underscore variants work for class instances (set up by _setup_instance_methods)

      @classmethod
-     def as_dict(cls, flat: bool = False, sanitize: bool = True) -> dict[str, Any]:
+     def as_dict(cls, flat: bool = False, sanitize: bool = True) -> AnyDict:
          """
          Dump the object to a plain dict.

@@ -1481,7 +1485,7 @@ class TypedTable(metaclass=TableMeta):
          """
          table = cls._ensure_table_defined()
          result = table.as_dict(flat, sanitize)
-         return typing.cast(dict[str, Any], result)
+         return typing.cast(AnyDict, result)

      @classmethod
      def as_json(cls, sanitize: bool = True, indent: Optional[int] = None, **kwargs: Any) -> str:
@@ -1521,7 +1525,7 @@ class TypedTable(metaclass=TableMeta):

      def _as_dict(
          self, datetime_to_str: bool = False, custom_types: typing.Iterable[type] | type | None = None
-     ) -> dict[str, Any]:
+     ) -> AnyDict:
          row = self._ensure_matching_row()

          result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types)
@@ -1545,7 +1549,7 @@ class TypedTable(metaclass=TableMeta):

              result[relationship] = data

-         return typing.cast(dict[str, Any], result)
+         return typing.cast(AnyDict, result)

      def _as_json(
          self,
@@ -1633,7 +1637,7 @@ class TypedTable(metaclass=TableMeta):

      # pickling:

-     def __getstate__(self) -> dict[str, Any]:
+     def __getstate__(self) -> AnyDict:
          """
          State to save when pickling.

@@ -1641,7 +1645,7 @@ class TypedTable(metaclass=TableMeta):
          Similar to as_dict but without changing the data of the relationships (dill does that recursively)
          """
          row = self._ensure_matching_row()
-         result: dict[str, Any] = row.as_dict()
+         result: AnyDict = row.as_dict()

          if _with := getattr(self, "_with", None):
              result["_with"] = _with
@@ -1653,7 +1657,7 @@ class TypedTable(metaclass=TableMeta):
          result["_row"] = self._row.as_json() if self._row else ""
          return result

-     def __setstate__(self, state: dict[str, Any]) -> None:
+     def __setstate__(self, state: AnyDict) -> None:
          """
          Used by dill when loading from a bytestring.
          """
@@ -1842,14 +1846,14 @@ class TypedRows(typing.Collection[T_MetaInstance], Rows):
          storage_to_dict: bool = False,
          datetime_to_str: bool = False,
          custom_types: list[type] = None,
-     ) -> dict[int, dict[str, Any]]:
+     ) -> dict[int, AnyDict]:
          """
          Get the data in a dict of dicts.
          """
          if any([key, compact, storage_to_dict, datetime_to_str, custom_types]):
              # functionality not guaranteed
              return typing.cast(
-                 dict[int, dict[str, Any]],
+                 dict[int, AnyDict],
                  super().as_dict(
                      key or "id",
                      compact,
@@ -1881,14 +1885,12 @@ class TypedRows(typing.Collection[T_MetaInstance], Rows):
          storage_to_dict: bool = False,
          datetime_to_str: bool = False,
          custom_types: list[type] = None,
-     ) -> list[dict[str, Any]]:
+     ) -> list[AnyDict]:
          """
          Get the data in a list of dicts.
          """
          if any([compact, storage_to_dict, datetime_to_str, custom_types]):
-             return typing.cast(
-                 list[dict[str, Any]], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types)
-             )
+             return typing.cast(list[AnyDict], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types))

          return [_.as_dict() for _ in self.records.values()]

@@ -1992,7 +1994,7 @@ class TypedRows(typing.Collection[T_MetaInstance], Rows):
          """
          return cls(rows, model, metadata=metadata)

-     def __getstate__(self) -> dict[str, Any]:
+     def __getstate__(self) -> AnyDict:
          """
          Used by dill to dump to bytes (exclude db connection etc).
          """
@@ -2003,7 +2005,7 @@ class TypedRows(typing.Collection[T_MetaInstance], Rows):
              "colnames": self.colnames,
          }

-     def __setstate__(self, state: dict[str, Any]) -> None:
+     def __setstate__(self, state: AnyDict) -> None:
          """
          Used by dill when loading from a bytestring.
          """
@@ -2031,7 +2033,7 @@ class QueryBuilder(typing.Generic[T_MetaInstance]):
      model: typing.Type[T_MetaInstance]
      query: Query
      select_args: list[Any]
-     select_kwargs: dict[str, Any]
+     select_kwargs: AnyDict
      relationships: dict[str, Relationship[Any]]
      metadata: Metadata

@@ -2040,7 +2042,7 @@ class QueryBuilder(typing.Generic[T_MetaInstance]):
          model: typing.Type[T_MetaInstance],
          add_query: Optional[Query] = None,
          select_args: Optional[list[Any]] = None,
-         select_kwargs: Optional[dict[str, Any]] = None,
+         select_kwargs: Optional[AnyDict] = None,
          relationships: dict[str, Relationship[Any]] = None,
          metadata: Metadata = None,
      ):
@@ -2090,7 +2092,7 @@ class QueryBuilder(typing.Generic[T_MetaInstance]):
          add_query: Optional[Query] = None,
          overwrite_query: Optional[Query] = None,
          select_args: Optional[list[Any]] = None,
-         select_kwargs: Optional[dict[str, Any]] = None,
+         select_kwargs: Optional[AnyDict] = None,
          relationships: dict[str, Relationship[Any]] = None,
          metadata: Metadata = None,
      ) -> "QueryBuilder[T_MetaInstance]":
@@ -2292,7 +2294,7 @@ class QueryBuilder(typing.Generic[T_MetaInstance]):
          db = self._get_db()
          return str(db(self.query)._update(**fields))

-     def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], dict[str, Any]]:
+     def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], AnyDict]:
          select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL]
          select_kwargs = self.select_kwargs.copy()
          query = self.query
@@ -2352,6 +2354,17 @@ class QueryBuilder(typing.Generic[T_MetaInstance]):

          return load_from_cache(key, self._get_db())

+     def execute(self, add_id: bool = False) -> Rows:
+         """
+         Raw version of .collect which only executes the SQL, without performing any magic afterwards.
+         """
+         db = self._get_db()
+         metadata = typing.cast(Metadata, self.metadata.copy())
+
+         query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
+
+         return db(query).select(*select_args, **select_kwargs)
+
      def collect(
          self, verbose: bool = False, _to: typing.Type["TypedRows[Any]"] = None, add_id: bool = True
      ) -> "TypedRows[T_MetaInstance]":
@@ -2400,7 +2413,7 @@ class QueryBuilder(typing.Generic[T_MetaInstance]):
          self,
          query: Query,
          select_args: list[Any],
-         select_kwargs: dict[str, Any],
+         select_kwargs: AnyDict,
          metadata: Metadata,
      ) -> tuple[Query, list[Any]]:
          db = self._get_db()
@@ -2718,7 +2731,7 @@ class PaginatedRows(TypedRows[T_MetaInstance]):
              "prev_page": pagination_data["current_page"] - 1 if has_prev_page else None,
          }

-     def next(self) -> Self: # noqa: A003
+     def next(self) -> Self:
          """
          Get the next page.
          """
@@ -2754,7 +2767,7 @@ class TypedSet(pydal.objects.Set): # type: ignore # pragma: no cover
      This class is not actually used, only 'cast' by TypeDAL.__call__
      """

-     def count(self, distinct: bool = None, cache: dict[str, Any] = None) -> int:
+     def count(self, distinct: bool = None, cache: AnyDict = None) -> int:
          """
          Count returns an int.
          """
typedal/for_py4web.py CHANGED
@@ -1,13 +1,13 @@
  """
  ONLY USE IN COMBINATION WITH PY4WEB!
  """
- from typing import Any

  import threadsafevariable
  from py4web.core import ICECUBE
  from py4web.core import Fixture as _Fixture

  from .core import TypeDAL
+ from .types import AnyDict
  from .web2py_py4web_shared import AuthUser


@@ -22,20 +22,20 @@ class DAL(TypeDAL, Fixture): # pragma: no cover
      Fixture similar to the py4web pydal fixture, but for typedal.
      """

-     def on_request(self, _: dict[str, Any]) -> None:
+     def on_request(self, _: AnyDict) -> None:
          """
          Make sure there is a database connection when a request comes in.
          """
          self.get_connection_from_pool_or_new()
          threadsafevariable.ThreadSafeVariable.restore(ICECUBE)

-     def on_error(self, _: dict[str, Any]) -> None:
+     def on_error(self, _: AnyDict) -> None:
          """
          Rollback db on error.
          """
          self.recycle_connection_in_pool_or_close("rollback")

-     def on_success(self, _: dict[str, Any]) -> None:
+     def on_success(self, _: AnyDict) -> None:
          """
          Commit db on success.
          """
typedal/helpers.py CHANGED
@@ -1,12 +1,15 @@
  """
  Helpers that work independently of core.
  """
+
  import io
  import types
  import typing
  from collections import ChainMap
  from typing import Any

+ from .types import AnyDict
+
  T = typing.TypeVar("T")


@@ -29,7 +32,7 @@ def _all_annotations(cls: type) -> ChainMap[str, type]:
      return ChainMap(*(c.__annotations__ for c in getattr(cls, "__mro__", []) if "__annotations__" in c.__dict__))


- def all_dict(cls: type) -> dict[str, Any]:
+ def all_dict(cls: type) -> AnyDict:
      """
      Get the internal data of a class and all it's parents.
      """
typedal/types.py CHANGED
@@ -1,6 +1,7 @@
  """
  Stuff to make mypy happy.
  """
+
  import typing
  from datetime import datetime
  from typing import Any, Optional, TypedDict
@@ -15,6 +16,8 @@ from pydal.objects import Set as _Set
  from pydal.validators import Validator as _Validator
  from typing_extensions import NotRequired

+ AnyDict: typing.TypeAlias = dict[str, Any]
+

  class Query(_Query): # type: ignore
      """
@@ -116,7 +119,7 @@ class PaginateDict(TypedDict):
      Result of PaginatedRows.as_dict().
      """

-     data: dict[int, dict[str, Any]]
+     data: dict[int, AnyDict]
      pagination: Pagination


@@ -158,7 +161,7 @@ class Metadata(TypedDict):

      final_query: NotRequired[Query | str | None]
      final_args: NotRequired[list[Any]]
-     final_kwargs: NotRequired[dict[str, Any]]
+     final_kwargs: NotRequired[AnyDict]
      relationships: NotRequired[set[str]]

      sql: NotRequired[str]
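The AnyDict alias introduced here is the thread running through most hunks above: a single module-level spelling of dict[str, Any] that the other modules import instead of repeating the full type. For illustration (the describe() helper is hypothetical, not part of the package):

    from typedal.types import AnyDict  # AnyDict = dict[str, Any]

    def describe(data: AnyDict) -> None:
        for key, value in data.items():
            print(f"{key}: {value!r}")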
typedal-3.0.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.3
  Name: TypeDAL
- Version: 2.3.6
+ Version: 3.0.0
  Summary: Typing support for PyDAL
  Project-URL: Documentation, https://typedal.readthedocs.io/
  Project-URL: Issues, https://github.com/trialandsuccess/TypeDAL/issues
@@ -16,14 +16,15 @@ Classifier: Programming Language :: Python :: Implementation :: CPython
  Classifier: Programming Language :: Python :: Implementation :: PyPy
  Requires-Python: >=3.10
  Requires-Dist: configurable-json
- Requires-Dist: configuraptor>=1.24.0
+ Requires-Dist: configuraptor>=1.26.2
  Requires-Dist: dill
  Requires-Dist: pydal
  Provides-Extra: all
- Requires-Dist: edwh-migrate; extra == 'all'
+ Requires-Dist: edwh-migrate>=0.8.0b1; extra == 'all'
  Requires-Dist: py4web; extra == 'all'
- Requires-Dist: pydal2sql[all]; extra == 'all'
+ Requires-Dist: pydal2sql[all]>=1.1.3; extra == 'all'
  Requires-Dist: questionary; extra == 'all'
+ Requires-Dist: tabulate; extra == 'all'
  Requires-Dist: tomlkit; extra == 'all'
  Requires-Dist: typer; extra == 'all'
  Provides-Extra: dev
@@ -33,10 +34,13 @@ Requires-Dist: mkdocs-dracula-theme; extra == 'dev'
  Requires-Dist: pytest-mypy-testing; extra == 'dev'
  Requires-Dist: python-semantic-release<8; extra == 'dev'
  Requires-Dist: su6[all]; extra == 'dev'
+ Requires-Dist: types-pyyaml; extra == 'dev'
+ Requires-Dist: types-tabulate; extra == 'dev'
  Provides-Extra: migrations
- Requires-Dist: edwh-migrate; extra == 'migrations'
- Requires-Dist: pydal2sql; extra == 'migrations'
+ Requires-Dist: edwh-migrate>=0.8.0b1; extra == 'migrations'
+ Requires-Dist: pydal2sql>=1.1.3; extra == 'migrations'
  Requires-Dist: questionary; extra == 'migrations'
+ Requires-Dist: tabulate; extra == 'migrations'
  Requires-Dist: tomlkit; extra == 'migrations'
  Requires-Dist: typer; extra == 'migrations'
  Provides-Extra: py4web
typedal-3.0.0.dist-info/RECORD ADDED
@@ -0,0 +1,18 @@
+ typedal/__about__.py,sha256=q5Cn2Nax8QEhpWVeFUyO2PApUwXIgvp5_OADI5f0kMc,206
+ typedal/__init__.py,sha256=QQpLiVl9w9hm2LBxey49Y_tCF_VB2bScVaS_mCjYy54,366
+ typedal/caching.py,sha256=8UABVAhOlBpL96ykmqhxLaFYOe-XeAh7JoGh57OkxP8,11818
+ typedal/cli.py,sha256=5-2U_pQOZNKHmhefiYtkd7g6B0DAXzjf4A1Jh7D37io,18427
+ typedal/config.py,sha256=KDJXRsIQuFpSZy5XpSJiC_9WGLlmaOexACW0sWdCw54,11626
+ typedal/core.py,sha256=qgJPvlcQYCujsjiiD6SOhWbIr1lxoUDpZUkMnK-mcDQ,95038
+ typedal/fields.py,sha256=z2PD9vLWqBR_zXtiY0DthqTG4AeF3yxKoeuVfGXnSdg,5197
+ typedal/for_py4web.py,sha256=d07b8hL_PvNDUS26Z5fDH2OxWb-IETBuAFPSzrRwm04,1285
+ typedal/for_web2py.py,sha256=zvd5xC-SmuKc0JLDqT3hMIs6COaYnwTFXD_BIeC1vug,1832
+ typedal/helpers.py,sha256=BFuGd-1tBA1-QS91C9PEvNY5z5KFHd3gTplxxDWdwSo,6509
+ typedal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ typedal/types.py,sha256=1kGkNX6vfGg6ln84AG558C4Zx5ACRz-emrUTnuy-rRY,3410
+ typedal/web2py_py4web_shared.py,sha256=cEbjkK0WOS9Q0nTyZuQaJWffeP4bjrL79Bx0xGy_UOs,1504
+ typedal/serializers/as_json.py,sha256=ffo152W-sARYXym4BzwX709rrO2-QwKk2KunWY8RNl4,2229
+ typedal-3.0.0.dist-info/METADATA,sha256=32JYzSrTHFkQomZ2v2iEAv0MSLx0VOnY-2w2pV36XPQ,7782
+ typedal-3.0.0.dist-info/WHEEL,sha256=uNdcs2TADwSd5pVaP0Z_kcjcvvTUklh2S7bxZMF8Uj0,87
+ typedal-3.0.0.dist-info/entry_points.txt,sha256=m1wqcc_10rHWPdlQ71zEkmJDADUAnZtn7Jac_6mbyUc,44
+ typedal-3.0.0.dist-info/RECORD,,
typedal-3.0.0.dist-info/WHEEL CHANGED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: hatchling 1.17.1
+ Generator: hatchling 1.22.4
  Root-Is-Purelib: true
  Tag: py3-none-any
typedal-2.3.6.dist-info/RECORD REMOVED
@@ -1,18 +0,0 @@
- typedal/__about__.py,sha256=kbkWDzpCiYknqWjNq1J8RYaeb9wBLllZwZlNvQscZOk,206
- typedal/__init__.py,sha256=QQpLiVl9w9hm2LBxey49Y_tCF_VB2bScVaS_mCjYy54,366
- typedal/caching.py,sha256=cglkCphbg93Iy9-KHefQN9-JJxuA5-HjpzZAdw2BGvY,7709
- typedal/cli.py,sha256=F7M9D2pu2D-FoKZJrT3K9Wmbv5_ScUR8j5jNGoTgOfk,11800
- typedal/config.py,sha256=oDYSuPMm261hBoDBAXoq37Umf1Vw8rNx4BlP35WwCSY,11882
- typedal/core.py,sha256=hT4XLEMz_IzjTmWu2-scTP7VvrS0UwFSbG7A62OUnv0,94749
- typedal/fields.py,sha256=z2PD9vLWqBR_zXtiY0DthqTG4AeF3yxKoeuVfGXnSdg,5197
- typedal/for_py4web.py,sha256=dwh3cYThR6j2l_RDChcuSH3gXjv8FY-az3t5Lni5A7M,1302
- typedal/for_web2py.py,sha256=zvd5xC-SmuKc0JLDqT3hMIs6COaYnwTFXD_BIeC1vug,1832
- typedal/helpers.py,sha256=ZpHdwBMSANw-P9I5gs56Vf6GUbxGzFsIwbBvASKXX8s,6487
- typedal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- typedal/types.py,sha256=5qm3PgS8DXGCu9ZTUWQiIi2XXD8gz4_4Csg_vZlu_yo,3379
- typedal/web2py_py4web_shared.py,sha256=cEbjkK0WOS9Q0nTyZuQaJWffeP4bjrL79Bx0xGy_UOs,1504
- typedal/serializers/as_json.py,sha256=ffo152W-sARYXym4BzwX709rrO2-QwKk2KunWY8RNl4,2229
- typedal-2.3.6.dist-info/METADATA,sha256=NVLY0EjdnmVKMpFMpPxnmyz9hEQN1ZrQEHccMRNtkYY,7573
- typedal-2.3.6.dist-info/WHEEL,sha256=KGYbc1zXlYddvwxnNty23BeaKzh7YuoSIvIMO4jEhvw,87
- typedal-2.3.6.dist-info/entry_points.txt,sha256=m1wqcc_10rHWPdlQ71zEkmJDADUAnZtn7Jac_6mbyUc,44
- typedal-2.3.6.dist-info/RECORD,,