sql-athame 0.4.0a9__tar.gz → 0.4.0a11__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sql_athame-0.4.0a11/.bumpversion.cfg +19 -0
- sql_athame-0.4.0a11/.editorconfig +7 -0
- sql_athame-0.4.0a11/.github/workflows/publish.yml +27 -0
- sql_athame-0.4.0a11/.github/workflows/test.yml +48 -0
- sql_athame-0.4.0a11/.gitignore +9 -0
- {sql_athame-0.4.0a9 → sql_athame-0.4.0a11}/PKG-INFO +9 -16
- sql_athame-0.4.0a11/docker-compose.yml +9 -0
- {sql_athame-0.4.0a9 → sql_athame-0.4.0a11}/pyproject.toml +32 -26
- sql_athame-0.4.0a11/run +40 -0
- sql_athame-0.4.0a11/sql_athame/__init__.py +2 -0
- {sql_athame-0.4.0a9 → sql_athame-0.4.0a11}/sql_athame/dataclasses.py +132 -29
- sql_athame-0.4.0a11/tests/__init__.py +0 -0
- sql_athame-0.4.0a11/tests/test_asyncpg.py +323 -0
- sql_athame-0.4.0a11/tests/test_basic.py +287 -0
- sql_athame-0.4.0a11/tests/test_dataclasses.py +204 -0
- sql_athame-0.4.0a11/tests/test_sqlalchemy.py +60 -0
- sql_athame-0.4.0a11/uv.lock +1120 -0
- sql_athame-0.4.0a9/sql_athame/__init__.py +0 -2
- {sql_athame-0.4.0a9 → sql_athame-0.4.0a11}/LICENSE +0 -0
- {sql_athame-0.4.0a9 → sql_athame-0.4.0a11}/README.md +0 -0
- {sql_athame-0.4.0a9 → sql_athame-0.4.0a11}/sql_athame/base.py +0 -0
- {sql_athame-0.4.0a9 → sql_athame-0.4.0a11}/sql_athame/escape.py +0 -0
- {sql_athame-0.4.0a9 → sql_athame-0.4.0a11}/sql_athame/py.typed +0 -0
- {sql_athame-0.4.0a9 → sql_athame-0.4.0a11}/sql_athame/sqlalchemy.py +0 -0
- {sql_athame-0.4.0a9 → sql_athame-0.4.0a11}/sql_athame/types.py +0 -0
sql_athame-0.4.0a11/.bumpversion.cfg ADDED
@@ -0,0 +1,19 @@
+[bumpversion]
+current_version = 0.4.0-alpha-11
+parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(-(?P<release>.*)-(?P<build>\d+))?
+serialize =
+    {major}.{minor}.{patch}-{release}-{build}
+    {major}.{minor}.{patch}
+commit = True
+
+[bumpversion:file:pyproject.toml]
+
+[bumpversion:part:release]
+first_value = regular
+optional_value = regular
+values =
+    alpha
+    beta
+    rc
+    test
+    regular
sql_athame-0.4.0a11/.github/workflows/publish.yml ADDED
@@ -0,0 +1,27 @@
+name: publish
+
+on:
+  push:
+    tags:
+      - 'v**'
+
+jobs:
+  publish:
+    runs-on: ubuntu-latest
+    env:
+      UV_PUBLISH_TOKEN: ${{ secrets.POETRY_PYPI_TOKEN_PYPI }}
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: astral-sh/setup-uv@v5
+        with:
+          version: "0.6.6"
+          enable-cache: true
+          python-version: '3.12'
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+
+      - run: uv build
+      - run: uv publish
sql_athame-0.4.0a11/.github/workflows/test.yml ADDED
@@ -0,0 +1,48 @@
+name: test
+
+on:
+  push:
+    branches:
+      - '**'
+
+jobs:
+  test:
+    strategy:
+      matrix:
+        python-version:
+          - '3.9'
+          - '3.10'
+          - '3.11'
+          - '3.12'
+          - '3.13'
+    runs-on: ubuntu-latest
+    services:
+      postgres:
+        image: postgres:16
+        ports:
+          - 5432:5432
+        env:
+          POSTGRES_PASSWORD: password
+    env:
+      PGPORT: 5432
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: astral-sh/setup-uv@v5
+        with:
+          version: "0.6.6"
+          enable-cache: true
+          python-version: ${{ matrix.python-version }}
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - run: uv sync
+      - run: uv run ruff check
+      - run: uv run ruff format --diff
+        if: success() || failure()
+      - run: uv run mypy sql_athame/**.py tests/**.py
+        if: success() || failure()
+      - run: uv run pytest
+        if: success() || failure()
{sql_athame-0.4.0a9 → sql_athame-0.4.0a11}/PKG-INFO
@@ -1,22 +1,16 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: sql-athame
-Version: 0.4.0a9
+Version: 0.4.0a11
 Summary: Python tool for slicing and dicing SQL
-
+Project-URL: homepage, https://github.com/bdowning/sql-athame
+Project-URL: repository, https://github.com/bdowning/sql-athame
+Author-email: Brian Downing <bdowning@lavos.net>
 License: MIT
-
-
-Requires-Python: >=3.9,<4.0
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Provides-Extra: asyncpg
-Requires-Dist: asyncpg ; extra == "asyncpg"
+License-File: LICENSE
+Requires-Python: <4.0,>=3.9
 Requires-Dist: typing-extensions
-
+Provides-Extra: asyncpg
+Requires-Dist: asyncpg; extra == 'asyncpg'
 Description-Content-Type: text/markdown
 
 # sql-athame
@@ -405,4 +399,3 @@ MIT.
 
 ---
 Copyright (c) 2019, 2020 Brian Downing
-
{sql_athame-0.4.0a9 → sql_athame-0.4.0a11}/pyproject.toml
@@ -1,37 +1,44 @@
-[
+[project]
+authors = [
+    {name = "Brian Downing", email = "bdowning@lavos.net"},
+]
+license = {text = "MIT"}
+requires-python = "<4.0,>=3.9"
+dependencies = [
+    "typing-extensions",
+]
 name = "sql-athame"
-version = "0.4.0-alpha-9"
+version = "0.4.0-alpha-11"
 description = "Python tool for slicing and dicing SQL"
-authors = ["Brian Downing <bdowning@lavos.net>"]
-license = "MIT"
 readme = "README.md"
+
+[project.urls]
 homepage = "https://github.com/bdowning/sql-athame"
 repository = "https://github.com/bdowning/sql-athame"
 
-[
-asyncpg = [
-
-
-python = "^3.9"
-asyncpg = { version = "*", optional = true }
-typing-extensions = "*"
+[project.optional-dependencies]
+asyncpg = [
+    "asyncpg",
+]
 
-[
-
-
-
-
-
-
-
-
-
-
+[dependency-groups]
+dev = [
+    "SQLAlchemy",
+    "asyncpg",
+    "bump2version",
+    "flake8",
+    "grip",
+    "ipython",
+    "mypy",
+    "pytest",
+    "pytest-asyncio",
+    "pytest-cov",
+    "ruff",
+]
 
 [build-system]
-requires = ["
-build-backend = "
+requires = ["hatchling"]
+build-backend = "hatchling.build"
 
 [tool.ruff]
 target-version = "py39"
@@ -62,7 +69,6 @@ ignore = [
     "E501", # line too long
     "E721", # type checks, currently broken
     "ISC001", # conflicts with ruff format
-    "PT004", # Fixture `...` does not return anything, add leading underscore
     "RET505", # Unnecessary `else` after `return` statement
     "RET506", # Unnecessary `else` after `raise` statement
 ]
sql_athame-0.4.0a11/run ADDED
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+usage=()
+
+usage+=(" $0 tests - run tests")
+tests() {
+    uv run pytest "$@"
+    lint
+}
+
+usage+=(" $0 refmt - reformat code")
+refmt() {
+    uv run ruff check --select I --fix
+    uv run ruff format
+}
+
+usage+=(" $0 lint - run linting")
+lint() {
+    uv run ruff check
+    uv run ruff format --diff
+    uv run mypy sql_athame/**.py tests/**.py
+}
+
+usage+=(" $0 bump2version {major|minor|patch} - bump version number")
+bump2version() {
+    uv run bump2version "$@"
+}
+
+cmd=$1
+shift
+
+if ! declare -f "$cmd" >/dev/null; then
+    echo "Usage:"
+    for line in "${usage[@]}"; do echo "$line"; done
+    exit 1
+fi
+
+set -o xtrace
+
+"$cmd" "$@"
{sql_athame-0.4.0a9 → sql_athame-0.4.0a11}/sql_athame/dataclasses.py
@@ -8,6 +8,7 @@ from typing import (
     Annotated,
     Any,
     Callable,
+    Generic,
     Optional,
     TypeVar,
     Union,
@@ -156,7 +157,7 @@ class ModelBase:
     _cache: dict[tuple, Any]
     table_name: str
     primary_key_names: tuple[str, ...]
-
+    insert_multiple_mode: str
 
     def __init_subclass__(
         cls,
@@ -168,12 +169,9 @@ class ModelBase:
     ):
         cls._cache = {}
         cls.table_name = table_name
-        if insert_multiple_mode == "array_safe":
-            cls.array_safe_insert = True
-        elif insert_multiple_mode == "unnest":
-            cls.array_safe_insert = False
-        else:
+        if insert_multiple_mode not in ("array_safe", "unnest", "executemany"):
             raise ValueError("Unknown `insert_multiple_mode`")
+        cls.insert_multiple_mode = insert_multiple_mode
         if isinstance(primary_key, str):
             cls.primary_key_names = (primary_key,)
         else:
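The `insert_multiple_mode` keyword is consumed by the `__init_subclass__` hook shown above. A minimal sketch of opting into the new `executemany` mode (the `Widget` model and its columns are illustrative assumptions, not part of this package):

```python
from dataclasses import dataclass
from typing import Optional
from uuid import UUID

from sql_athame.dataclasses import ModelBase  # the module diffed here


@dataclass
class Widget(
    ModelBase,
    table_name="widgets",
    primary_key="id",
    insert_multiple_mode="executemany",  # new mode; "array_safe" and "unnest" remain valid
):
    id: UUID
    name: Optional[str] = None
```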
@@ -251,7 +249,7 @@ class ModelBase:
         if ci.field.name not in exclude:
             if ci.serialize:
                 env[f"_ser_{ci.field.name}"] = ci.serialize
-                func.append(f"_ser_{ci.field.name}(self.{ci.field.name}),
+                func.append(f"_ser_{ci.field.name}(self.{ci.field.name}),")
             else:
                 func.append(f"self.{ci.field.name},")
     func += ["]"]
@@ -356,7 +354,17 @@ class ModelBase:
         return query
 
     @classmethod
-    async def select_cursor(
+    async def cursor_from(
+        cls: type[T],
+        connection: Connection,
+        query: Fragment,
+        prefetch: int = 1000,
+    ) -> AsyncGenerator[T, None]:
+        async for row in connection.cursor(*query, prefetch=prefetch):
+            yield cls.from_mapping(row)
+
+    @classmethod
+    def select_cursor(
         cls: type[T],
         connection: Connection,
         order_by: Union[FieldNames, str] = (),
@@ -364,11 +372,19 @@ class ModelBase:
         where: Where = (),
         prefetch: int = 1000,
     ) -> AsyncGenerator[T, None]:
-
-
+        return cls.cursor_from(
+            connection,
+            cls.select_sql(order_by=order_by, for_update=for_update, where=where),
             prefetch=prefetch,
-        )
-
+        )
+
+    @classmethod
+    async def fetch_from(
+        cls: type[T],
+        connection_or_pool: Union[Connection, Pool],
+        query: Fragment,
+    ) -> list[T]:
+        return [cls.from_mapping(row) for row in await connection_or_pool.fetch(*query)]
 
     @classmethod
     async def select(
@@ -378,12 +394,10 @@ class ModelBase:
         for_update: bool = False,
         where: Where = (),
     ) -> list[T]:
-        return
-
-
-
-        )
-        ]
+        return await cls.fetch_from(
+            connection_or_pool,
+            cls.select_sql(order_by=order_by, for_update=for_update, where=where),
+        )
 
     @classmethod
     def create_sql(cls: type[T], **kwargs: Any) -> Fragment:
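The new `fetch_from` and `cursor_from` helpers run an arbitrary `Fragment` and map the rows back to model instances, and `select` / `select_cursor` are now thin wrappers over them. A rough usage sketch, reusing the hypothetical `Widget` model from the earlier sketch (the pool setup and WHERE clause are likewise assumptions, not part of this diff):

```python
from sql_athame import sql


async def widgets_named(pool, name: str) -> list[Widget]:
    # fetch_from: one fetch() round trip, rows mapped via from_mapping()
    query = sql(
        "SELECT * FROM {table} WHERE name = {name}",
        table=Widget.table_name_sql(),
        name=name,
    )
    return await Widget.fetch_from(pool, query)


async def stream_widgets(connection) -> None:
    # cursor_from: streams rows; asyncpg cursors require an open transaction
    async with connection.transaction():
        async for widget in Widget.cursor_from(connection, Widget.select_sql(), prefetch=500):
            print(widget)
```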
@@ -505,6 +519,37 @@ class ModelBase:
             ),
         )
 
+    @classmethod
+    def insert_multiple_executemany_chunk_sql(
+        cls: type[T], chunk_size: int
+    ) -> Fragment:
+        def generate() -> Fragment:
+            columns = len(cls.column_info())
+            values = ", ".join(
+                f"({', '.join(f'${i}' for i in chunk)})"
+                for chunk in chunked(range(1, columns * chunk_size + 1), columns)
+            )
+            return sql(
+                "INSERT INTO {table} ({fields}) VALUES {values}",
+                table=cls.table_name_sql(),
+                fields=sql.list(cls.field_names_sql()),
+                values=sql.literal(values),
+            ).flatten()
+
+        return cls._cached(
+            ("insert_multiple_executemany_chunk", chunk_size),
+            generate,
+        )
+
+    @classmethod
+    async def insert_multiple_executemany(
+        cls: type[T], connection_or_pool: Union[Connection, Pool], rows: Iterable[T]
+    ) -> None:
+        args = [r.field_values() for r in rows]
+        query = cls.insert_multiple_executemany_chunk_sql(1).query()[0]
+        if args:
+            await connection_or_pool.executemany(query, args)
+
     @classmethod
     async def insert_multiple_unnest(
         cls: type[T], connection_or_pool: Union[Connection, Pool], rows: Iterable[T]
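Since `insert_multiple_executemany_chunk_sql` only lays out numbered placeholders, the shape of its output is easy to picture. A self-contained illustration of the VALUES layout, with a stand-in for the module's `chunked` helper and an assumed 3-column model with `chunk_size=2`:

```python
def chunked(seq, n):
    # stand-in for the chunked() helper defined at the bottom of dataclasses.py
    seq = list(seq)
    return (seq[i : i + n] for i in range(0, len(seq), n))


columns, chunk_size = 3, 2  # assumed model width and chunk size
values = ", ".join(
    f"({', '.join(f'${i}' for i in chunk)})"
    for chunk in chunked(range(1, columns * chunk_size + 1), columns)
)
assert values == "($1, $2, $3), ($4, $5, $6)"
# insert_multiple_executemany itself requests chunk_size=1, i.e. one
# "INSERT ... VALUES ($1, $2, $3)" statement driven through executemany().
```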
@@ -526,11 +571,28 @@ class ModelBase:
     async def insert_multiple(
         cls: type[T], connection_or_pool: Union[Connection, Pool], rows: Iterable[T]
     ) -> str:
-        if cls.array_safe_insert:
+        if cls.insert_multiple_mode == "executemany":
+            await cls.insert_multiple_executemany(connection_or_pool, rows)
+            return "INSERT"
+        elif cls.insert_multiple_mode == "array_safe":
             return await cls.insert_multiple_array_safe(connection_or_pool, rows)
         else:
             return await cls.insert_multiple_unnest(connection_or_pool, rows)
 
+    @classmethod
+    async def upsert_multiple_executemany(
+        cls: type[T],
+        connection_or_pool: Union[Connection, Pool],
+        rows: Iterable[T],
+        insert_only: FieldNamesSet = (),
+    ) -> None:
+        args = [r.field_values() for r in rows]
+        query = cls.upsert_sql(
+            cls.insert_multiple_executemany_chunk_sql(1), exclude=insert_only
+        ).query()[0]
+        if args:
+            await connection_or_pool.executemany(query, args)
+
     @classmethod
     async def upsert_multiple_unnest(
         cls: type[T],
@@ -565,7 +627,12 @@ class ModelBase:
         rows: Iterable[T],
         insert_only: FieldNamesSet = (),
     ) -> str:
-        if cls.array_safe_insert:
+        if cls.insert_multiple_mode == "executemany":
+            await cls.upsert_multiple_executemany(
+                connection_or_pool, rows, insert_only=insert_only
+            )
+            return "INSERT"
+        elif cls.insert_multiple_mode == "array_safe":
             return await cls.upsert_multiple_array_safe(
                 connection_or_pool, rows, insert_only=insert_only
             )
@@ -588,7 +655,7 @@ class ModelBase:
         return env["equal_ignoring"]
 
     @classmethod
-    async def replace_multiple(
+    async def plan_replace_multiple(
         cls: type[T],
         connection: Connection,
         rows: Union[Iterable[T], Iterable[Mapping[str, Any]]],
@@ -596,7 +663,7 @@ class ModelBase:
         where: Where,
         ignore: FieldNamesSet = (),
         insert_only: FieldNamesSet = (),
-    ) ->
+    ) -> "ReplaceMultiplePlan[T]":
         ignore = sorted(set(ignore) | set(insert_only))
         equal_ignoring = cls._cached(
             ("equal_ignoring", tuple(ignore)),
@@ -620,14 +687,23 @@ class ModelBase:
 
         created = list(pending.values())
 
-
-        await cls.upsert_multiple(
-            connection, (*created, *updated), insert_only=insert_only
-        )
-        if deleted:
-            await cls.delete_multiple(connection, deleted)
+        return ReplaceMultiplePlan(cls, insert_only, created, updated, deleted)
 
-
+    @classmethod
+    async def replace_multiple(
+        cls: type[T],
+        connection: Connection,
+        rows: Union[Iterable[T], Iterable[Mapping[str, Any]]],
+        *,
+        where: Where,
+        ignore: FieldNamesSet = (),
+        insert_only: FieldNamesSet = (),
+    ) -> tuple[list[T], list[T], list[T]]:
+        plan = await cls.plan_replace_multiple(
+            connection, rows, where=where, ignore=ignore, insert_only=insert_only
+        )
+        await plan.execute(connection)
+        return plan.cud
 
     @classmethod
     def _get_differences_ignoring_fn(
@@ -694,6 +770,33 @@ class ModelBase:
         return created, updated_triples, deleted
 
 
+@dataclass
+class ReplaceMultiplePlan(Generic[T]):
+    model_class: type[T]
+    insert_only: FieldNamesSet
+    created: list[T]
+    updated: list[T]
+    deleted: list[T]
+
+    @property
+    def cud(self) -> tuple[list[T], list[T], list[T]]:
+        return (self.created, self.updated, self.deleted)
+
+    async def execute_upserts(self, connection: Connection) -> None:
+        if self.created or self.updated:
+            await self.model_class.upsert_multiple(
+                connection, (*self.created, *self.updated), insert_only=self.insert_only
+            )
+
+    async def execute_deletes(self, connection: Connection) -> None:
+        if self.deleted:
+            await self.model_class.delete_multiple(connection, self.deleted)
+
+    async def execute(self, connection: Connection) -> None:
+        await self.execute_upserts(connection)
+        await self.execute_deletes(connection)
+
+
 def chunked(lst, n):
     if type(lst) is not list:
         lst = list(lst)
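`replace_multiple` now delegates to `plan_replace_multiple`, which returns a `ReplaceMultiplePlan` whose upserts and deletes can be inspected or executed separately. A hedged sketch of both paths, again using the hypothetical `Widget` model and an assumed scoping clause:

```python
async def sync_widgets(connection, incoming: list[Widget]) -> None:
    where = sql("kind = {kind}", kind="gadget")  # assumed WHERE fragment

    # One-shot path: upserts and deletes happen inside the call.
    created, updated, deleted = await Widget.replace_multiple(
        connection, incoming, where=where
    )

    # Staged path: build the plan, inspect it, then execute its parts.
    plan = await Widget.plan_replace_multiple(connection, incoming, where=where)
    if plan.deleted:
        print(f"would delete {len(plan.deleted)} rows")
    await plan.execute(connection)  # execute_upserts() then execute_deletes()
    created, updated, deleted = plan.cud
```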