sql-athame 0.4.0a12__py3-none-any.whl → 0.4.0a14__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
sql_athame/dataclasses.py CHANGED
@@ -33,6 +33,36 @@ Pool: TypeAlias = Any
33
33
 
34
34
  @dataclass
35
35
  class ColumnInfo:
36
+ """Column metadata for dataclass fields.
37
+
38
+ This class specifies SQL column properties that can be applied to dataclass fields
39
+ to control how they are mapped to database columns.
40
+
41
+ Attributes:
42
+ type: SQL type name for query parameters (e.g., 'TEXT', 'INTEGER')
43
+ create_type: SQL type for CREATE TABLE statements (defaults to type if not specified)
44
+ nullable: Whether the column allows NULL values (inferred from Optional types if not specified)
45
+ constraints: Additional SQL constraints (e.g., 'UNIQUE', 'CHECK (value > 0)')
46
+ serialize: Function to transform Python values before database storage
47
+ deserialize: Function to transform database values back to Python objects
48
+ insert_only: Whether this field should only be set on INSERT, not UPDATE in upsert operations
49
+ replace_ignore: Whether this field should be ignored when comparing rows in `replace_multiple`
50
+
51
+ Example:
52
+ >>> from dataclasses import dataclass
53
+ >>> from typing import Annotated
54
+ >>> from sql_athame import ModelBase, ColumnInfo
55
+ >>> import json
+ >>> from datetime import datetime
56
+ >>>
57
+ >>> @dataclass
58
+ ... class Product(ModelBase, table_name="products", primary_key="id"):
59
+ ... id: int
60
+ ... name: str
61
+ ... price: Annotated[float, ColumnInfo(constraints="CHECK (price > 0)")]
62
+ ... tags: Annotated[list, ColumnInfo(type="JSONB", serialize=json.dumps, deserialize=json.loads)]
63
+ ... created_at: Annotated[datetime, ColumnInfo(insert_only=True)]
64
+ """
65
+
36
66
  type: Optional[str] = None
37
67
  create_type: Optional[str] = None
38
68
  nullable: Optional[bool] = None
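
For orientation, an illustrative sketch (not part of the package itself) that extends the docstring example above to cover both of the new flags, insert_only and replace_ignore:

    from dataclasses import dataclass
    from datetime import datetime
    from typing import Annotated
    from uuid import UUID

    from sql_athame import ColumnInfo, ModelBase

    @dataclass
    class Product(ModelBase, table_name="products", primary_key="id"):
        id: UUID
        name: str
        # written once on INSERT; upserts leave it alone unless force_update is used
        created_at: Annotated[datetime, ColumnInfo(insert_only=True)]
        # skipped when replace_multiple compares incoming rows against existing ones
        updated_at: Annotated[datetime, ColumnInfo(replace_ignore=True)]
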
@@ -42,6 +72,8 @@ class ColumnInfo:
42
72
 
43
73
  serialize: Optional[Callable[[Any], Any]] = None
44
74
  deserialize: Optional[Callable[[Any], Any]] = None
75
+ insert_only: Optional[bool] = None
76
+ replace_ignore: Optional[bool] = None
45
77
 
46
78
  def __post_init__(self, constraints: Union[str, Iterable[str], None]) -> None:
47
79
  if constraints is not None:
@@ -51,6 +83,15 @@ class ColumnInfo:
51
83
 
52
84
  @staticmethod
53
85
  def merge(a: "ColumnInfo", b: "ColumnInfo") -> "ColumnInfo":
86
+ """Merge two ColumnInfo instances, with b taking precedence over a.
87
+
88
+ Args:
89
+ a: Base ColumnInfo
90
+ b: ColumnInfo to overlay on top of a
91
+
92
+ Returns:
93
+ New ColumnInfo with b's non-None values overriding a's values
94
+ """
54
95
  return ColumnInfo(
55
96
  type=b.type if b.type is not None else a.type,
56
97
  create_type=b.create_type if b.create_type is not None else a.create_type,
@@ -58,24 +99,61 @@ class ColumnInfo:
58
99
  _constraints=(*a._constraints, *b._constraints),
59
100
  serialize=b.serialize if b.serialize is not None else a.serialize,
60
101
  deserialize=b.deserialize if b.deserialize is not None else a.deserialize,
102
+ insert_only=b.insert_only if b.insert_only is not None else a.insert_only,
103
+ replace_ignore=(
104
+ b.replace_ignore if b.replace_ignore is not None else a.replace_ignore
105
+ ),
61
106
  )
62
107
 
63
108
 
64
109
  @dataclass
65
110
  class ConcreteColumnInfo:
111
+ """Resolved column information for a specific dataclass field.
112
+
113
+ This is the final, computed column metadata after resolving type hints,
114
+ merging ColumnInfo instances, and applying defaults.
115
+
116
+ Attributes:
117
+ field: The dataclass Field object
118
+ type_hint: The resolved Python type hint
119
+ type: SQL type for query parameters
120
+ create_type: SQL type for CREATE TABLE statements
121
+ nullable: Whether the column allows NULL values
122
+ constraints: Tuple of SQL constraint strings
123
+ serialize: Optional serialization function
124
+ deserialize: Optional deserialization function
125
+ insert_only: Whether this field should only be set on INSERT, not UPDATE
126
+ replace_ignore: Whether this field should be ignored when comparing rows in `replace_multiple`
127
+ """
128
+
66
129
  field: Field
67
130
  type_hint: type
68
131
  type: str
69
132
  create_type: str
70
133
  nullable: bool
71
134
  constraints: tuple[str, ...]
72
- serialize: Optional[Callable[[Any], Any]] = None
73
- deserialize: Optional[Callable[[Any], Any]] = None
135
+ serialize: Optional[Callable[[Any], Any]]
136
+ deserialize: Optional[Callable[[Any], Any]]
137
+ insert_only: bool
138
+ replace_ignore: bool
74
139
 
75
140
  @staticmethod
76
141
  def from_column_info(
77
142
  field: Field, type_hint: Any, *args: ColumnInfo
78
143
  ) -> "ConcreteColumnInfo":
144
+ """Create ConcreteColumnInfo from a field and its ColumnInfo metadata.
145
+
146
+ Args:
147
+ field: The dataclass Field
148
+ type_hint: The resolved type hint for the field
149
+ *args: ColumnInfo instances to merge (later ones take precedence)
150
+
151
+ Returns:
152
+ ConcreteColumnInfo with all metadata resolved
153
+
154
+ Raises:
155
+ ValueError: If no SQL type can be determined for the field
156
+ """
79
157
  info = functools.reduce(ColumnInfo.merge, args, ColumnInfo())
80
158
  if info.create_type is None and info.type is not None:
81
159
  info.create_type = info.type
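
As a quick illustration of the merge rules documented above (a sketch, not from the package): non-None values from the second argument win field by field, and constraint lists are concatenated.

    from sql_athame import ColumnInfo

    a = ColumnInfo(type="TEXT", constraints="UNIQUE")
    b = ColumnInfo(nullable=False, constraints="CHECK (length(name) > 0)")

    merged = ColumnInfo.merge(a, b)
    # merged.type == "TEXT"      (only a set it)
    # merged.nullable is False   (b overrides a's None)
    # both constraint strings are kept, in a-then-b order
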
@@ -91,9 +169,16 @@ class ConcreteColumnInfo:
91
169
  constraints=info._constraints,
92
170
  serialize=info.serialize,
93
171
  deserialize=info.deserialize,
172
+ insert_only=bool(info.insert_only),
173
+ replace_ignore=bool(info.replace_ignore),
94
174
  )
95
175
 
96
176
  def create_table_string(self) -> str:
177
+ """Generate the SQL column definition for CREATE TABLE statements.
178
+
179
+ Returns:
180
+ SQL string like "TEXT NOT NULL CHECK (length > 0)"
181
+ """
97
182
  parts = (
98
183
  self.create_type,
99
184
  *(() if self.nullable else ("NOT NULL",)),
@@ -102,6 +187,14 @@ class ConcreteColumnInfo:
102
187
  return " ".join(parts)
103
188
 
104
189
  def maybe_serialize(self, value: Any) -> Any:
190
+ """Apply serialization function if configured, otherwise return value unchanged.
191
+
192
+ Args:
193
+ value: The Python value to potentially serialize
194
+
195
+ Returns:
196
+ Serialized value if serialize function is configured, otherwise original value
197
+ """
105
198
  if self.serialize:
106
199
  return self.serialize(value)
107
200
  return value
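
To make the resolution step above concrete, a hedged sketch of inspecting a resolved column, using the Product sketch from earlier and the column_info() accessor documented further below; the exact SQL type text depends on the library's default type mapping and is an assumption here:

    ci = Product.column_info()["created_at"]

    ci.insert_only            # True, taken from the Annotated ColumnInfo
    ci.nullable               # False, inferred from the non-Optional type hint
    ci.create_table_string()  # e.g. "timestamp with time zone NOT NULL" (assumed mapping)
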
@@ -179,6 +272,15 @@ class ModelBase:
179
272
 
180
273
  @classmethod
181
274
  def _cached(cls, key: tuple, thunk: Callable[[], U]) -> U:
275
+ """Cache computation results by key.
276
+
277
+ Args:
278
+ key: Cache key tuple
279
+ thunk: Function to compute the value if not cached
280
+
281
+ Returns:
282
+ Cached or computed value
283
+ """
182
284
  try:
183
285
  return cls._cache[key]
184
286
  except KeyError:
@@ -187,6 +289,18 @@ class ModelBase:
187
289
 
188
290
  @classmethod
189
291
  def column_info_for_field(cls, field: Field, type_hint: type) -> ConcreteColumnInfo:
292
+ """Generate ConcreteColumnInfo for a dataclass field.
293
+
294
+ Analyzes the field's type hint and metadata to determine SQL column properties.
295
+ Looks for ColumnInfo in the field's metadata and merges it with type-based defaults.
296
+
297
+ Args:
298
+ field: The dataclass Field object
299
+ type_hint: The resolved type hint for the field
300
+
301
+ Returns:
302
+ ConcreteColumnInfo with all column metadata resolved
303
+ """
190
304
  base_type = type_hint
191
305
  metadata = []
192
306
  if get_origin(type_hint) is Annotated:
@@ -202,6 +316,14 @@ class ModelBase:
202
316
 
203
317
  @classmethod
204
318
  def column_info(cls) -> dict[str, ConcreteColumnInfo]:
319
+ """Get column information for all fields in this model.
320
+
321
+ Returns a cached mapping of field names to their resolved column information.
322
+ This is computed once per class and cached for performance.
323
+
324
+ Returns:
325
+ Dictionary mapping field names to ConcreteColumnInfo objects
326
+ """
205
327
  try:
206
328
  return cls._column_info
207
329
  except AttributeError:
@@ -214,20 +336,78 @@ class ModelBase:
214
336
 
215
337
  @classmethod
216
338
  def table_name_sql(cls, *, prefix: Optional[str] = None) -> Fragment:
339
+ """Generate SQL fragment for the table name.
340
+
341
+ Args:
342
+ prefix: Optional schema or alias prefix
343
+
344
+ Returns:
345
+ Fragment containing the properly quoted table identifier
346
+
347
+ Example:
348
+ >>> list(User.table_name_sql())
349
+ ['"users"']
350
+ >>> list(User.table_name_sql(prefix="public"))
351
+ ['"public"."users"']
352
+ """
217
353
  return sql.identifier(cls.table_name, prefix=prefix)
218
354
 
219
355
  @classmethod
220
356
  def primary_key_names_sql(cls, *, prefix: Optional[str] = None) -> list[Fragment]:
357
+ """Generate SQL fragments for primary key column names.
358
+
359
+ Args:
360
+ prefix: Optional table alias prefix
361
+
362
+ Returns:
363
+ List of Fragment objects for each primary key column
364
+ """
221
365
  return [sql.identifier(pk, prefix=prefix) for pk in cls.primary_key_names]
222
366
 
223
367
  @classmethod
224
368
  def field_names(cls, *, exclude: FieldNamesSet = ()) -> list[str]:
369
+ """Get list of field names for this model.
370
+
371
+ Args:
372
+ exclude: Field names to exclude from the result
373
+
374
+ Returns:
375
+ List of field names as strings
376
+ """
225
377
  return [
226
378
  ci.field.name
227
379
  for ci in cls.column_info().values()
228
380
  if ci.field.name not in exclude
229
381
  ]
230
382
 
383
+ @classmethod
384
+ def insert_only_field_names(cls) -> set[str]:
385
+ """Get set of field names marked as insert_only in ColumnInfo.
386
+
387
+ Returns:
388
+ Set of field names that should only be set on INSERT, not UPDATE
389
+ """
390
+ return cls._cached(
391
+ ("insert_only_field_names",),
392
+ lambda: {
393
+ ci.field.name for ci in cls.column_info().values() if ci.insert_only
394
+ },
395
+ )
396
+
397
+ @classmethod
398
+ def replace_ignore_field_names(cls) -> set[str]:
399
+ """Get set of field names marked as replace_ignore in ColumnInfo.
400
+
401
+ Returns:
402
+ Set of field names that are ignored when comparing rows in `replace_multiple`
403
+ """
404
+ return cls._cached(
405
+ ("replace_ignore_field_names",),
406
+ lambda: {
407
+ ci.field.name for ci in cls.column_info().values() if ci.replace_ignore
408
+ },
409
+ )
410
+
231
411
  @classmethod
232
412
  def field_names_sql(
233
413
  cls,
@@ -236,6 +416,24 @@ class ModelBase:
236
416
  exclude: FieldNamesSet = (),
237
417
  as_prepended: Optional[str] = None,
238
418
  ) -> list[Fragment]:
419
+ """Generate SQL fragments for field names.
420
+
421
+ Args:
422
+ prefix: Optional table alias prefix for column names
423
+ exclude: Field names to exclude from the result
424
+ as_prepended: If provided, generate "column AS prepended_column" aliases
425
+
426
+ Returns:
427
+ List of Fragment objects for each field
428
+
429
+ Example:
430
+ >>> list(sql.list(User.field_names_sql()))
431
+ ['"id", "name", "email"']
432
+ >>> list(sql.list(User.field_names_sql(prefix="u")))
433
+ ['"u"."id", "u"."name", "u"."email"']
434
+ >>> list(sql.list(User.field_names_sql(as_prepended="user_")))
435
+ ['"id" AS "user_id", "name" AS "user_name", "email" AS "user_email"']
436
+ """
239
437
  if as_prepended:
240
438
  return [
241
439
  sql(
@@ -250,12 +448,33 @@ class ModelBase:
250
448
  ]
251
449
 
252
450
  def primary_key(self) -> tuple:
451
+ """Get the primary key value(s) for this instance.
452
+
453
+ Returns:
454
+ Tuple containing the primary key field values
455
+
456
+ Example:
457
+ >>> user = User(id=UUID(...), name="Alice")
458
+ >>> user.primary_key()
459
+ (UUID('...'),)
460
+ """
253
461
  return tuple(getattr(self, pk) for pk in self.primary_key_names)
254
462
 
255
463
  @classmethod
256
464
  def _get_field_values_fn(
257
465
  cls: type[T], exclude: FieldNamesSet = ()
258
466
  ) -> Callable[[T], list[Any]]:
467
+ """Generate optimized function to extract field values from instances.
468
+
469
+ This method generates and compiles a function that efficiently extracts
470
+ field values from model instances, applying serialization where needed.
471
+
472
+ Args:
473
+ exclude: Field names to exclude from value extraction
474
+
475
+ Returns:
476
+ Compiled function that takes an instance and returns field values
477
+ """
259
478
  env: dict[str, Any] = {}
260
479
  func = ["def get_field_values(self): return ["]
261
480
  for ci in cls.column_info().values():
@@ -270,6 +489,17 @@ class ModelBase:
270
489
  return env["get_field_values"]
271
490
 
272
491
  def field_values(self, *, exclude: FieldNamesSet = ()) -> list[Any]:
492
+ """Get field values for this instance, with serialization applied.
493
+
494
+ Args:
495
+ exclude: Field names to exclude from the result
496
+
497
+ Returns:
498
+ List of field values in the same order as field_names()
499
+
500
+ Note:
501
+ This method applies any configured serialize functions to the values.
502
+ """
273
503
  get_field_values = self._cached(
274
504
  ("get_field_values", tuple(sorted(exclude))),
275
505
  lambda: self._get_field_values_fn(exclude),
@@ -279,6 +509,15 @@ class ModelBase:
279
509
  def field_values_sql(
280
510
  self, *, exclude: FieldNamesSet = (), default_none: bool = False
281
511
  ) -> list[Fragment]:
512
+ """Generate SQL fragments for field values.
513
+
514
+ Args:
515
+ exclude: Field names to exclude
516
+ default_none: If True, None values become DEFAULT literals instead of NULL
517
+
518
+ Returns:
519
+ List of Fragment objects containing value placeholders or DEFAULT
520
+ """
282
521
  if default_none:
283
522
  return [
284
523
  sql.literal("DEFAULT") if value is None else sql.value(value)
@@ -289,6 +528,15 @@ class ModelBase:
289
528
 
290
529
  @classmethod
291
530
  def _get_from_mapping_fn(cls: type[T]) -> Callable[[Mapping[str, Any]], T]:
531
+ """Generate optimized function to create instances from mappings.
532
+
533
+ This method generates and compiles a function that efficiently creates
534
+ model instances from dictionary-like mappings, applying deserialization
535
+ where needed.
536
+
537
+ Returns:
538
+ Compiled function that takes a mapping and returns a model instance
539
+ """
292
540
  env: dict[str, Any] = {"cls": cls}
293
541
  func = ["def from_mapping(mapping):"]
294
542
  if not any(ci.deserialize for ci in cls.column_info().values()):
@@ -308,6 +556,21 @@ class ModelBase:
308
556
 
309
557
  @classmethod
310
558
  def from_mapping(cls: type[T], mapping: Mapping[str, Any], /) -> T:
559
+ """Create a model instance from a dictionary-like mapping.
560
+
561
+ This method applies any configured deserialize functions to the values
562
+ before creating the instance.
563
+
564
+ Args:
565
+ mapping: Dictionary-like object with field names as keys
566
+
567
+ Returns:
568
+ New instance of this model class
569
+
570
+ Example:
571
+ >>> row = {"id": UUID(...), "name": "Alice", "email": None}
572
+ >>> user = User.from_mapping(row)
573
+ """
311
574
  # KLUDGE nasty but... efficient?
312
575
  from_mapping_fn = cls._get_from_mapping_fn()
313
576
  cls.from_mapping = from_mapping_fn # type: ignore
@@ -317,6 +580,22 @@ class ModelBase:
317
580
  def from_prepended_mapping(
318
581
  cls: type[T], mapping: Mapping[str, Any], prepend: str
319
582
  ) -> T:
583
+ """Create a model instance from a mapping with prefixed keys.
584
+
585
+ Useful for creating instances from JOIN query results where columns
586
+ are prefixed to avoid name conflicts.
587
+
588
+ Args:
589
+ mapping: Dictionary with prefixed keys
590
+ prepend: Prefix to strip from keys
591
+
592
+ Returns:
593
+ New instance of this model class
594
+
595
+ Example:
596
+ >>> row = {"user_id": UUID(...), "user_name": "Alice", "user_email": None}
597
+ >>> user = User.from_prepended_mapping(row, "user_")
598
+ """
320
599
  filtered_dict: dict[str, Any] = {}
321
600
  for k, v in mapping.items():
322
601
  if k.startswith(prepend):
@@ -325,12 +604,29 @@ class ModelBase:
325
604
 
326
605
  @classmethod
327
606
  def ensure_model(cls: type[T], row: Union[T, Mapping[str, Any]]) -> T:
607
+ """Ensure the input is a model instance, converting from mapping if needed.
608
+
609
+ Args:
610
+ row: Either a model instance or a mapping to convert
611
+
612
+ Returns:
613
+ Model instance
614
+ """
328
615
  if isinstance(row, cls):
329
616
  return row
330
617
  return cls.from_mapping(row) # type: ignore
331
618
 
332
619
  @classmethod
333
620
  def create_table_sql(cls) -> Fragment:
621
+ """Generate CREATE TABLE SQL for this model.
622
+
623
+ Returns:
624
+ Fragment containing CREATE TABLE IF NOT EXISTS statement
625
+
626
+ Example:
627
+ >>> list(User.create_table_sql())
628
+ ['CREATE TABLE IF NOT EXISTS "users" ("id" UUID NOT NULL, "name" TEXT NOT NULL, "email" TEXT, PRIMARY KEY ("id"))']
629
+ """
334
630
  entries = [
335
631
  sql(
336
632
  "{} {}",
@@ -354,6 +650,20 @@ class ModelBase:
354
650
  order_by: Union[FieldNames, str] = (),
355
651
  for_update: bool = False,
356
652
  ) -> Fragment:
653
+ """Generate SELECT SQL for this model.
654
+
655
+ Args:
656
+ where: WHERE conditions as Fragment or iterable of Fragments
657
+ order_by: ORDER BY field names
658
+ for_update: Whether to add FOR UPDATE clause
659
+
660
+ Returns:
661
+ Fragment containing SELECT statement
662
+
663
+ Example:
664
+ >>> list(User.select_sql(where=sql("name = {}", "Alice")))
665
+ ['SELECT "id", "name", "email" FROM "users" WHERE name = $1', 'Alice']
666
+ """
357
667
  if isinstance(order_by, str):
358
668
  order_by = (order_by,)
359
669
  if not isinstance(where, Fragment):
@@ -383,6 +693,16 @@ class ModelBase:
383
693
  query: Fragment,
384
694
  prefetch: int = 1000,
385
695
  ) -> AsyncGenerator[T, None]:
696
+ """Create an async generator from a query result.
697
+
698
+ Args:
699
+ connection: Database connection
700
+ query: SQL query Fragment
701
+ prefetch: Number of rows to prefetch
702
+
703
+ Yields:
704
+ Model instances from the query results
705
+ """
386
706
  async for row in connection.cursor(*query, prefetch=prefetch):
387
707
  yield cls.from_mapping(row)
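
Since the cursor_from docstring above has no example, here is a hedged sketch with a hand-built query; it assumes an asyncpg connection `conn`, an open transaction, and the Product sketch from earlier (illustrative only, not from the package):

    from sql_athame import sql

    async def iter_recent_products(conn):
        query = sql(
            "SELECT {fields} FROM {table} WHERE created_at > now() - interval '1 day'",
            fields=sql.list(Product.field_names_sql()),
            table=Product.table_name_sql(),
        )
        # asyncpg cursors must be used inside a transaction
        async for product in Product.cursor_from(conn, query, prefetch=500):
            yield product
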
388
708
 
@@ -395,6 +715,22 @@ class ModelBase:
395
715
  where: Where = (),
396
716
  prefetch: int = 1000,
397
717
  ) -> AsyncGenerator[T, None]:
718
+ """Create an async generator for SELECT results.
719
+
720
+ Args:
721
+ connection: Database connection
722
+ order_by: ORDER BY field names
723
+ for_update: Whether to add FOR UPDATE clause
724
+ where: WHERE conditions
725
+ prefetch: Number of rows to prefetch
726
+
727
+ Yields:
728
+ Model instances from the SELECT results
729
+
730
+ Example:
731
+ >>> async for user in User.select_cursor(conn, where=sql("active = {}", True)):
732
+ ... print(user.name)
733
+ """
398
734
  return cls.cursor_from(
399
735
  connection,
400
736
  cls.select_sql(order_by=order_by, for_update=for_update, where=where),
@@ -407,6 +743,15 @@ class ModelBase:
407
743
  connection_or_pool: Union[Connection, Pool],
408
744
  query: Fragment,
409
745
  ) -> list[T]:
746
+ """Execute a query and return model instances.
747
+
748
+ Args:
749
+ connection_or_pool: Database connection or pool
750
+ query: SQL query Fragment
751
+
752
+ Returns:
753
+ List of model instances from the query results
754
+ """
410
755
  return [cls.from_mapping(row) for row in await connection_or_pool.fetch(*query)]
411
756
 
412
757
  @classmethod
@@ -417,6 +762,20 @@ class ModelBase:
417
762
  for_update: bool = False,
418
763
  where: Where = (),
419
764
  ) -> list[T]:
765
+ """Execute a SELECT query and return model instances.
766
+
767
+ Args:
768
+ connection_or_pool: Database connection or pool
769
+ order_by: ORDER BY field names
770
+ for_update: Whether to add FOR UPDATE clause
771
+ where: WHERE conditions
772
+
773
+ Returns:
774
+ List of model instances from the SELECT results
775
+
776
+ Example:
777
+ >>> users = await User.select(pool, where=sql("active = {}", True))
778
+ """
420
779
  return await cls.fetch_from(
421
780
  connection_or_pool,
422
781
  cls.select_sql(order_by=order_by, for_update=for_update, where=where),
@@ -424,6 +783,18 @@ class ModelBase:
424
783
 
425
784
  @classmethod
426
785
  def create_sql(cls: type[T], **kwargs: Any) -> Fragment:
786
+ """Generate INSERT SQL for creating a new record with RETURNING clause.
787
+
788
+ Args:
789
+ **kwargs: Field values for the new record
790
+
791
+ Returns:
792
+ Fragment containing INSERT ... RETURNING statement
793
+
794
+ Example:
795
+ >>> list(User.create_sql(name="Alice", email="alice@example.com"))
796
+ ['INSERT INTO "users" ("name", "email") VALUES ($1, $2) RETURNING "id", "name", "email"', 'Alice', 'alice@example.com']
797
+ """
427
798
  column_info = cls.column_info()
428
799
  return sql(
429
800
  "INSERT INTO {table} ({fields}) VALUES ({values}) RETURNING {out_fields}",
@@ -439,10 +810,35 @@ class ModelBase:
439
810
  async def create(
440
811
  cls: type[T], connection_or_pool: Union[Connection, Pool], **kwargs: Any
441
812
  ) -> T:
813
+ """Create a new record in the database.
814
+
815
+ Args:
816
+ connection_or_pool: Database connection or pool
817
+ **kwargs: Field values for the new record
818
+
819
+ Returns:
820
+ Model instance representing the created record
821
+
822
+ Example:
823
+ >>> user = await User.create(pool, name="Alice", email="alice@example.com")
824
+ """
442
825
  row = await connection_or_pool.fetchrow(*cls.create_sql(**kwargs))
443
826
  return cls.from_mapping(row)
444
827
 
445
828
  def insert_sql(self, exclude: FieldNamesSet = ()) -> Fragment:
829
+ """Generate INSERT SQL for this instance.
830
+
831
+ Args:
832
+ exclude: Field names to exclude from the INSERT
833
+
834
+ Returns:
835
+ Fragment containing INSERT statement
836
+
837
+ Example:
838
+ >>> user = User(name="Alice", email="alice@example.com")
839
+ >>> list(user.insert_sql())
840
+ ['INSERT INTO "users" ("name", "email") VALUES ($1, $2)', 'Alice', 'alice@example.com']
841
+ """
446
842
  cached = self._cached(
447
843
  ("insert_sql", tuple(sorted(exclude))),
448
844
  lambda: sql(
@@ -458,38 +854,136 @@ class ModelBase:
458
854
  async def insert(
459
855
  self, connection_or_pool: Union[Connection, Pool], exclude: FieldNamesSet = ()
460
856
  ) -> str:
857
+ """Insert this instance into the database.
858
+
859
+ Args:
860
+ connection_or_pool: Database connection or pool
861
+ exclude: Field names to exclude from the INSERT
862
+
863
+ Returns:
864
+ Result string from the database operation
865
+ """
461
866
  return await connection_or_pool.execute(*self.insert_sql(exclude))
462
867
 
463
868
  @classmethod
464
- def upsert_sql(cls, insert_sql: Fragment, exclude: FieldNamesSet = ()) -> Fragment:
465
- cached = cls._cached(
466
- ("upsert_sql", tuple(sorted(exclude))),
467
- lambda: sql(
468
- " ON CONFLICT ({pks}) DO UPDATE SET {assignments}",
869
+ def upsert_sql(
870
+ cls,
871
+ insert_sql: Fragment,
872
+ insert_only: FieldNamesSet = (),
873
+ force_update: FieldNamesSet = (),
874
+ ) -> Fragment:
875
+ """Generate UPSERT (INSERT ... ON CONFLICT DO UPDATE) SQL.
876
+
877
+ Args:
878
+ insert_sql: Base INSERT statement Fragment
879
+ insert_only: Field names to exclude from the UPDATE clause
880
+ force_update: Field names to force include in UPDATE clause, overriding insert_only settings
881
+
882
+ Returns:
883
+ Fragment containing INSERT ... ON CONFLICT DO UPDATE statement
884
+
885
+ Example:
886
+ >>> insert = user.insert_sql()
887
+ >>> list(User.upsert_sql(insert))
888
+ ['INSERT INTO "users" ("name", "email") VALUES ($1, $2) ON CONFLICT ("id") DO UPDATE SET "name"=EXCLUDED."name", "email"=EXCLUDED."email"', 'Alice', 'alice@example.com']
889
+
890
+ Note:
891
+ Fields marked with ColumnInfo(insert_only=True) are automatically
892
+ excluded from the UPDATE clause, unless overridden by force_update.
893
+ """
894
+ # Combine insert_only parameter with auto-detected insert_only fields, but remove force_update fields
895
+ auto_insert_only = cls.insert_only_field_names() - set(force_update)
896
+ manual_insert_only = set(insert_only) - set(
897
+ force_update
898
+ ) # Remove force_update from manual insert_only too
899
+ all_insert_only = manual_insert_only | auto_insert_only
900
+
901
+ def generate_upsert_fragment():
902
+ updatable_fields = cls.field_names(
903
+ exclude=(*cls.primary_key_names, *all_insert_only)
904
+ )
905
+ return sql(
906
+ " ON CONFLICT ({pks}) DO {action}",
469
907
  insert_sql=insert_sql,
470
908
  pks=sql.list(cls.primary_key_names_sql()),
471
- assignments=sql.list(
472
- sql("{field}=EXCLUDED.{field}", field=x)
473
- for x in cls.field_names_sql(
474
- exclude=(*cls.primary_key_names, *exclude)
909
+ action=(
910
+ sql(
911
+ "UPDATE SET {assignments}",
912
+ assignments=sql.list(
913
+ sql("{field}=EXCLUDED.{field}", field=sql.identifier(field))
914
+ for field in updatable_fields
915
+ ),
475
916
  )
917
+ if updatable_fields
918
+ else sql.literal("NOTHING")
476
919
  ),
477
- ).flatten(),
920
+ ).flatten()
921
+
922
+ cached = cls._cached(
923
+ ("upsert_sql", tuple(sorted(all_insert_only))),
924
+ generate_upsert_fragment,
478
925
  )
479
926
  return Fragment([insert_sql, cached])
480
927
 
481
928
  async def upsert(
482
- self, connection_or_pool: Union[Connection, Pool], exclude: FieldNamesSet = ()
929
+ self,
930
+ connection_or_pool: Union[Connection, Pool],
931
+ exclude: FieldNamesSet = (),
932
+ insert_only: FieldNamesSet = (),
933
+ force_update: FieldNamesSet = (),
483
934
  ) -> bool:
935
+ """Insert or update this instance in the database.
936
+
937
+ Args:
938
+ connection_or_pool: Database connection or pool
939
+ exclude: Field names to exclude from INSERT and UPDATE
940
+ insert_only: Field names that should only be set on INSERT, not UPDATE
941
+ force_update: Field names to force include in UPDATE clause, overriding insert_only settings
942
+
943
+ Returns:
944
+ True if the record was updated, False if it was inserted
945
+
946
+ Example:
947
+ >>> user = User(id=1, name="Alice", created_at=datetime.now())
948
+ >>> # Only set created_at on INSERT, not UPDATE
949
+ >>> was_updated = await user.upsert(pool, insert_only={'created_at'})
950
+ >>> # Force update created_at even if it's marked insert_only in ColumnInfo
951
+ >>> was_updated = await user.upsert(pool, force_update={'created_at'})
952
+
953
+ Note:
954
+ Fields marked with ColumnInfo(insert_only=True) are automatically
955
+ treated as insert-only and combined with the insert_only parameter,
956
+ unless overridden by force_update.
957
+ """
958
+ # upsert_sql automatically handles insert_only fields from ColumnInfo
959
+ # We only need to combine manual insert_only with exclude for the UPDATE clause
960
+ update_exclude = set(exclude) | set(insert_only)
484
961
  query = sql(
485
962
  "{} RETURNING xmax",
486
- self.upsert_sql(self.insert_sql(exclude=exclude), exclude=exclude),
963
+ self.upsert_sql(
964
+ self.insert_sql(exclude=exclude),
965
+ insert_only=update_exclude,
966
+ force_update=force_update,
967
+ ),
487
968
  )
488
969
  result = await connection_or_pool.fetchrow(*query)
489
970
  return result["xmax"] != 0
490
971
 
491
972
  @classmethod
492
973
  def delete_multiple_sql(cls: type[T], rows: Iterable[T]) -> Fragment:
974
+ """Generate DELETE SQL for multiple records.
975
+
976
+ Args:
977
+ rows: Model instances to delete
978
+
979
+ Returns:
980
+ Fragment containing DELETE statement with UNNEST-based WHERE clause
981
+
982
+ Example:
983
+ >>> users = [user1, user2, user3]
984
+ >>> list(User.delete_multiple_sql(users))
985
+ ['DELETE FROM "users" WHERE ("id") IN (SELECT * FROM UNNEST($1::UUID[]))', (uuid1, uuid2, uuid3)]
986
+ """
493
987
  cached = cls._cached(
494
988
  ("delete_multiple_sql",),
495
989
  lambda: sql(
@@ -510,10 +1004,34 @@ class ModelBase:
510
1004
  async def delete_multiple(
511
1005
  cls: type[T], connection_or_pool: Union[Connection, Pool], rows: Iterable[T]
512
1006
  ) -> str:
1007
+ """Delete multiple records from the database.
1008
+
1009
+ Args:
1010
+ connection_or_pool: Database connection or pool
1011
+ rows: Model instances to delete
1012
+
1013
+ Returns:
1014
+ Result string from the database operation
1015
+ """
513
1016
  return await connection_or_pool.execute(*cls.delete_multiple_sql(rows))
514
1017
 
515
1018
  @classmethod
516
1019
  def insert_multiple_sql(cls: type[T], rows: Iterable[T]) -> Fragment:
1020
+ """Generate bulk INSERT SQL using UNNEST.
1021
+
1022
+ This is the most efficient method for bulk inserts in PostgreSQL.
1023
+
1024
+ Args:
1025
+ rows: Model instances to insert
1026
+
1027
+ Returns:
1028
+ Fragment containing INSERT ... SELECT FROM UNNEST statement
1029
+
1030
+ Example:
1031
+ >>> users = [User(name="Alice"), User(name="Bob")]
1032
+ >>> list(User.insert_multiple_sql(users))
1033
+ ['INSERT INTO "users" ("name", "email") SELECT * FROM UNNEST($1::TEXT[], $2::TEXT[])', ('Alice', 'Bob'), (None, None)]
1034
+ """
517
1035
  cached = cls._cached(
518
1036
  ("insert_multiple_sql",),
519
1037
  lambda: sql(
@@ -532,6 +1050,18 @@ class ModelBase:
532
1050
 
533
1051
  @classmethod
534
1052
  def insert_multiple_array_safe_sql(cls: type[T], rows: Iterable[T]) -> Fragment:
1053
+ """Generate bulk INSERT SQL using VALUES syntax.
1054
+
1055
+ This method is required when your model contains array columns, because
1056
+ PostgreSQL doesn't support arrays-of-arrays (which UNNEST would require).
1057
+ Use this instead of the UNNEST method when you have array-typed fields.
1058
+
1059
+ Args:
1060
+ rows: Model instances to insert
1061
+
1062
+ Returns:
1063
+ Fragment containing INSERT ... VALUES statement
1064
+ """
535
1065
  return sql(
536
1066
  "INSERT INTO {table} ({fields}) VALUES {values}",
537
1067
  table=cls.table_name_sql(),
@@ -546,6 +1076,15 @@ class ModelBase:
546
1076
  def insert_multiple_executemany_chunk_sql(
547
1077
  cls: type[T], chunk_size: int
548
1078
  ) -> Fragment:
1079
+ """Generate INSERT SQL template for executemany with specific chunk size.
1080
+
1081
+ Args:
1082
+ chunk_size: Number of records per batch
1083
+
1084
+ Returns:
1085
+ Fragment containing INSERT statement with numbered placeholders
1086
+ """
1087
+
549
1088
  def generate() -> Fragment:
550
1089
  columns = len(cls.column_info())
551
1090
  values = ", ".join(
@@ -568,6 +1107,14 @@ class ModelBase:
568
1107
  async def insert_multiple_executemany(
569
1108
  cls: type[T], connection_or_pool: Union[Connection, Pool], rows: Iterable[T]
570
1109
  ) -> None:
1110
+ """Insert multiple records using asyncpg's executemany.
1111
+
1112
+ This is the most compatible but slowest bulk insert method.
1113
+
1114
+ Args:
1115
+ connection_or_pool: Database connection or pool
1116
+ rows: Model instances to insert
1117
+ """
571
1118
  args = [r.field_values() for r in rows]
572
1119
  query = cls.insert_multiple_executemany_chunk_sql(1).query()[0]
573
1120
  if args:
@@ -577,12 +1124,36 @@ class ModelBase:
577
1124
  async def insert_multiple_unnest(
578
1125
  cls: type[T], connection_or_pool: Union[Connection, Pool], rows: Iterable[T]
579
1126
  ) -> str:
1127
+ """Insert multiple records using PostgreSQL UNNEST.
1128
+
1129
+ This is the most efficient bulk insert method for PostgreSQL.
1130
+
1131
+ Args:
1132
+ connection_or_pool: Database connection or pool
1133
+ rows: Model instances to insert
1134
+
1135
+ Returns:
1136
+ Result string from the database operation
1137
+ """
580
1138
  return await connection_or_pool.execute(*cls.insert_multiple_sql(rows))
581
1139
 
582
1140
  @classmethod
583
1141
  async def insert_multiple_array_safe(
584
1142
  cls: type[T], connection_or_pool: Union[Connection, Pool], rows: Iterable[T]
585
1143
  ) -> str:
1144
+ """Insert multiple records using VALUES syntax with chunking.
1145
+
1146
+ This method is required when your model contains array columns, because
1147
+ PostgreSQL doesn't support arrays-of-arrays (which UNNEST would require).
1148
+ Data is processed in chunks to manage memory usage.
1149
+
1150
+ Args:
1151
+ connection_or_pool: Database connection or pool
1152
+ rows: Model instances to insert
1153
+
1154
+ Returns:
1155
+ Result string from the last chunk operation
1156
+ """
586
1157
  last = ""
587
1158
  for chunk in chunked(rows, 100):
588
1159
  last = await connection_or_pool.execute(
@@ -594,6 +1165,21 @@ class ModelBase:
594
1165
  async def insert_multiple(
595
1166
  cls: type[T], connection_or_pool: Union[Connection, Pool], rows: Iterable[T]
596
1167
  ) -> str:
1168
+ """Insert multiple records using the configured insert_multiple_mode.
1169
+
1170
+ Args:
1171
+ connection_or_pool: Database connection or pool
1172
+ rows: Model instances to insert
1173
+
1174
+ Returns:
1175
+ Result string from the database operation
1176
+
1177
+ Note:
1178
+ The actual method used depends on the insert_multiple_mode setting:
1179
+ - 'unnest': Most efficient, uses UNNEST (default)
1180
+ - 'array_safe': Uses VALUES syntax; required when model has array columns
1181
+ - 'executemany': Uses asyncpg's executemany, slowest but most compatible
1182
+ """
597
1183
  if cls.insert_multiple_mode == "executemany":
598
1184
  await cls.insert_multiple_executemany(connection_or_pool, rows)
599
1185
  return "INSERT"
@@ -608,10 +1194,21 @@ class ModelBase:
608
1194
  connection_or_pool: Union[Connection, Pool],
609
1195
  rows: Iterable[T],
610
1196
  insert_only: FieldNamesSet = (),
1197
+ force_update: FieldNamesSet = (),
611
1198
  ) -> None:
1199
+ """Bulk upsert using asyncpg's executemany.
1200
+
1201
+ Args:
1202
+ connection_or_pool: Database connection or pool
1203
+ rows: Model instances to upsert
1204
+ insert_only: Field names that should only be set on INSERT, not UPDATE
1205
+ force_update: Field names to force include in UPDATE clause, overriding insert_only settings
1206
+ """
612
1207
  args = [r.field_values() for r in rows]
613
1208
  query = cls.upsert_sql(
614
- cls.insert_multiple_executemany_chunk_sql(1), exclude=insert_only
1209
+ cls.insert_multiple_executemany_chunk_sql(1),
1210
+ insert_only=insert_only,
1211
+ force_update=force_update,
615
1212
  ).query()[0]
616
1213
  if args:
617
1214
  await connection_or_pool.executemany(query, args)
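
A hedged sketch of how the per-call insert_only / force_update arguments combine with ColumnInfo(insert_only=True), as described in the upsert docstrings above; it assumes the Product sketch and a connection or pool `db` (illustrative only, not from the package):

    async def sync_products(db, rows):
        # created_at is insert_only via ColumnInfo, so the generated
        # ON CONFLICT ... DO UPDATE clause leaves it untouched
        await Product.upsert_multiple(db, rows)

        # additionally keep name out of the UPDATE clause for this call only
        await Product.upsert_multiple(db, rows, insert_only={"name"})

        # override the ColumnInfo flag and rewrite created_at on conflict as well
        await Product.upsert_multiple(db, rows, force_update={"created_at"})
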
@@ -622,9 +1219,25 @@ class ModelBase:
622
1219
  connection_or_pool: Union[Connection, Pool],
623
1220
  rows: Iterable[T],
624
1221
  insert_only: FieldNamesSet = (),
1222
+ force_update: FieldNamesSet = (),
625
1223
  ) -> str:
1224
+ """Bulk upsert using PostgreSQL UNNEST.
1225
+
1226
+ Args:
1227
+ connection_or_pool: Database connection or pool
1228
+ rows: Model instances to upsert
1229
+ insert_only: Field names that should only be set on INSERT, not UPDATE
1230
+ force_update: Field names to force include in UPDATE clause, overriding insert_only settings
1231
+
1232
+ Returns:
1233
+ Result string from the database operation
1234
+ """
626
1235
  return await connection_or_pool.execute(
627
- *cls.upsert_sql(cls.insert_multiple_sql(rows), exclude=insert_only)
1236
+ *cls.upsert_sql(
1237
+ cls.insert_multiple_sql(rows),
1238
+ insert_only=insert_only,
1239
+ force_update=force_update,
1240
+ )
628
1241
  )
629
1242
 
630
1243
  @classmethod
@@ -633,12 +1246,29 @@ class ModelBase:
633
1246
  connection_or_pool: Union[Connection, Pool],
634
1247
  rows: Iterable[T],
635
1248
  insert_only: FieldNamesSet = (),
1249
+ force_update: FieldNamesSet = (),
636
1250
  ) -> str:
1251
+ """Bulk upsert using VALUES syntax with chunking.
1252
+
1253
+ This method is required when your model contains array columns, because
1254
+ PostgreSQL doesn't support arrays-of-arrays (which UNNEST would require).
1255
+
1256
+ Args:
1257
+ connection_or_pool: Database connection or pool
1258
+ rows: Model instances to upsert
1259
+ insert_only: Field names that should only be set on INSERT, not UPDATE
1260
+ force_update: Field names to force include in UPDATE clause, overriding insert_only settings
1261
+
1262
+ Returns:
1263
+ Result string from the last chunk operation
1264
+ """
637
1265
  last = ""
638
1266
  for chunk in chunked(rows, 100):
639
1267
  last = await connection_or_pool.execute(
640
1268
  *cls.upsert_sql(
641
- cls.insert_multiple_array_safe_sql(chunk), exclude=insert_only
1269
+ cls.insert_multiple_array_safe_sql(chunk),
1270
+ insert_only=insert_only,
1271
+ force_update=force_update,
642
1272
  )
643
1273
  )
644
1274
  return last
@@ -649,25 +1279,66 @@ class ModelBase:
649
1279
  connection_or_pool: Union[Connection, Pool],
650
1280
  rows: Iterable[T],
651
1281
  insert_only: FieldNamesSet = (),
1282
+ force_update: FieldNamesSet = (),
652
1283
  ) -> str:
1284
+ """Bulk upsert (INSERT ... ON CONFLICT DO UPDATE) multiple records.
1285
+
1286
+ Args:
1287
+ connection_or_pool: Database connection or pool
1288
+ rows: Model instances to upsert
1289
+ insert_only: Field names that should only be set on INSERT, not UPDATE
1290
+ force_update: Field names to force include in UPDATE clause, overriding insert_only settings
1291
+
1292
+ Returns:
1293
+ Result string from the database operation
1294
+
1295
+ Example:
1296
+ >>> await User.upsert_multiple(pool, users, insert_only={'created_at'})
1297
+ >>> await User.upsert_multiple(pool, users, force_update={'created_at'})
1298
+
1299
+ Note:
1300
+ Fields marked with ColumnInfo(insert_only=True) are automatically
1301
+ treated as insert-only and combined with the insert_only parameter,
1302
+ unless overridden by force_update.
1303
+ """
1304
+ # upsert_sql automatically handles insert_only fields from ColumnInfo
1305
+ # Pass manual insert_only parameter through to the specific implementations
1306
+
653
1307
  if cls.insert_multiple_mode == "executemany":
654
1308
  await cls.upsert_multiple_executemany(
655
- connection_or_pool, rows, insert_only=insert_only
1309
+ connection_or_pool,
1310
+ rows,
1311
+ insert_only=insert_only,
1312
+ force_update=force_update,
656
1313
  )
657
1314
  return "INSERT"
658
1315
  elif cls.insert_multiple_mode == "array_safe":
659
1316
  return await cls.upsert_multiple_array_safe(
660
- connection_or_pool, rows, insert_only=insert_only
1317
+ connection_or_pool,
1318
+ rows,
1319
+ insert_only=insert_only,
1320
+ force_update=force_update,
661
1321
  )
662
1322
  else:
663
1323
  return await cls.upsert_multiple_unnest(
664
- connection_or_pool, rows, insert_only=insert_only
1324
+ connection_or_pool,
1325
+ rows,
1326
+ insert_only=insert_only,
1327
+ force_update=force_update,
665
1328
  )
666
1329
 
667
1330
  @classmethod
668
1331
  def _get_equal_ignoring_fn(
669
1332
  cls: type[T], ignore: FieldNamesSet = ()
670
1333
  ) -> Callable[[T, T], bool]:
1334
+ """Generate optimized function to compare instances ignoring certain fields.
1335
+
1336
+ Args:
1337
+ ignore: Field names to ignore during comparison
1338
+
1339
+ Returns:
1340
+ Compiled function that compares two instances, returning True if equal
1341
+ """
671
1342
  env: dict[str, Any] = {}
672
1343
  func = ["def equal_ignoring(a, b):"]
673
1344
  for ci in cls.column_info().values():
@@ -686,8 +1357,39 @@ class ModelBase:
686
1357
  where: Where,
687
1358
  ignore: FieldNamesSet = (),
688
1359
  insert_only: FieldNamesSet = (),
1360
+ force_update: FieldNamesSet = (),
689
1361
  ) -> "ReplaceMultiplePlan[T]":
690
- ignore = sorted(set(ignore) | set(insert_only))
1362
+ """Plan a replace operation by comparing new data with existing records.
1363
+
1364
+ This method analyzes the differences between the provided rows and existing
1365
+ database records, determining which records need to be created, updated, or deleted.
1366
+
1367
+ Args:
1368
+ connection: Database connection (must support FOR UPDATE)
1369
+ rows: New data as model instances or mappings
1370
+ where: WHERE clause to limit which existing records to consider
1371
+ ignore: Field names to ignore when comparing records
1372
+ insert_only: Field names that should only be set on INSERT, not UPDATE
1373
+ force_update: Field names to force include in UPDATE clause, overriding insert_only settings
1374
+
1375
+ Returns:
1376
+ ReplaceMultiplePlan containing the planned operations
1377
+
1378
+ Example:
1379
+ >>> plan = await User.plan_replace_multiple(
1380
+ ... conn, new_users, where=sql("department_id = {}", dept_id)
1381
+ ... )
1382
+ >>> print(f"Will create {len(plan.created)}, update {len(plan.updated)}, delete {len(plan.deleted)}")
1383
+
1384
+ Note:
1385
+ Fields marked with ColumnInfo(insert_only=True) are automatically
1386
+ treated as insert-only and combined with the insert_only parameter,
1387
+ unless overridden by force_update.
1388
+ """
1389
+ # For comparison purposes, combine auto-detected insert_only fields with manual ones
1390
+ all_insert_only = cls.insert_only_field_names() | set(insert_only)
1391
+ default_ignore = cls.replace_ignore_field_names() - set(force_update)
1392
+ ignore = sorted(set(ignore) | default_ignore | all_insert_only)
691
1393
  equal_ignoring = cls._cached(
692
1394
  ("equal_ignoring", tuple(ignore)),
693
1395
  lambda: cls._get_equal_ignoring_fn(ignore),
@@ -710,7 +1412,11 @@ class ModelBase:
710
1412
 
711
1413
  created = list(pending.values())
712
1414
 
713
- return ReplaceMultiplePlan(cls, insert_only, created, updated, deleted)
1415
+ # Pass only manual insert_only and force_update to the plan
1416
+ # since upsert_multiple handles auto-detected ones
1417
+ return ReplaceMultiplePlan(
1418
+ cls, insert_only, force_update, created, updated, deleted
1419
+ )
714
1420
 
715
1421
  @classmethod
716
1422
  async def replace_multiple(
@@ -721,9 +1427,42 @@ class ModelBase:
721
1427
  where: Where,
722
1428
  ignore: FieldNamesSet = (),
723
1429
  insert_only: FieldNamesSet = (),
1430
+ force_update: FieldNamesSet = (),
724
1431
  ) -> tuple[list[T], list[T], list[T]]:
1432
+ """Replace records in the database with the provided data.
1433
+
1434
+ This is a complete replace operation: records matching the WHERE clause
1435
+ that aren't in the new data will be deleted, new records will be inserted,
1436
+ and changed records will be updated.
1437
+
1438
+ Args:
1439
+ connection: Database connection (must support FOR UPDATE)
1440
+ rows: New data as model instances or mappings
1441
+ where: WHERE clause to limit which existing records to consider for replacement
1442
+ ignore: Field names to ignore when comparing records
1443
+ insert_only: Field names that should only be set on INSERT, not UPDATE
1444
+ force_update: Field names to force include in UPDATE clause, overriding insert_only settings
1445
+
1446
+ Returns:
1447
+ Tuple of (created_records, updated_records, deleted_records)
1448
+
1449
+ Example:
1450
+ >>> created, updated, deleted = await User.replace_multiple(
1451
+ ... conn, new_users, where=sql("department_id = {}", dept_id)
1452
+ ... )
1453
+
1454
+ Note:
1455
+ Fields marked with ColumnInfo(insert_only=True) are automatically
1456
+ treated as insert-only and combined with the insert_only parameter,
1457
+ unless overridden by force_update.
1458
+ """
725
1459
  plan = await cls.plan_replace_multiple(
726
- connection, rows, where=where, ignore=ignore, insert_only=insert_only
1460
+ connection,
1461
+ rows,
1462
+ where=where,
1463
+ ignore=ignore,
1464
+ insert_only=insert_only,
1465
+ force_update=force_update,
727
1466
  )
728
1467
  await plan.execute(connection)
729
1468
  return plan.cud
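
A hedged sketch combining plan_replace_multiple with the plan object described near the end of this diff; it assumes an asyncpg pool `pool` and the Product sketch from earlier (illustrative only, not from the package):

    from sql_athame import sql

    async def replace_widgets(pool, incoming_rows):
        async with pool.acquire() as conn, conn.transaction():
            plan = await Product.plan_replace_multiple(
                conn, incoming_rows, where=sql("name LIKE {}", "widget%")
            )
            # inspect the planned work before touching the data
            print(len(plan.created), "new,", len(plan.updated), "changed,",
                  len(plan.deleted), "to delete")
            await plan.execute(conn)
            return plan.cud
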
@@ -732,6 +1471,14 @@ class ModelBase:
732
1471
  def _get_differences_ignoring_fn(
733
1472
  cls: type[T], ignore: FieldNamesSet = ()
734
1473
  ) -> Callable[[T, T], list[str]]:
1474
+ """Generate optimized function to find field differences between instances.
1475
+
1476
+ Args:
1477
+ ignore: Field names to ignore during comparison
1478
+
1479
+ Returns:
1480
+ Compiled function that returns list of field names that differ
1481
+ """
735
1482
  env: dict[str, Any] = {}
736
1483
  func = [
737
1484
  "def differences_ignoring(a, b):",
@@ -755,8 +1502,41 @@ class ModelBase:
755
1502
  where: Where,
756
1503
  ignore: FieldNamesSet = (),
757
1504
  insert_only: FieldNamesSet = (),
1505
+ force_update: FieldNamesSet = (),
758
1506
  ) -> tuple[list[T], list[tuple[T, T, list[str]]], list[T]]:
759
- ignore = sorted(set(ignore) | set(insert_only))
1507
+ """Replace records and report the specific field differences for updates.
1508
+
1509
+ Like replace_multiple, but provides detailed information about which
1510
+ fields changed for each updated record.
1511
+
1512
+ Args:
1513
+ connection: Database connection (must support FOR UPDATE)
1514
+ rows: New data as model instances or mappings
1515
+ where: WHERE clause to limit which existing records to consider
1516
+ ignore: Field names to ignore when comparing records
1517
+ insert_only: Field names that should only be set on INSERT, not UPDATE
1518
+ force_update: Field names to force include in UPDATE clause, overriding insert_only settings
1519
+
1520
+ Returns:
1521
+ Tuple of (created_records, update_triples, deleted_records)
1522
+ where update_triples contains (old_record, new_record, changed_field_names)
1523
+
1524
+ Example:
1525
+ >>> created, updates, deleted = await User.replace_multiple_reporting_differences(
1526
+ ... conn, new_users, where=sql("department_id = {}", dept_id)
1527
+ ... )
1528
+ >>> for old, new, fields in updates:
1529
+ ... print(f"Updated {old.name}: changed {', '.join(fields)}")
1530
+
1531
+ Note:
1532
+ Fields marked with ColumnInfo(insert_only=True) are automatically
1533
+ treated as insert-only and combined with the insert_only parameter,
1534
+ unless overridden by force_update.
1535
+ """
1536
+ # For comparison purposes, combine auto-detected insert_only fields with manual ones
1537
+ all_insert_only = cls.insert_only_field_names() | set(insert_only)
1538
+ default_ignore = cls.replace_ignore_field_names() - set(force_update)
1539
+ ignore = sorted(set(ignore) | default_ignore | all_insert_only)
760
1540
  differences_ignoring = cls._cached(
761
1541
  ("differences_ignoring", tuple(ignore)),
762
1542
  lambda: cls._get_differences_ignoring_fn(ignore),
@@ -786,6 +1566,7 @@ class ModelBase:
786
1566
  connection,
787
1567
  (*created, *(t[1] for t in updated_triples)),
788
1568
  insert_only=insert_only,
1569
+ force_update=force_update,
789
1570
  )
790
1571
  if deleted:
791
1572
  await cls.delete_multiple(connection, deleted)
@@ -797,30 +1578,67 @@ class ModelBase:
797
1578
  class ReplaceMultiplePlan(Generic[T]):
798
1579
  model_class: type[T]
799
1580
  insert_only: FieldNamesSet
1581
+ force_update: FieldNamesSet
800
1582
  created: list[T]
801
1583
  updated: list[T]
802
1584
  deleted: list[T]
803
1585
 
804
1586
  @property
805
1587
  def cud(self) -> tuple[list[T], list[T], list[T]]:
1588
+ """Get the create, update, delete lists as a tuple.
1589
+
1590
+ Returns:
1591
+ Tuple of (created, updated, deleted) record lists
1592
+ """
806
1593
  return (self.created, self.updated, self.deleted)
807
1594
 
808
1595
  async def execute_upserts(self, connection: Connection) -> None:
1596
+ """Execute the upsert operations (creates and updates).
1597
+
1598
+ Args:
1599
+ connection: Database connection
1600
+ """
809
1601
  if self.created or self.updated:
810
1602
  await self.model_class.upsert_multiple(
811
- connection, (*self.created, *self.updated), insert_only=self.insert_only
1603
+ connection,
1604
+ (*self.created, *self.updated),
1605
+ insert_only=self.insert_only,
1606
+ force_update=self.force_update,
812
1607
  )
813
1608
 
814
1609
  async def execute_deletes(self, connection: Connection) -> None:
1610
+ """Execute the delete operations.
1611
+
1612
+ Args:
1613
+ connection: Database connection
1614
+ """
815
1615
  if self.deleted:
816
1616
  await self.model_class.delete_multiple(connection, self.deleted)
817
1617
 
818
1618
  async def execute(self, connection: Connection) -> None:
1619
+ """Execute all planned operations (upserts then deletes).
1620
+
1621
+ Args:
1622
+ connection: Database connection
1623
+ """
819
1624
  await self.execute_upserts(connection)
820
1625
  await self.execute_deletes(connection)
821
1626
 
822
1627
 
823
1628
  def chunked(lst, n):
1629
+ """Split an iterable into chunks of size n.
1630
+
1631
+ Args:
1632
+ lst: Iterable to chunk
1633
+ n: Chunk size
1634
+
1635
+ Yields:
1636
+ Lists of up to n items from the input
1637
+
1638
+ Example:
1639
+ >>> list(chunked([1, 2, 3, 4, 5], 2))
1640
+ [[1, 2], [3, 4], [5]]
1641
+ """
824
1642
  if type(lst) is not list:
825
1643
  lst = list(lst)
826
1644
  for i in range(0, len(lst), n):