fluxfem 0.1.4__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as published.
Files changed (53)
  1. fluxfem/__init__.py +69 -13
  2. fluxfem/core/__init__.py +140 -53
  3. fluxfem/core/assembly.py +691 -97
  4. fluxfem/core/basis.py +75 -54
  5. fluxfem/core/context_types.py +36 -12
  6. fluxfem/core/dtypes.py +9 -1
  7. fluxfem/core/forms.py +10 -0
  8. fluxfem/core/mixed_assembly.py +263 -0
  9. fluxfem/core/mixed_space.py +382 -0
  10. fluxfem/core/mixed_weakform.py +97 -0
  11. fluxfem/core/solver.py +2 -0
  12. fluxfem/core/space.py +315 -30
  13. fluxfem/core/weakform.py +821 -42
  14. fluxfem/helpers_wf.py +49 -0
  15. fluxfem/mesh/__init__.py +54 -2
  16. fluxfem/mesh/base.py +318 -9
  17. fluxfem/mesh/contact.py +841 -0
  18. fluxfem/mesh/dtypes.py +12 -0
  19. fluxfem/mesh/hex.py +17 -16
  20. fluxfem/mesh/io.py +9 -6
  21. fluxfem/mesh/mortar.py +3970 -0
  22. fluxfem/mesh/supermesh.py +318 -0
  23. fluxfem/mesh/surface.py +104 -26
  24. fluxfem/mesh/tet.py +16 -7
  25. fluxfem/physics/diffusion.py +3 -0
  26. fluxfem/physics/elasticity/hyperelastic.py +35 -3
  27. fluxfem/physics/elasticity/linear.py +22 -4
  28. fluxfem/physics/elasticity/stress.py +9 -5
  29. fluxfem/physics/operators.py +12 -5
  30. fluxfem/physics/postprocess.py +29 -3
  31. fluxfem/solver/__init__.py +47 -2
  32. fluxfem/solver/bc.py +38 -2
  33. fluxfem/solver/block_matrix.py +284 -0
  34. fluxfem/solver/block_system.py +477 -0
  35. fluxfem/solver/cg.py +150 -55
  36. fluxfem/solver/dirichlet.py +358 -5
  37. fluxfem/solver/history.py +15 -3
  38. fluxfem/solver/newton.py +260 -70
  39. fluxfem/solver/petsc.py +445 -0
  40. fluxfem/solver/preconditioner.py +109 -0
  41. fluxfem/solver/result.py +18 -0
  42. fluxfem/solver/solve_runner.py +208 -23
  43. fluxfem/solver/solver.py +35 -12
  44. fluxfem/solver/sparse.py +149 -15
  45. fluxfem/tools/jit.py +19 -7
  46. fluxfem/tools/timer.py +14 -12
  47. fluxfem/tools/visualizer.py +16 -4
  48. fluxfem-0.2.1.dist-info/METADATA +314 -0
  49. fluxfem-0.2.1.dist-info/RECORD +59 -0
  50. fluxfem-0.1.4.dist-info/METADATA +0 -127
  51. fluxfem-0.1.4.dist-info/RECORD +0 -48
  52. {fluxfem-0.1.4.dist-info → fluxfem-0.2.1.dist-info}/LICENSE +0 -0
  53. {fluxfem-0.1.4.dist-info → fluxfem-0.2.1.dist-info}/WHEEL +0 -0
fluxfem/solver/block_system.py (new file)
@@ -0,0 +1,497 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, Mapping, Sequence, TypeAlias
+
+import numpy as np
+
+try:
+    import scipy.sparse as sp
+except Exception:  # pragma: no cover
+    sp = None
+
+from .dirichlet import DirichletBC, free_dofs
+from .sparse import FluxSparseMatrix
+
+MatrixLike: TypeAlias = Any
+FieldKey: TypeAlias = str | int
+BlockMap: TypeAlias = Mapping[FieldKey, Mapping[FieldKey, MatrixLike]]
+
+
+@dataclass(frozen=True)
+class BlockSystem:
+    K: MatrixLike
+    F: np.ndarray
+    free_dofs: np.ndarray
+    dirichlet: DirichletBC
+    field_order: tuple[FieldKey, ...]
+    field_slices: dict[FieldKey, slice]
+
+    def expand(self, u_free: np.ndarray) -> np.ndarray:
+        return self.dirichlet.expand_solution(u_free, free=self.free_dofs, n_total=self.F.shape[0])
+
+    def split(self, u_full: np.ndarray) -> dict[FieldKey, np.ndarray]:
+        return {name: np.asarray(u_full)[self.field_slices[name]] for name in self.field_order}
+
+    def join(self, fields: Mapping[FieldKey, np.ndarray]) -> np.ndarray:
+        parts = []
+        for name in self.field_order:
+            if name not in fields:
+                raise KeyError(f"Missing field '{name}' in join.")
+            parts.append(np.asarray(fields[name]))
+        return np.concatenate(parts, axis=0)
+
+
+def _build_field_slices(
+    order: Sequence[FieldKey], sizes: Mapping[FieldKey, int]
+) -> tuple[dict[FieldKey, int], dict[FieldKey, slice], int]:
+    offsets = {}
+    slices = {}
+    offset = 0
+    for name in order:
+        size = int(sizes[name])
+        offsets[name] = offset
+        slices[name] = slice(offset, offset + size)
+        offset += size
+    return offsets, slices, offset
+
+
+def split_block_matrix(
+    matrix: MatrixLike,
+    *,
+    sizes: Mapping[FieldKey, int],
+    order: Sequence[FieldKey] | None = None,
+) -> dict[FieldKey, dict[FieldKey, MatrixLike]]:
+    """
+    Split a block matrix into a dict-of-dicts by field order and sizes.
+    """
+    field_order = tuple(order) if order is not None else tuple(sizes.keys())
+    for name in field_order:
+        if name not in sizes:
+            raise KeyError(f"Missing size for field '{name}'")
+    offsets, _, n_total = _build_field_slices(field_order, sizes)
+
+    if isinstance(matrix, FluxSparseMatrix):
+        if sp is None:
+            raise ImportError("scipy is required to split FluxSparseMatrix blocks.")
+        mat = matrix.to_csr()
+    elif sp is not None and sp.issparse(matrix):
+        mat = matrix.tocsr()
+    else:
+        mat = np.asarray(matrix)
+
+    if mat.shape != (n_total, n_total):
+        raise ValueError(f"matrix has shape {mat.shape}, expected {(n_total, n_total)}")
+
+    blocks: dict[FieldKey, dict[FieldKey, MatrixLike]] = {}
+    for name_i in field_order:
+        row = {}
+        i0 = offsets[name_i]
+        i1 = i0 + int(sizes[name_i])
+        for name_j in field_order:
+            j0 = offsets[name_j]
+            j1 = j0 + int(sizes[name_j])
+            row[name_j] = mat[i0:i1, j0:j1]
+        blocks[name_i] = row
+    return blocks
+
+
+def _infer_format(blocks: BlockMap, fmt: str) -> str:
+    if fmt != "auto":
+        return fmt
+    for row in blocks.values():
+        for blk in row.values():
+            if isinstance(blk, FluxSparseMatrix):
+                return "flux"
+            if sp is not None and sp.issparse(blk):
+                return "csr"
+    return "dense"
+
+
+def _infer_sizes_from_diag(diag: Mapping[FieldKey, MatrixLike]) -> dict[FieldKey, int]:
+    # Mapping counterpart of _infer_sizes_from_diag_seq; build_block_system
+    # calls it when ``diag`` is a mapping and ``sizes`` is not supplied.
+    sizes: dict[FieldKey, int] = {}
+    for name, blk in diag.items():
+        if isinstance(blk, FluxSparseMatrix):
+            sizes[name] = int(blk.n_dofs)
+        elif sp is not None and sp.issparse(blk):
+            shape = blk.shape
+            if shape[0] != shape[1]:
+                raise ValueError(f"diag block '{name}' must be square, got {shape}")
+            sizes[name] = int(shape[0])
+        else:
+            arr = np.asarray(blk)
+            if arr.ndim != 2 or arr.shape[0] != arr.shape[1]:
+                raise ValueError(f"diag block '{name}' must be square, got {arr.shape}")
+            sizes[name] = int(arr.shape[0])
+    return sizes
+
+
+def _infer_sizes_from_diag_seq(diag_seq: Sequence[MatrixLike]) -> dict[int, int]:
+    sizes = {}
+    for idx, blk in enumerate(diag_seq):
+        if isinstance(blk, FluxSparseMatrix):
+            sizes[idx] = int(blk.n_dofs)
+        elif sp is not None and sp.issparse(blk):
+            shape = blk.shape
+            if shape[0] != shape[1]:
+                raise ValueError(f"diag block {idx} must be square, got {shape}")
+            sizes[idx] = int(shape[0])
+        else:
+            arr = np.asarray(blk)
+            if arr.ndim != 2 or arr.shape[0] != arr.shape[1]:
+                raise ValueError(f"diag block {idx} must be square, got {arr.shape}")
+            sizes[idx] = int(arr.shape[0])
+    return sizes
+
+
+def _coerce_rhs(
+    rhs: MatrixLike | Sequence[MatrixLike] | Mapping[FieldKey, MatrixLike] | None,
+    order: Sequence[FieldKey],
+    sizes: Mapping[FieldKey, int],
+) -> np.ndarray:
+    if rhs is None:
+        return np.zeros(sum(int(sizes[n]) for n in order), dtype=float)
+    if isinstance(rhs, Mapping):
+        parts = [np.asarray(rhs.get(name, np.zeros(int(sizes[name]), dtype=float))) for name in order]
+        for name, part in zip(order, parts):
+            if part.shape != (int(sizes[name]),):
+                raise ValueError(f"rhs[{name}] has shape {part.shape}, expected {(int(sizes[name]),)}")
+        return np.concatenate(parts, axis=0)
+    if hasattr(rhs, "shape") and not isinstance(rhs, (list, tuple)):
+        rhs_arr = np.asarray(rhs)
+        if rhs_arr.shape != (sum(int(sizes[n]) for n in order),):
+            raise ValueError("rhs vector has unexpected shape")
+        return rhs_arr
+    parts = list(rhs)
+    if len(parts) != len(order):
+        raise ValueError("rhs sequence length must match number of fields")
+    parts = [np.asarray(p) for p in parts]
+    for name, part in zip(order, parts):
+        if part.shape != (int(sizes[name]),):
+            raise ValueError(f"rhs for {name} has shape {part.shape}, expected {(int(sizes[name]),)}")
+    return np.concatenate(parts, axis=0)
+
+
+def _build_dirichlet_from_fields(
+    fields: Mapping[FieldKey, object], offsets: Mapping[FieldKey, int], *, merge: str
+) -> DirichletBC:
+    if merge not in {"check_equal", "error", "first", "last"}:
+        raise ValueError("merge must be one of: check_equal, error, first, last")
+    dof_map: dict[int, float] = {}
+    for name, spec in fields.items():
+        if name not in offsets:
+            raise KeyError(f"Unknown field '{name}' in constraints")
+        offset = int(offsets[name])
+        if isinstance(spec, DirichletBC):
+            dofs = spec.dofs
+            vals = spec.vals
+        elif isinstance(spec, tuple) and len(spec) == 2:
+            dofs, vals = spec
+        else:
+            dofs, vals = spec, None
+        bc = DirichletBC(dofs, vals)
+        g_dofs = np.asarray(bc.dofs, dtype=int) + offset
+        g_vals = np.asarray(bc.vals, dtype=float)
+        for d, v in zip(g_dofs, g_vals):
+            if d in dof_map:
+                if merge == "error":
+                    raise ValueError(f"Duplicate Dirichlet DOF {d} in constraints")
+                if merge == "check_equal":
+                    if not np.isclose(dof_map[d], v):
+                        raise ValueError(f"Conflicting Dirichlet value for DOF {d}")
+                if merge == "first":
+                    continue
+            dof_map[d] = float(v)
+    if not dof_map:
+        return DirichletBC(np.array([], dtype=int), np.array([], dtype=float))
+    dofs_sorted = np.array(sorted(dof_map.keys()), dtype=int)
+    vals_sorted = np.array([dof_map[d] for d in dofs_sorted], dtype=float)
+    return DirichletBC(dofs_sorted, vals_sorted)
+
+
+def _build_dirichlet_from_sequence(
+    seq: Sequence[object | None],
+    order: Sequence[FieldKey],
+    offsets: Mapping[FieldKey, int],
+    *,
+    merge: str,
+) -> DirichletBC:
+    if merge not in {"check_equal", "error", "first", "last"}:
+        raise ValueError("merge must be one of: check_equal, error, first, last")
+    if len(seq) != len(order):
+        raise ValueError("constraints sequence length must match order")
+    dof_map: dict[int, float] = {}
+    for name, spec in zip(order, seq):
+        if spec is None:
+            continue
+        offset = int(offsets[name])
+        if isinstance(spec, DirichletBC):
+            dofs = spec.dofs
+            vals = spec.vals
+        elif isinstance(spec, tuple) and len(spec) == 2:
+            dofs, vals = spec
+        else:
+            dofs, vals = spec, None
+        bc = DirichletBC(dofs, vals)
+        g_dofs = np.asarray(bc.dofs, dtype=int) + offset
+        g_vals = np.asarray(bc.vals, dtype=float)
+        for d, v in zip(g_dofs, g_vals):
+            if d in dof_map:
+                if merge == "error":
+                    raise ValueError(f"Duplicate Dirichlet DOF {d} in constraints")
+                if merge == "check_equal":
+                    if not np.isclose(dof_map[d], v):
+                        raise ValueError(f"Conflicting Dirichlet value for DOF {d}")
+                if merge == "first":
+                    continue
+            dof_map[d] = float(v)
+    if not dof_map:
+        return DirichletBC(np.array([], dtype=int), np.array([], dtype=float))
+    dofs_sorted = np.array(sorted(dof_map.keys()), dtype=int)
+    vals_sorted = np.array([dof_map[d] for d in dofs_sorted], dtype=float)
+    return DirichletBC(dofs_sorted, vals_sorted)
+
+
+def _transpose_block(block: MatrixLike, rule: str) -> MatrixLike:
+    if isinstance(block, FluxSparseMatrix):
+        if sp is None:
+            raise ImportError("scipy is required to transpose FluxSparseMatrix blocks.")
+        block = block.to_csr()
+    if sp is not None and sp.issparse(block):
+        out = block.T
+    else:
+        out = np.asarray(block).T
+    if rule == "H":
+        return out.conjugate()
+    return out
+
+
+def _add_blocks(a: MatrixLike | None, b: MatrixLike | None) -> MatrixLike | None:
+    if a is None:
+        return b
+    if b is None:
+        return a
+    if isinstance(a, FluxSparseMatrix):
+        a = a.to_csr()
+    if isinstance(b, FluxSparseMatrix):
+        b = b.to_csr()
+    if sp is not None and sp.issparse(a):
+        if sp.issparse(b):
+            return a + b
+        return a + sp.csr_matrix(np.asarray(b))
+    if sp is not None and sp.issparse(b):
+        return sp.csr_matrix(np.asarray(a)) + b
+    return np.asarray(a) + np.asarray(b)
+
+
+def _blocks_from_diag_rel(
+    *,
+    diag: Mapping[FieldKey, MatrixLike] | Sequence[MatrixLike],
+    sizes: Mapping[FieldKey, int],
+    order: Sequence[FieldKey],
+    rel: Mapping[tuple[FieldKey, FieldKey], MatrixLike] | None = None,
+    add_contiguous: MatrixLike | None = None,
+    symmetric: bool = False,
+    transpose_rule: str = "T",
+) -> BlockMap:
+    if isinstance(diag, Mapping):
+        diag_map = dict(diag)
+    else:
+        diag_seq = list(diag)
+        if len(diag_seq) != len(order):
+            raise ValueError("diag sequence length must match order")
+        diag_map = dict(zip(order, diag_seq))
+
+    if add_contiguous is None:
+        blocks = {name: {} for name in order}
+    else:
+        blocks = split_block_matrix(add_contiguous, sizes=sizes, order=order)
+
+    if transpose_rule not in {"T", "H", "none"}:
+        raise ValueError("transpose_rule must be one of: T, H, none")
+
+    for name, blk in diag_map.items():
+        if name not in sizes:
+            raise KeyError(f"Unknown field '{name}' in diag")
+        blocks.setdefault(name, {})
+        blocks[name][name] = _add_blocks(blocks[name].get(name), blk)
+
+    if rel is not None:
+        for (name_i, name_j), blk in rel.items():
+            if name_i not in sizes or name_j not in sizes:
+                raise KeyError(f"Unknown field in rel: {(name_i, name_j)}")
+            blocks.setdefault(name_i, {})
+            blocks[name_i][name_j] = _add_blocks(blocks[name_i].get(name_j), blk)
+            if symmetric and name_i != name_j:
+                blocks.setdefault(name_j, {})
+                if transpose_rule == "none":
+                    blocks[name_j][name_i] = _add_blocks(blocks[name_j].get(name_i), blk)
+                else:
+                    blocks[name_j][name_i] = _add_blocks(
+                        blocks[name_j].get(name_i),
+                        _transpose_block(blk, transpose_rule),
+                    )
+
+    return blocks
+
+
+def build_block_system(
+    *,
+    diag: Mapping[FieldKey, MatrixLike] | Sequence[MatrixLike],
+    sizes: Mapping[FieldKey, int] | None = None,
+    rel: Mapping[tuple[FieldKey, FieldKey], MatrixLike] | None = None,
+    add_contiguous: MatrixLike | None = None,
+    rhs: Mapping[FieldKey, MatrixLike] | Sequence[MatrixLike] | np.ndarray | None = None,
+    constraints: object | None = None,
+    merge: str = "check_equal",
+    format: str = "auto",
+    symmetric: bool = False,
+    transpose_rule: str = "T",
+) -> BlockSystem:
+    """
+    Build a block system from diagonal blocks, optional relations, and constraints.
+
+    format:
+      - "auto": FluxSparseMatrix if any block is FluxSparseMatrix, CSR if any block is sparse, else dense
+      - "flux": return FluxSparseMatrix
+      - "csr": return scipy.sparse CSR
+      - "dense": return numpy ndarray
+    """
+    if sizes is None:
+        if isinstance(diag, Mapping):
+            sizes = _infer_sizes_from_diag(diag)
+            field_order = tuple(sizes.keys())
+        else:
+            sizes = _infer_sizes_from_diag_seq(diag)
+            field_order = tuple(range(len(diag)))
+    else:
+        field_order = tuple(sizes.keys())
+    offsets, field_slices, n_total = _build_field_slices(field_order, sizes)
+    prefer_flux = False
+    if format == "auto":
+        if isinstance(add_contiguous, FluxSparseMatrix):
+            prefer_flux = True
+        if isinstance(diag, Mapping):
+            prefer_flux = prefer_flux or any(isinstance(blk, FluxSparseMatrix) for blk in diag.values())
+        else:
+            prefer_flux = prefer_flux or any(isinstance(blk, FluxSparseMatrix) for blk in diag)
+    blocks = _blocks_from_diag_rel(
+        diag=diag,
+        rel=rel,
+        add_contiguous=add_contiguous,
+        sizes=sizes,
+        order=field_order,
+        symmetric=symmetric,
+        transpose_rule=transpose_rule,
+    )
+    use_format = "flux" if prefer_flux else _infer_format(blocks, format)
+
+    def _block_shape(name_i, name_j):
+        return (int(sizes[name_i]), int(sizes[name_j]))
+
+    if use_format == "flux":
+        rows_list = []
+        cols_list = []
+        data_list = []
+        for name_i in field_order:
+            row_blocks = blocks.get(name_i, {})
+            for name_j in field_order:
+                blk = row_blocks.get(name_j)
+                if blk is None:
+                    continue
+                shape = _block_shape(name_i, name_j)
+                if isinstance(blk, FluxSparseMatrix):
+                    if shape[0] != shape[1] or int(blk.n_dofs) != shape[0]:
+                        raise ValueError(f"Block {name_i},{name_j} has incompatible FluxSparseMatrix size")
+                    r = np.asarray(blk.pattern.rows, dtype=np.int64)
+                    c = np.asarray(blk.pattern.cols, dtype=np.int64)
+                    d = np.asarray(blk.data)
+                elif sp is not None and sp.issparse(blk):
+                    coo = blk.tocoo()
+                    r = np.asarray(coo.row, dtype=np.int64)
+                    c = np.asarray(coo.col, dtype=np.int64)
+                    d = np.asarray(coo.data)
+                    if coo.shape != shape:
+                        raise ValueError(f"Block {name_i},{name_j} has shape {coo.shape}, expected {shape}")
+                else:
+                    arr = np.asarray(blk)
+                    if arr.shape != shape:
+                        raise ValueError(f"Block {name_i},{name_j} has shape {arr.shape}, expected {shape}")
+                    r, c = np.nonzero(arr)
+                    d = arr[r, c]
+                if r.size:
+                    rows_list.append(r + offsets[name_i])
+                    cols_list.append(c + offsets[name_j])
+                    data_list.append(d)
+        rows = np.concatenate(rows_list) if rows_list else np.asarray([], dtype=np.int64)
+        cols = np.concatenate(cols_list) if cols_list else np.asarray([], dtype=np.int64)
+        data = np.concatenate(data_list) if data_list else np.asarray([], dtype=float)
+        K = FluxSparseMatrix(rows, cols, data, n_total)
+    else:
+        if use_format == "csr" and sp is None:
+            raise ImportError("scipy is required for CSR block systems.")
+        block_rows = []
+        for name_i in field_order:
+            row = []
+            row_blocks = blocks.get(name_i, {})
+            for name_j in field_order:
+                blk = row_blocks.get(name_j)
+                shape = _block_shape(name_i, name_j)
+                if blk is None:
+                    if use_format == "csr":
+                        row.append(sp.csr_matrix(shape))
+                    else:
+                        row.append(np.zeros(shape, dtype=float))
+                    continue
+                if isinstance(blk, FluxSparseMatrix):
+                    if sp is None:
+                        raise ImportError("scipy is required to assemble sparse block systems.")
+                    blk = blk.to_csr()
+                if sp is not None and sp.issparse(blk):
+                    blk = blk.tocsr()
+                    if blk.shape != shape:
+                        raise ValueError(f"Block {name_i},{name_j} has shape {blk.shape}, expected {shape}")
+                    row.append(blk if use_format == "csr" else blk.toarray())
+                else:
+                    arr = np.asarray(blk)
+                    if arr.shape != shape:
+                        raise ValueError(f"Block {name_i},{name_j} has shape {arr.shape}, expected {shape}")
+                    if use_format == "csr":
+                        row.append(sp.csr_matrix(arr))
+                    else:
+                        row.append(arr)
+            block_rows.append(row)
+        if use_format == "csr":
+            K = sp.bmat(block_rows, format="csr")
+        else:
+            K = np.block(block_rows)
+
+    F = _coerce_rhs(rhs, field_order, sizes)
+
+    if constraints is None:
+        bc = DirichletBC(np.array([], dtype=int), np.array([], dtype=float))
+        free = free_dofs(n_total, bc.dofs)
+        return BlockSystem(K=K, F=F, free_dofs=free, dirichlet=bc, field_order=field_order, field_slices=field_slices)
+
+    if isinstance(constraints, DirichletBC):
+        bc = constraints
+    elif isinstance(constraints, tuple) and len(constraints) == 2:
+        bc = DirichletBC(constraints[0], constraints[1])
+    elif isinstance(constraints, Mapping):
+        bc = _build_dirichlet_from_fields(constraints, offsets, merge=merge)
+    elif isinstance(constraints, Sequence) and not isinstance(constraints, (str, bytes)):
+        bc = _build_dirichlet_from_sequence(constraints, field_order, offsets, merge=merge)
+    else:
+        raise ValueError("constraints must be DirichletBC, (dofs, vals), a mapping, or a sequence")
+
+    system = bc.condense_system(K, F)
+    return BlockSystem(
+        K=system.K,
+        F=np.asarray(system.F),
+        free_dofs=system.free_dofs,
+        dirichlet=bc,
+        field_order=field_order,
+        field_slices=field_slices,
+    )
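
For readers who want to try the new API, here is a minimal usage sketch of build_block_system with dense NumPy blocks. The field names "u" and "p", the block values, and the pinned DOF are illustrative placeholders, and the direct dense solve assumes DirichletBC.condense_system returns the system restricted to the free DOFs (as the expand helper above suggests); only the keyword arguments and BlockSystem methods are taken from the code itself.

import numpy as np
from fluxfem.solver.block_system import build_block_system

# Illustrative blocks: a 4-DOF field "u" coupled to a 2-DOF field "p".
A = np.eye(4)            # diagonal block for "u"
C = np.eye(2)            # diagonal block for "p"
B = np.ones((4, 2))      # coupling block for the ("u", "p") slot

system = build_block_system(
    diag={"u": A, "p": C},
    sizes={"u": 4, "p": 2},
    rel={("u", "p"): B},
    symmetric=True,  # mirrors B into the ("p", "u") slot as B.T
    rhs={"u": np.ones(4), "p": np.zeros(2)},
    constraints={"u": (np.array([0]), np.array([0.0]))},  # pin u[0] = 0.0
)

u_free = np.linalg.solve(np.asarray(system.K), system.F)  # condensed dense solve
u_full = system.expand(u_free)  # scatter back, re-inserting the pinned value
fields = system.split(u_full)   # {"u": array of 4, "p": array of 2}

Note that split and join operate on full-length vectors, so expand must be applied to the free-DOF solution before splitting it into per-field arrays.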