bsplyne 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bsplyne/__init__.py +55 -0
- bsplyne/b_spline.py +2464 -0
- bsplyne/b_spline_basis.py +1000 -0
- bsplyne/geometries_in_3D.py +1193 -0
- bsplyne/multi_patch_b_spline.py +1731 -0
- bsplyne/my_wide_product.py +209 -0
- bsplyne/parallel_utils.py +378 -0
- bsplyne/save_utils.py +141 -0
- bsplyne-1.0.0.dist-info/METADATA +91 -0
- bsplyne-1.0.0.dist-info/RECORD +13 -0
- bsplyne-1.0.0.dist-info/WHEEL +5 -0
- bsplyne-1.0.0.dist-info/licenses/LICENSE.txt +70 -0
- bsplyne-1.0.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,1731 @@
|
|
|
1
|
+
# %%
|
|
2
|
+
import os
|
|
3
|
+
from itertools import permutations
|
|
4
|
+
from typing import Iterable, Union, Literal
|
|
5
|
+
|
|
6
|
+
import numpy as np
|
|
7
|
+
import numba as nb
|
|
8
|
+
import meshio as io
|
|
9
|
+
from scipy.spatial import cKDTree
|
|
10
|
+
|
|
11
|
+
from .b_spline import BSpline
|
|
12
|
+
from .b_spline_basis import BSplineBasis
|
|
13
|
+
from .save_utils import writePVD, merge_meshes
|
|
14
|
+
from .parallel_utils import parallel_blocks
|
|
15
|
+
|
|
16
|
+
# from .save_YETI import Domain, write
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
# union-find algorithm for connectivity
|
|
20
|
+
@nb.njit(nb.int32(nb.int32[:], nb.int32), cache=True)
def _find(parent, x):
    """Return the representative (root) of `x` and compress the path to it."""
    # First pass: walk up to the root of the tree containing x.
    root = x
    while parent[root] != root:
        root = parent[root]
    # Second pass: point every node on the walked path directly at the root,
    # which is exactly the state the recursive path-compression leaves behind.
    node = x
    while node != root:
        nxt = parent[node]
        parent[node] = root
        node = nxt
    return root
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@nb.njit(nb.void(nb.int32[:], nb.int32[:], nb.int32, nb.int32), cache=True)
def _union(parent, rank, x, y):
    """Merge the sets containing `x` and `y` using union by rank."""
    root_x = _find(parent, x)
    root_y = _find(parent, y)
    if root_x == root_y:
        return  # already in the same set, nothing to merge
    # Attach the shallower tree below the deeper one to keep trees flat.
    if rank[root_x] < rank[root_y]:
        parent[root_x] = root_y
    else:
        parent[root_y] = root_x
        if rank[root_x] == rank[root_y]:
            # Equal ranks: root_x wins and its tree grows one level deeper.
            rank[root_x] += 1
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@nb.njit(nb.int32[:](nb.int32[:, :], nb.int64), cache=True)
def _get_unique_nodes_inds(nodes_couples, nb_nodes):
    """
    Collapse coupled node indices onto one representative index per set.

    Every row of `nodes_couples` declares two node indices identical; the
    result maps each of the `nb_nodes` nodes to the root of its set.
    """
    parent = np.arange(nb_nodes, dtype=np.int32)
    rank = np.zeros(nb_nodes, dtype=np.int32)
    for k in range(nodes_couples.shape[0]):
        _union(parent, rank, nodes_couples[k, 0], nodes_couples[k, 1])
    # Resolve every node to its final representative after all merges.
    roots = np.empty(nb_nodes, dtype=np.int32)
    for node in range(nb_nodes):
        roots[node] = _find(parent, node)
    return roots
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class MultiPatchBSplineConnectivity:
|
|
54
|
+
"""
|
|
55
|
+
Contains all the methods to link multiple B-spline patches.
|
|
56
|
+
It uses 3 representations of the data :
|
|
57
|
+
- a unique representation, possibly common with other meshes, containing
|
|
58
|
+
only unique nodes indices,
|
|
59
|
+
- a unpacked representation containing duplicated nodes indices,
|
|
60
|
+
- a separated representation containing duplicated nodes indices,
|
|
61
|
+
separated between patches. It is here for user friendliness.
|
|
62
|
+
|
|
63
|
+
Attributes
|
|
64
|
+
----------
|
|
65
|
+
unique_nodes_inds : numpy.ndarray of int
|
|
66
|
+
The indices of the unique representation needed to create the unpacked one.
|
|
67
|
+
shape_by_patch : numpy.ndarray of int
|
|
68
|
+
The shape of the separated nodes by patch.
|
|
69
|
+
nb_nodes : int
|
|
70
|
+
The total number of unpacked nodes.
|
|
71
|
+
nb_unique_nodes : int
|
|
72
|
+
The total number of unique nodes.
|
|
73
|
+
nb_patchs : int
|
|
74
|
+
The number of patches.
|
|
75
|
+
npa : int
|
|
76
|
+
The dimension of the parametric space of the B-splines.
|
|
77
|
+
"""
|
|
78
|
+
|
|
79
|
+
unique_nodes_inds: np.ndarray
|
|
80
|
+
shape_by_patch: np.ndarray
|
|
81
|
+
nb_nodes: int
|
|
82
|
+
nb_unique_nodes: int
|
|
83
|
+
nb_patchs: int
|
|
84
|
+
npa: int
|
|
85
|
+
|
|
86
|
+
def __init__(self, unique_nodes_inds, shape_by_patch, nb_unique_nodes):
    """
    Store the connectivity data and derive the patch counters.

    Parameters
    ----------
    unique_nodes_inds : numpy.ndarray of int
        The indices of the unique representation needed to create the unpacked one.
    shape_by_patch : numpy.ndarray of int
        The shape of the separated nodes by patch.
    nb_unique_nodes : int
        The total number of unique nodes.
    """
    self.unique_nodes_inds = unique_nodes_inds
    self.shape_by_patch = shape_by_patch
    # One unpacked node per control point: sum of per-patch grid sizes.
    self.nb_nodes = np.prod(self.shape_by_patch, axis=1).sum()
    self.nb_unique_nodes = nb_unique_nodes
    nb_patchs, npa = self.shape_by_patch.shape
    self.nb_patchs = nb_patchs
    self.npa = npa
|
|
103
|
+
|
|
104
|
+
@classmethod
def from_nodes_couples(cls, nodes_couples, shape_by_patch):
    """
    Build the connectivity from couples of unpacked node indices.

    Parameters
    ----------
    nodes_couples : numpy.ndarray of int
        Pairs of unpacked node indices considered the same.
        Its shape should be (# of couples, 2).
    shape_by_patch : numpy.ndarray of int
        The shape of the separated nodes by patch.

    Returns
    -------
    MultiPatchBSplineConnectivity
        Instance of `MultiPatchBSplineConnectivity` created.
    """
    total_nodes = np.sum(np.prod(shape_by_patch, axis=1))
    # Union-find collapses every couple onto a single representative index.
    unique_nodes_inds = _get_unique_nodes_inds(
        nodes_couples.astype(np.int32), np.int64(total_nodes)
    )
    # Renumber the representatives so they form a dense 0..k-1 range:
    # subtract from each node the number of unused indices below its root.
    representatives, inverse = np.unique(unique_nodes_inds, return_inverse=True)
    gaps = np.diff(np.concatenate(([-1], representatives))) - 1
    unique_nodes_inds -= np.cumsum(gaps)[inverse]
    return cls(unique_nodes_inds, shape_by_patch, np.unique(unique_nodes_inds).size)
|
|
134
|
+
|
|
135
|
+
@classmethod
def from_separated_ctrlPts(
    cls, separated_ctrlPts, eps=1e-10, return_nodes_couples: bool = False
):
    """
    Build the connectivity by geometrically matching control points that
    coincide (up to `eps`) across patches, given in separated representation.

    Parameters
    ----------
    separated_ctrlPts : list of numpy.ndarray of float
        Control points of every patch in the separated representation.
        Every array is of shape :
        (``NPh``, nb elem for dim 1, ..., nb elem for dim ``npa``)
    eps : float, optional
        Maximum distance between two points to be considered the same, by default 1e-10
    return_nodes_couples : bool, optional
        If `True`, also returns the `nodes_couples` created, by default False

    Returns
    -------
    MultiPatchBSplineConnectivity
        Instance of `MultiPatchBSplineConnectivity` created.
    """
    NPh = separated_ctrlPts[0].shape[0]
    assert np.all(
        [ctrlPts.shape[0] == NPh for ctrlPts in separated_ctrlPts[1:]]
    ), "Physical spaces must contain the same number of dimensions !"
    shape_by_patch = np.array(
        [ctrlPts.shape[1:] for ctrlPts in separated_ctrlPts], dtype="int"
    )

    # Flatten every patch and stack all control points side by side so a
    # single KD-tree query covers intra- and inter-patch coincidences.
    all_pts = np.hstack([pts.reshape((NPh, -1)) for pts in separated_ctrlPts])
    tree = cKDTree(all_pts.T)
    neighborhoods = tree.query_ball_tree(tree, r=eps)
    # Deduplicate the neighborhoods: each group of mutually-close points
    # appears once as a sorted tuple.
    groups = set()
    for neighborhood in neighborhoods:
        if len(neighborhood) > 1:
            groups.add(tuple(sorted(neighborhood)))
    nodes_couples = []
    for group in groups:
        chain = np.array(group)
        # Chaining consecutive members is enough: union-find transitively
        # merges the whole group.
        nodes_couples.append(np.stack((chain[:-1], chain[1:])).T)

    if len(nodes_couples) > 0:
        nodes_couples = np.vstack(nodes_couples)
    else:
        nodes_couples = np.empty((0, 2), dtype="int")
    connectivity = cls.from_nodes_couples(nodes_couples, shape_by_patch)
    if return_nodes_couples:
        return connectivity, nodes_couples
    return connectivity
|
|
213
|
+
|
|
214
|
+
def unpack(self, unique_field):
    """
    Expand a unique representation into the unpacked (duplicated) one.

    Parameters
    ----------
    unique_field : numpy.ndarray
        The unique representation. Its shape should be :
        (field, shape, ..., `self`.`nb_unique_nodes`)

    Returns
    -------
    unpacked_field : numpy.ndarray
        The unpacked representation. Its shape is :
        (field, shape, ..., `self`.`nb_nodes`)
    """
    # Gathering along the last axis duplicates shared nodes for every patch.
    return np.take(unique_field, self.unique_nodes_inds, axis=-1)
|
|
232
|
+
|
|
233
|
+
def pack(self, unpacked_field, method="mean"):
    """
    Collapse an unpacked representation into the unique one.

    Parameters
    ----------
    unpacked_field : numpy.ndarray
        The unpacked representation. Its shape should be :
        (field, shape, ..., `self`.`nb_nodes`)
    method : str
        How to combine duplicated values that may disagree:
        "last" keeps the last occurrence, "first" the first,
        "mean" averages all occurrences.

    Returns
    -------
    unique_nodes : numpy.ndarray
        The unique representation. Its shape is :
        (field, shape, ..., `self`.`nb_unique_nodes`)
    """
    leading_shape = unpacked_field.shape[:-1]
    packed = np.zeros(
        (*leading_shape, self.nb_unique_nodes), dtype=unpacked_field.dtype
    )
    if method == "last":
        # Fancy-index assignment: later duplicates overwrite earlier ones.
        packed[..., self.unique_nodes_inds] = unpacked_field
    elif method == "first":
        # Reversing both sides makes the first occurrence win instead.
        packed[..., self.unique_nodes_inds[::-1]] = unpacked_field[..., ::-1]
    elif method == "mean":
        # Accumulate every duplicate, then divide by the multiplicity.
        np.add.at(packed.T, self.unique_nodes_inds, unpacked_field.T)
        counts = np.bincount(self.unique_nodes_inds, minlength=self.nb_unique_nodes)
        packed /= counts
    else:
        raise NotImplementedError(
            f"Method {method} is not implemented ! Consider using 'first' or 'mean'."
        )
    return packed
|
|
269
|
+
|
|
270
|
+
def separate(self, unpacked_field):
    """
    Split an unpacked representation into the per-patch separated one.

    Parameters
    ----------
    unpacked_field : numpy.ndarray
        The unpacked representation. Its shape is :
        (field, shape, ..., `self`.`nb_nodes`)

    Returns
    -------
    separated_field : list of numpy.ndarray
        The separated representation. Every array is of shape :
        (field, shape, ..., nb elem for dim 1, ..., nb elem for dim `npa`)
    """
    leading_shape = unpacked_field.shape[:-1]
    # Each patch occupies a contiguous slice of the last axis whose length
    # is the product of its grid shape.
    patch_sizes = np.prod(self.shape_by_patch, axis=1)
    offsets = np.cumsum(patch_sizes)[:-1]
    chunks = np.split(unpacked_field, offsets, axis=-1)
    return [
        chunk.reshape((*leading_shape, *patch_shape))
        for chunk, patch_shape in zip(chunks, self.shape_by_patch)
    ]
|
|
296
|
+
|
|
297
|
+
def agglomerate(self, separated_field):
    """
    Concatenate a separated representation into the unpacked one.

    Parameters
    ----------
    separated_field : list of numpy.ndarray
        The separated representation. Every array is of shape :
        (field, shape, ..., nb elem for dim 1, ..., nb elem for dim `npa`)

    Returns
    -------
    unpacked_field : numpy.ndarray
        The unpacked representation. Its shape is :
        (field, shape, ..., `self`.`nb_nodes`)
    """
    # The leading (field) axes must agree across patches; only the last
    # `npa` axes are patch-specific.
    leading_shape = separated_field[0].shape[: -self.npa]
    for patch_field in separated_field:
        assert (
            patch_field.shape[: -self.npa] == leading_shape
        ), "Every patch must have the same field shape !"
    flattened = [
        patch_field.reshape((*leading_shape, -1)) for patch_field in separated_field
    ]
    return np.concatenate(flattened, axis=-1)
|
|
321
|
+
|
|
322
|
+
def unique_field_indices(self, field_shape, representation="separated"):
    """
    Get a field of unique indices in the requested representation.

    Parameters
    ----------
    field_shape : tuple of int
        The shape of the field. For example, (3,) for a vector field or
        (3, 3) for a second order tensor field.
    representation : str, optional
        One of `"unique"`, `"unpacked"`, or `"separated"`; the representation
        in which the indices are returned, by default "separated"

    Returns
    -------
    unique_field_indices : numpy.ndarray of int or list of numpy.ndarray of int
        If unique, its shape is (*`field_shape`, `self`.`nb_unique_nodes`).
        If unpacked, its shape is (*`field_shape`, `self`.`nb_nodes`).
        If separated, every array is of shape :
        (*`field_shape`, nb elem for dim 1, ..., nb elem for dim `npa`).

    Raises
    ------
    ValueError
        If `representation` is not one of the three accepted values.
    """
    nb_indices = np.prod(field_shape) * self.nb_unique_nodes
    # Consecutive integers laid out over the field axes and unique nodes.
    as_unique = np.arange(nb_indices, dtype="int").reshape(
        (*field_shape, self.nb_unique_nodes)
    )
    if representation == "unique":
        return as_unique
    if representation == "unpacked":
        return self.unpack(as_unique)
    if representation == "separated":
        return self.separate(self.unpack(as_unique))
    raise ValueError(
        f'Representation "{representation}" not recognised. Representation must either be "unique", "unpacked", or "separated" !'
    )
|
|
362
|
+
|
|
363
|
+
def get_duplicate_unpacked_nodes_mask(self):
    """
    Return a boolean mask flagging unpacked nodes that are duplicated.

    Returns
    -------
    duplicate_nodes_mask : numpy.ndarray
        Boolean mask of shape (nb_nodes,); True where the node's unique
        index occurs more than once in the unpacked representation.
    """
    # Count how many unpacked nodes map to each unique index, then look
    # the count back up for every unpacked node.
    occurrences = np.bincount(self.unique_nodes_inds)
    return occurrences[self.unique_nodes_inds] > 1
|
|
378
|
+
|
|
379
|
+
def extract_exterior_borders(self, splines):
    """
    Extract exterior borders from B-spline patches.

    A border (a face of dimension `npa` - 1) is *exterior* when at least one
    of its nodes is not shared with another patch.

    Parameters
    ----------
    splines : list[BSpline]
        B-spline patches to extract borders from, one per patch of `self`.

    Returns
    -------
    border_connectivity : MultiPatchBSplineConnectivity
        Connectivity information for the border patches.
    border_splines : numpy.ndarray of object
        Array of `BSpline` patches representing the borders.
    border_unique_to_self_unique_connectivity : numpy.ndarray of int
        Array mapping border unique nodes to original unique nodes.

    Raises
    ------
    AssertionError
        If the parametric space dimension is less than 2.
    """
    if self.npa <= 1:
        raise AssertionError(
            "The parametric space must be at least 2D to extract borders !"
        )
    # A border side is exterior iff NOT all of its nodes are duplicated.
    duplicate_unpacked_nodes_mask = self.get_duplicate_unpacked_nodes_mask()
    duplicate_separated_nodes_mask = self.separate(duplicate_unpacked_nodes_mask)
    separated_unique_nodes_inds = self.unique_field_indices(())
    arr = np.arange(self.npa).tolist()
    border_splines = []
    border_unique_nodes_inds = []
    border_shape_by_patch = []
    for i in range(self.nb_patchs):
        spline = splines[i]
        duplicate_nodes_mask_spline = duplicate_separated_nodes_mask[i]
        unique_nodes_inds_spline = separated_unique_nodes_inds[i]
        shape_by_patch_spline = self.shape_by_patch[i]
        for axis in range(self.npa):
            # Cycle the remaining parametric axes so `axis` is dropped;
            # `bases` and `axes` describe the (npa-1)-D border patch.
            # NOTE(review): the [::-1] reversal on the 0-side vs the plain
            # order on the -1-side presumably keeps a consistent outward
            # orientation of the border — confirm against BSpline.from_bases.
            bases = np.hstack((spline.bases[(axis + 1) :], spline.bases[:axis]))
            axes = arr[axis:-1] + arr[:axis]
            border_shape_by_patch_spline = np.hstack(
                (shape_by_patch_spline[(axis + 1) :], shape_by_patch_spline[:axis])
            )
            # Side at parameter 0 along `axis`: exterior if any node is unshared.
            if not np.take(duplicate_nodes_mask_spline, 0, axis=axis).all():
                bspline_border = BSpline.from_bases(bases[::-1])
                border_splines.append(bspline_border)
                unique_nodes_inds_spline_border = (
                    np.take(unique_nodes_inds_spline, 0, axis=axis)
                    .transpose(axes[::-1])
                    .ravel()
                )
                border_unique_nodes_inds.append(unique_nodes_inds_spline_border)
                border_shape_by_patch_spline_border = border_shape_by_patch_spline[
                    ::-1
                ][None]
                border_shape_by_patch.append(border_shape_by_patch_spline_border)
                # print(f"side {0} of axis {axis} of patch {i} uses nodes {unique_nodes_inds_spline_border}")
            # Side at parameter 1 (last index) along `axis`.
            if not np.take(duplicate_nodes_mask_spline, -1, axis=axis).all():
                bspline_border = BSpline.from_bases(bases)
                border_splines.append(bspline_border)
                unique_nodes_inds_spline_border = (
                    np.take(unique_nodes_inds_spline, -1, axis=axis)
                    .transpose(axes)
                    .ravel()
                )
                border_unique_nodes_inds.append(unique_nodes_inds_spline_border)
                border_shape_by_patch_spline_border = border_shape_by_patch_spline[
                    None
                ]
                border_shape_by_patch.append(border_shape_by_patch_spline_border)
                # print(f"side {-1} of axis {axis} of patch {i} uses nodes {unique_nodes_inds_spline_border}")
    border_splines = np.array(border_splines, dtype="object")
    border_unique_nodes_inds = np.concatenate(border_unique_nodes_inds)
    border_shape_by_patch = np.concatenate(border_shape_by_patch)
    # Renumber the kept unique indices into a dense 0..k-1 range while
    # remembering the mapping back to the original unique numbering.
    border_unique_to_self_unique_connectivity, inverse = np.unique(
        border_unique_nodes_inds, return_inverse=True
    )
    border_unique_nodes_inds -= np.cumsum(
        np.diff(np.concatenate(([-1], border_unique_to_self_unique_connectivity)))
        - 1
    )[inverse]
    border_nb_unique_nodes = np.unique(border_unique_nodes_inds).size
    border_connectivity = self.__class__(
        border_unique_nodes_inds, border_shape_by_patch, border_nb_unique_nodes
    )
    return (
        border_connectivity,
        border_splines,
        border_unique_to_self_unique_connectivity,
    )
|
|
471
|
+
|
|
472
|
+
def extract_interior_borders(self, splines):
    """
    Extract interior borders from B-spline patches, i.e. the borders whose
    nodes are all shared between patches.

    Mirrors `extract_exterior_borders`: the only difference is the side
    selection condition (all nodes duplicated instead of not all).

    Parameters
    ----------
    splines : list[BSpline]
        B-spline patches to extract borders from, one per patch of `self`.

    Returns
    -------
    border_connectivity : MultiPatchBSplineConnectivity
        Connectivity information for the border patches.
    border_splines : numpy.ndarray of object
        Array of `BSpline` patches representing the borders.
    border_unique_to_self_unique_connectivity : numpy.ndarray of int
        Array mapping border unique nodes to original unique nodes.

    Raises
    ------
    AssertionError
        If the parametric space dimension is less than 2.
    """
    if self.npa <= 1:
        raise AssertionError(
            "The parametric space must be at least 2D to extract borders !"
        )
    # A border side is interior iff ALL of its nodes are duplicated.
    duplicate_unpacked_nodes_mask = self.get_duplicate_unpacked_nodes_mask()
    duplicate_separated_nodes_mask = self.separate(duplicate_unpacked_nodes_mask)
    separated_unique_nodes_inds = self.unique_field_indices(())
    arr = np.arange(self.npa).tolist()
    border_splines = []
    border_unique_nodes_inds = []
    border_shape_by_patch = []
    for i in range(self.nb_patchs):
        spline = splines[i]
        duplicate_nodes_mask_spline = duplicate_separated_nodes_mask[i]
        unique_nodes_inds_spline = separated_unique_nodes_inds[i]
        shape_by_patch_spline = self.shape_by_patch[i]
        for axis in range(self.npa):
            # Cycle the remaining parametric axes so `axis` is dropped;
            # `bases` and `axes` describe the (npa-1)-D border patch.
            # NOTE(review): the orientation conventions (reversed on the
            # 0-side, plain on the -1-side) match extract_exterior_borders.
            bases = np.hstack((spline.bases[(axis + 1) :], spline.bases[:axis]))
            axes = arr[axis:-1] + arr[:axis]
            border_shape_by_patch_spline = np.hstack(
                (shape_by_patch_spline[(axis + 1) :], shape_by_patch_spline[:axis])
            )
            # Side at parameter 0 along `axis`: interior if fully shared.
            if np.take(duplicate_nodes_mask_spline, 0, axis=axis).all():
                bspline_border = BSpline.from_bases(bases[::-1])
                border_splines.append(bspline_border)
                unique_nodes_inds_spline_border = (
                    np.take(unique_nodes_inds_spline, 0, axis=axis)
                    .transpose(axes[::-1])
                    .ravel()
                )
                border_unique_nodes_inds.append(unique_nodes_inds_spline_border)
                border_shape_by_patch_spline_border = border_shape_by_patch_spline[
                    ::-1
                ][None]
                border_shape_by_patch.append(border_shape_by_patch_spline_border)
                # print(f"side {0} of axis {axis} of patch {i} uses nodes {unique_nodes_inds_spline_border}")
            # Side at parameter 1 (last index) along `axis`.
            if np.take(duplicate_nodes_mask_spline, -1, axis=axis).all():
                bspline_border = BSpline.from_bases(bases)
                border_splines.append(bspline_border)
                unique_nodes_inds_spline_border = (
                    np.take(unique_nodes_inds_spline, -1, axis=axis)
                    .transpose(axes)
                    .ravel()
                )
                border_unique_nodes_inds.append(unique_nodes_inds_spline_border)
                border_shape_by_patch_spline_border = border_shape_by_patch_spline[
                    None
                ]
                border_shape_by_patch.append(border_shape_by_patch_spline_border)
                # print(f"side {-1} of axis {axis} of patch {i} uses nodes {unique_nodes_inds_spline_border}")
    border_splines = np.array(border_splines, dtype="object")
    border_unique_nodes_inds = np.concatenate(border_unique_nodes_inds)
    border_shape_by_patch = np.concatenate(border_shape_by_patch)
    # Renumber the kept unique indices into a dense 0..k-1 range while
    # remembering the mapping back to the original unique numbering.
    border_unique_to_self_unique_connectivity, inverse = np.unique(
        border_unique_nodes_inds, return_inverse=True
    )
    border_unique_nodes_inds -= np.cumsum(
        np.diff(np.concatenate(([-1], border_unique_to_self_unique_connectivity)))
        - 1
    )[inverse]
    border_nb_unique_nodes = np.unique(border_unique_nodes_inds).size
    border_connectivity = self.__class__(
        border_unique_nodes_inds, border_shape_by_patch, border_nb_unique_nodes
    )
    return (
        border_connectivity,
        border_splines,
        border_unique_to_self_unique_connectivity,
    )
|
|
564
|
+
|
|
565
|
+
# def extract_exterior_surfaces(self, splines):
|
|
566
|
+
# if self.npa!=3:
|
|
567
|
+
# raise AssertionError("The parametric space must be 3D to extract surfaces !")
|
|
568
|
+
# duplicate_unpacked_nodes_mask = self.get_duplicate_unpacked_nodes_mask()
|
|
569
|
+
# duplicate_separated_nodes_mask = self.separate(duplicate_unpacked_nodes_mask)
|
|
570
|
+
# separated_unique_nodes_inds = self.unique_field_indices(())
|
|
571
|
+
# arr = np.arange(self.npa).tolist()
|
|
572
|
+
# border_splines = []
|
|
573
|
+
# border_unique_nodes_inds = []
|
|
574
|
+
# border_shape_by_patch = []
|
|
575
|
+
# for i in range(self.nb_patchs):
|
|
576
|
+
# spline = splines[i]
|
|
577
|
+
# duplicate_nodes_mask_spline = duplicate_separated_nodes_mask[i]
|
|
578
|
+
# unique_nodes_inds_spline = separated_unique_nodes_inds[i]
|
|
579
|
+
# shape_by_patch_spline = self.shape_by_patch[i]
|
|
580
|
+
#
|
|
581
|
+
# # surface 1
|
|
582
|
+
# if not np.take(duplicate_nodes_mask_spline, 0, axis=0).all():
|
|
583
|
+
# bspline_border = BSpline.from_bases(spline.bases[:0:-1])
|
|
584
|
+
# border_splines.append(bspline_border)
|
|
585
|
+
# unique_nodes_inds_spline_border = np.take(unique_nodes_inds_spline, 0, axis=0).T.ravel()
|
|
586
|
+
# border_unique_nodes_inds.append(unique_nodes_inds_spline_border)
|
|
587
|
+
# border_shape_by_patch_spline = shape_by_patch_spline[:0:-1][None]
|
|
588
|
+
# border_shape_by_patch.append(border_shape_by_patch_spline)
|
|
589
|
+
# print(f"Surface 1 of patch {i} uses nodes {unique_nodes_inds_spline_border}")
|
|
590
|
+
# # surface 2
|
|
591
|
+
# if not np.take(duplicate_nodes_mask_spline, -1, axis=0).all():
|
|
592
|
+
# bspline_border = BSpline.from_bases(spline.bases[1:])
|
|
593
|
+
# border_splines.append(bspline_border)
|
|
594
|
+
# unique_nodes_inds_spline_border = np.take(unique_nodes_inds_spline, -1, axis=0).ravel()
|
|
595
|
+
# border_unique_nodes_inds.append(unique_nodes_inds_spline_border)
|
|
596
|
+
# border_shape_by_patch_spline = shape_by_patch_spline[1:][None]
|
|
597
|
+
# border_shape_by_patch.append(border_shape_by_patch_spline)
|
|
598
|
+
# print(f"Surface 2 of patch {i} uses nodes {unique_nodes_inds_spline_border}")
|
|
599
|
+
# # surface 3
|
|
600
|
+
# if not np.take(duplicate_nodes_mask_spline, 0, axis=1).all():
|
|
601
|
+
# bspline_border = BSpline.from_bases(spline.bases[::2])
|
|
602
|
+
# border_splines.append(bspline_border)
|
|
603
|
+
# unique_nodes_inds_spline_border = np.take(unique_nodes_inds_spline, 0, axis=1).ravel()
|
|
604
|
+
# border_unique_nodes_inds.append(unique_nodes_inds_spline_border)
|
|
605
|
+
# border_shape_by_patch_spline = shape_by_patch_spline[::2][None]
|
|
606
|
+
# border_shape_by_patch.append(border_shape_by_patch_spline)
|
|
607
|
+
# print(f"Surface 3 of patch {i} uses nodes {unique_nodes_inds_spline_border}")
|
|
608
|
+
# # surface 4
|
|
609
|
+
# if not np.take(duplicate_nodes_mask_spline, -1, axis=1).all():
|
|
610
|
+
# bspline_border = BSpline.from_bases(spline.bases[::-2])
|
|
611
|
+
# border_splines.append(bspline_border)
|
|
612
|
+
# unique_nodes_inds_spline_border = np.take(unique_nodes_inds_spline, -1, axis=1).T.ravel()
|
|
613
|
+
# border_unique_nodes_inds.append(unique_nodes_inds_spline_border)
|
|
614
|
+
# border_shape_by_patch_spline = shape_by_patch_spline[::-2][None]
|
|
615
|
+
# border_shape_by_patch.append(border_shape_by_patch_spline)
|
|
616
|
+
# print(f"Surface 4 of patch {i} uses nodes {unique_nodes_inds_spline_border}")
|
|
617
|
+
# # surface 5
|
|
618
|
+
# if not np.take(duplicate_nodes_mask_spline, 0, axis=2).all():
|
|
619
|
+
# bspline_border = BSpline.from_bases(spline.bases[1::-1])
|
|
620
|
+
# border_splines.append(bspline_border)
|
|
621
|
+
# unique_nodes_inds_spline_border = np.take(unique_nodes_inds_spline, 0, axis=2).T.ravel()
|
|
622
|
+
# border_unique_nodes_inds.append(unique_nodes_inds_spline_border)
|
|
623
|
+
# border_shape_by_patch_spline = shape_by_patch_spline[1::-1][None]
|
|
624
|
+
# border_shape_by_patch.append(border_shape_by_patch_spline)
|
|
625
|
+
# print(f"Surface 5 of patch {i} uses nodes {unique_nodes_inds_spline_border}")
|
|
626
|
+
# # surface 6
|
|
627
|
+
# if not np.take(duplicate_nodes_mask_spline, -1, axis=2).all():
|
|
628
|
+
# bspline_border = BSpline.from_bases(spline.bases[:2])
|
|
629
|
+
# border_splines.append(bspline_border)
|
|
630
|
+
# unique_nodes_inds_spline_border = np.take(unique_nodes_inds_spline, -1, axis=2).ravel()
|
|
631
|
+
# border_unique_nodes_inds.append(unique_nodes_inds_spline_border)
|
|
632
|
+
# border_shape_by_patch_spline = shape_by_patch_spline[:2][None]
|
|
633
|
+
# border_shape_by_patch.append(border_shape_by_patch_spline)
|
|
634
|
+
# print(f"Surface 6 of patch {i} uses nodes {unique_nodes_inds_spline_border}")
|
|
635
|
+
# border_splines = np.array(border_splines, dtype='object')
|
|
636
|
+
# border_unique_nodes_inds = np.concatenate(border_unique_nodes_inds)
|
|
637
|
+
# border_shape_by_patch = np.concatenate(border_shape_by_patch)
|
|
638
|
+
# border_unique_to_self_unique_connectivity, inverse = np.unique(border_unique_nodes_inds, return_inverse=True)
|
|
639
|
+
# border_unique_nodes_inds -= np.cumsum(np.diff(np.concatenate(([-1], border_unique_to_self_unique_connectivity))) - 1)[inverse]
|
|
640
|
+
# border_nb_unique_nodes = np.unique(border_unique_nodes_inds).size
|
|
641
|
+
# border_connectivity = self.__class__(border_unique_nodes_inds, border_shape_by_patch, border_nb_unique_nodes)
|
|
642
|
+
# return border_connectivity, border_splines, border_unique_to_self_unique_connectivity
|
|
643
|
+
|
|
644
|
+
def subset(self, splines, patches_to_keep):
    """
    Create a subset of the multi-patch B-spline connectivity by keeping only selected patches.

    Parameters
    ----------
    splines : numpy.ndarray of BSpline
        Array of B-spline patches to subset. Must support fancy indexing
        (``splines[patches_to_keep]``), i.e. a numpy object array rather
        than a plain Python list.
    patches_to_keep : numpy.array of int
        Indices of patches to keep in the subset.

    Returns
    -------
    new_connectivity : MultiPatchBSplineConnectivity
        New connectivity object containing only the selected patches.
    new_splines : numpy.ndarray of BSpline
        Array of B-spline patches for the selected patches.
    new_unique_to_self_unique_connectivity : numpy.ndarray of int
        Array mapping new unique nodes to original unique nodes.
    """
    new_splines = splines[patches_to_keep]
    # Unique node index of every node of every patch, in separated format.
    separated_unique_nodes_inds = self.unique_field_indices(())
    new_unique_nodes_inds = np.concatenate(
        [separated_unique_nodes_inds[patch].flat for patch in patches_to_keep]
    )
    new_shape_by_patch = self.shape_by_patch[patches_to_keep]
    # Sorted original unique-node indices still referenced by the subset;
    # `inverse` maps each entry of new_unique_nodes_inds into that sorted array.
    new_unique_to_self_unique_connectivity, inverse = np.unique(
        new_unique_nodes_inds, return_inverse=True
    )
    # Renumber to a dense 0..n-1 range: subtract from every index the number
    # of original unique indices below it that are absent from the subset
    # (the accumulated gaps in the sorted kept-index sequence).
    new_unique_nodes_inds -= np.cumsum(
        np.diff(np.concatenate(([-1], new_unique_to_self_unique_connectivity))) - 1
    )[inverse]
    new_nb_unique_nodes = np.unique(new_unique_nodes_inds).size
    new_connectivity = self.__class__(
        new_unique_nodes_inds, new_shape_by_patch, new_nb_unique_nodes
    )
    return new_connectivity, new_splines, new_unique_to_self_unique_connectivity
|
|
681
|
+
|
|
682
|
+
def make_control_poly_meshes(
    self,
    splines: Iterable[BSpline],
    separated_ctrl_pts: Iterable[np.ndarray[np.floating]],
    n_eval_per_elem: Union[Iterable[int], int] = 10,
    n_step: int = 1,
    unique_fields: dict = {},
    separated_fields: Union[dict, None] = None,
    XI_list: Union[None, Iterable[tuple[np.ndarray[np.floating], ...]]] = None,
    paraview_sizes: dict = {},
) -> list[io.Mesh]:
    """
    Build one merged control-polygon mesh per time step over all patches.

    Fields given in `unique_fields` (unique format, array values only) are
    converted to the separated format and injected into `separated_fields`,
    then each patch's `BSpline.make_control_poly_meshes` is called. A
    per-point "patch_id" array is attached to every patch mesh before the
    patch meshes of each time step are merged into a single `io.Mesh`.

    Raises
    ------
    NotImplementedError
        If a callable is passed in `unique_fields`.
    ValueError
        If a field in `unique_fields` is not in the unique format.
    """
    if type(n_eval_per_elem) is int:
        n_eval_per_elem = [n_eval_per_elem] * self.npa

    if separated_fields is None:
        separated_fields = [dict() for _ in range(self.nb_patchs)]

    if XI_list is None:
        XI_list = [None] * self.nb_patchs

    # Convert every unique-format field to the separated format.
    for key, arr in unique_fields.items():
        if callable(arr):
            raise NotImplementedError(
                "To handle functions as fields, use separated_fields !"
            )
        if arr.shape[-1] != self.nb_unique_nodes:
            raise ValueError(
                f"Field {key} of unique_fields is in a wrong format (not unique format)."
            )
        per_patch_values = self.separate(self.unpack(arr))
        for p in range(self.nb_patchs):
            separated_fields[p][key] = per_patch_values[p]

    # (patch, time step) table of meshes.
    per_patch_steps = np.empty((self.nb_patchs, n_step), dtype=object)
    for p in range(self.nb_patchs):
        per_patch_steps[p] = splines[p].make_control_poly_meshes(
            separated_ctrl_pts[p],
            n_eval_per_elem=n_eval_per_elem,
            n_step=n_step,
            fields=separated_fields[p],
            XI=XI_list[p],
            paraview_sizes=paraview_sizes,
        )
        # Tag every point with its patch of origin.
        for m in per_patch_steps[p]:
            m.point_data["patch_id"] = np.full(m.points.shape[0], p, dtype=int)

    # One merged mesh per time step (columns of the (patch, step) table).
    return [merge_meshes(step_column) for step_column in per_patch_steps.T]
|
|
735
|
+
|
|
736
|
+
def make_elem_separator_meshes(
    self,
    splines: Iterable[BSpline],
    separated_ctrl_pts: Iterable[np.ndarray[np.floating]],
    n_eval_per_elem: Union[Iterable[int], int] = 10,
    n_step: int = 1,
    unique_fields: dict = {},
    separated_fields: Union[dict, None] = None,
    XI_list: Union[None, Iterable[tuple[np.ndarray[np.floating], ...]]] = None,
    paraview_sizes: dict = {},
    parallel: bool = True,
    verbose: bool = True,
) -> list[io.Mesh]:
    """
    Build one merged element-separator mesh per time step over all patches.

    Fields given in `unique_fields` (unique format, array values only) are
    converted to the separated format and injected into `separated_fields`,
    then each patch's `BSpline.make_elem_separator_meshes` is called,
    optionally in parallel. A per-point "patch_id" array is attached to
    every patch mesh before the patch meshes of each time step are merged.

    Raises
    ------
    NotImplementedError
        If a callable is passed in `unique_fields`.
    ValueError
        If a field in `unique_fields` is not in the unique format.
    """
    if type(n_eval_per_elem) is int:
        n_eval_per_elem = [n_eval_per_elem] * self.npa

    if separated_fields is None:
        separated_fields = [{} for _ in range(self.nb_patchs)]

    if XI_list is None:
        XI_list = [None] * self.nb_patchs

    # Convert every unique-format field to the separated format.
    for key, value in unique_fields.items():
        if callable(value):
            raise NotImplementedError(
                "To handle functions as fields, use separated_fields !"
            )
        if value.shape[-1] != self.nb_unique_nodes:
            raise ValueError(
                f"Field {key} of unique_fields is in a wrong format (not unique format)."
            )
        separated_value = self.separate(self.unpack(value))
        for patch in range(self.nb_patchs):
            separated_fields[patch][key] = separated_value[patch]

    funcs = [splines[i].make_elem_separator_meshes for i in range(self.nb_patchs)]
    all_args = [
        (
            separated_ctrl_pts[i],
            n_eval_per_elem,
            n_step,
            separated_fields[i],
            XI_list[i],
            paraview_sizes,
        )
        for i in range(self.nb_patchs)
    ]

    elem_separator_meshes = parallel_blocks(
        funcs,
        all_args,
        verbose=verbose,
        pbar_title="Making elements separators meshes",
        disable_parallel=not parallel,
    )

    # Tag every point with its patch of origin.
    for patch, step_meshes in enumerate(elem_separator_meshes):
        for mesh in step_meshes:
            mesh.point_data["patch_id"] = np.full(
                mesh.points.shape[0], patch, dtype=int
            )

    # Merge, per time step, the meshes of every patch.
    meshes = [merge_meshes(patch_meshes) for patch_meshes in zip(*elem_separator_meshes)]

    return meshes
|
|
804
|
+
|
|
805
|
+
def make_elements_interior_meshes(
    self,
    splines: Iterable[BSpline],
    separated_ctrl_pts: Iterable[np.ndarray[np.floating]],
    n_eval_per_elem: Union[Iterable[int], int] = 10,
    n_step: int = 1,
    unique_fields: dict = {},
    separated_fields: Union[dict, None] = None,
    XI_list: Union[None, Iterable[tuple[np.ndarray[np.floating], ...]]] = None,
    parallel: bool = True,
    verbose: bool = True,
) -> list[io.Mesh]:
    """
    Build one merged elements-interior mesh per time step over all patches.

    Fields given in `unique_fields` (unique format, array values only) are
    converted to the separated format and injected into `separated_fields`,
    then each patch's `BSpline.make_elements_interior_meshes` is called,
    optionally in parallel. A per-point "patch_id" array is attached to
    every patch mesh before the patch meshes of each time step are merged.

    Raises
    ------
    NotImplementedError
        If a callable is passed in `unique_fields`.
    ValueError
        If a field in `unique_fields` is not in the unique format.
    """
    if type(n_eval_per_elem) is int:
        n_eval_per_elem = [n_eval_per_elem] * self.npa

    if separated_fields is None:
        separated_fields = [{} for _ in range(self.nb_patchs)]

    if XI_list is None:
        XI_list = [None] * self.nb_patchs

    # Convert every unique-format field to the separated format.
    for key, value in unique_fields.items():
        if callable(value):
            raise NotImplementedError(
                "To handle functions as fields, use separated_fields !"
            )
        if value.shape[-1] != self.nb_unique_nodes:
            raise ValueError(
                f"Field {key} of unique_fields is in a wrong format (not unique format)."
            )
        separated_value = self.separate(self.unpack(value))
        for patch in range(self.nb_patchs):
            separated_fields[patch][key] = separated_value[patch]

    funcs = [
        splines[i].make_elements_interior_meshes for i in range(self.nb_patchs)
    ]
    all_args = [
        (
            separated_ctrl_pts[i],
            n_eval_per_elem,
            n_step,
            separated_fields[i],
            XI_list[i],
        )
        for i in range(self.nb_patchs)
    ]

    interior_meshes = parallel_blocks(
        funcs,
        all_args,
        verbose=verbose,
        pbar_title="Making interior meshes",
        disable_parallel=not parallel,
    )

    # Tag every point with its patch of origin.
    for patch, step_meshes in enumerate(interior_meshes):
        for mesh in step_meshes:
            mesh.point_data["patch_id"] = np.full(
                mesh.points.shape[0], patch, dtype=int
            )

    # Merge, per time step, the meshes of every patch.
    meshes = [merge_meshes(patch_meshes) for patch_meshes in zip(*interior_meshes)]

    return meshes
|
|
872
|
+
|
|
873
|
+
def make_all_meshes(
    self,
    splines: Iterable[BSpline],
    separated_ctrl_pts: Iterable[np.ndarray[np.floating]],
    n_step: int = 1,
    n_eval_per_elem: Union[int, Iterable[int]] = 10,
    unique_fields: dict = {},
    separated_fields: Union[list[dict], None] = None,
    XI_list: Union[None, Iterable[tuple[np.ndarray[np.floating], ...]]] = None,
    verbose: bool = True,
    fields_on_interior_only: Union[bool, Literal["auto"], list[str]] = "auto",
    disable_parallel: bool = False,
) -> tuple[list[io.Mesh], list[io.Mesh], list[io.Mesh]]:
    """
    Generate all mesh representations (interior, element borders, and control points) for a multipatch B-spline geometry.

    This method creates three types of meshes for visualization or analysis:
    - The interior mesh representing the B-spline surface or volume.
    - The element separator mesh showing the borders between elements.
    - The control polygon mesh showing the control structure.

    Parameters
    ----------
    splines : Iterable[BSpline]
        List of B-spline patches to process.
    separated_ctrl_pts : Iterable[np.ndarray[np.floating]]
        Control points for each patch in separated representation.
    n_step : int, optional
        Number of time steps to generate. By default, 1.
    n_eval_per_elem : Union[int, Iterable[int]], optional
        Number of evaluation points per element for each isoparametric dimension.
        If an `int` is provided, the same number is used for all dimensions.
        If an `Iterable` is provided, each value corresponds to a different dimension.
        By default, 10.
    unique_fields : dict, optional
        Fields in unique representation to visualize. By default, `{}`.
        Keys are field names, values are arrays (not callables nor FE fields).
    separated_fields : Union[list[dict], None], optional
        Fields to visualize at each time step.
        List of `self.nb_patchs` dictionaries (one per patch) of format:
        {
            "field_name": `field_value`
        }
        where `field_value` can be either:
        1. A `np.ndarray` with shape (`n_step`, `field_size`, `self.shape_by_patch[patch]`)
        2. A `np.ndarray` with shape (`n_step`, `field_size`, `*grid_shape`)
        3. A function that computes field values (`np.ndarray[np.floating]`) at given points from the `BSpline` instance and `XI`.
        By default, None.
    XI_list : Union[None, Iterable[tuple[np.ndarray[np.floating], ...]]], optional
        Parametric coordinates at which to evaluate the B-spline patches and fields.
        If not `None`, overrides the `n_eval_per_elem` parameter.
        If `None`, regular grids are generated according to `n_eval_per_elem`.
        By default, None.
    verbose : bool, optional
        Whether to print progress information. By default, True.
    fields_on_interior_only : Union[bool, Literal['auto'], list[str]], optional
        Whether to include fields only on the interior mesh (`True`), on all meshes (`False`),
        or on specified field names.
        If set to `'auto'`, fields named `'u'`, `'U'`, `'displacement'` or `'displ'`
        are included on all meshes while others are only included on the interior mesh.
        By default, 'auto'.
    disable_parallel : bool, optional
        Whether to disable parallel execution. By default, False.

    Returns
    -------
    tuple[list[io.Mesh], list[io.Mesh], list[io.Mesh]]
        Tuple containing three lists of `io.Mesh` objects:
        - Interior meshes for each time step.
        - Element separator meshes for each time step.
        - Control polygon meshes for each time step.

    Raises
    ------
    NotImplementedError
        If a callable is passed in `unique_fields`.
    ValueError
        If a field in `unique_fields` does not have the correct shape.

    Notes
    -----
    - The isoparametric space refers to the parametric space of the B-splines.
    - Fields can be visualized as scalars or vectors.
    - Supports time-dependent visualization through `n_step`.
    - Fields in `unique_fields` must be arrays; to use callables, use `separated_fields`.

    Examples
    --------
    >>> interior, borders, control = connectivity.make_all_meshes(splines, separated_ctrl_pts)
    """

    if type(n_eval_per_elem) is int:
        n_eval_per_elem = [n_eval_per_elem] * self.npa

    if separated_fields is None:
        separated_fields = [{} for _ in range(self.nb_patchs)]

    if XI_list is None:
        XI_list = [None] * self.nb_patchs

    # Convert every unique-format field to the separated format.
    for key, value in unique_fields.items():
        if callable(value):
            raise NotImplementedError(
                "To handle functions as fields, use separated_fields !"
            )
        if value.shape[-1] != self.nb_unique_nodes:
            raise ValueError(
                f"Field {key} of unique_fields is in a wrong format (not unique format)."
            )
        separated_value = self.separate(self.unpack(value))
        for patch in range(self.nb_patchs):
            separated_fields[patch][key] = separated_value[patch]

    def _field_size(value):
        # Number of components of a field: probe callables at a single
        # parametric point, otherwise read the array's component axis.
        if callable(value):
            return value(splines[0], np.zeros((splines[0].NPa, 1))).shape[2]
        return value.shape[1]

    # paraview_sizes lists the fields that are restricted to the interior
    # mesh (with their component counts), so the border / control-point
    # meshes can account for them without carrying their data.
    paraview_sizes = {}
    # Field names/sizes are probed on the first patch (guard empty multipatch).
    fields = separated_fields[0] if self.nb_patchs > 0 else {}
    if fields_on_interior_only is True:
        interior_only_keys = list(fields)
    elif fields_on_interior_only is False:
        interior_only_keys = []
    elif fields_on_interior_only == "auto":
        # Displacement-like fields stay on every mesh; the rest are
        # restricted to the interior mesh.
        interior_only_keys = [
            key for key in fields if key not in ["u", "U", "displacement", "displ"]
        ]
    else:
        interior_only_keys = fields_on_interior_only
    for key in interior_only_keys:
        paraview_sizes[key] = _field_size(fields[key])

    elem_interior_meshes = self.make_elements_interior_meshes(
        splines,
        separated_ctrl_pts,
        n_eval_per_elem,
        n_step,
        separated_fields=separated_fields,
        XI_list=XI_list,
        verbose=verbose,
        parallel=(not disable_parallel),
    )
    if verbose:
        print("interior done")

    elem_separator_meshes = self.make_elem_separator_meshes(
        splines,
        separated_ctrl_pts,
        n_eval_per_elem,
        n_step,
        separated_fields=separated_fields,
        XI_list=XI_list,
        paraview_sizes=paraview_sizes,
        parallel=(not disable_parallel),
        # Previously omitted: verbose=False did not silence this stage.
        verbose=verbose,
    )
    if verbose:
        print("elements borders done")

    control_poly_meshes = self.make_control_poly_meshes(
        splines,
        separated_ctrl_pts,
        n_eval_per_elem,
        n_step,
        separated_fields=separated_fields,
        XI_list=XI_list,
        paraview_sizes=paraview_sizes,
    )
    if verbose:
        print("control points done")

    return elem_interior_meshes, elem_separator_meshes, control_poly_meshes
|
|
1057
|
+
|
|
1058
|
+
def save_paraview(
    self,
    splines: Iterable[BSpline],
    separated_ctrl_pts: Iterable[np.ndarray[np.floating]],
    path: str,
    name: str,
    n_step: int = 1,
    n_eval_per_elem: Union[int, Iterable[int]] = 10,
    unique_fields: dict = {},
    separated_fields: Union[list[dict], None] = None,
    XI_list: Union[None, Iterable[tuple[np.ndarray[np.floating], ...]]] = None,
    groups: Union[dict[str, dict[str, Union[str, int]]], None] = None,
    make_pvd: bool = True,
    verbose: bool = True,
    fields_on_interior_only: Union[bool, Literal["auto"], list[str]] = "auto",
    disable_parallel: bool = False,
):
    """
    Save multipatch B-spline visualization data as Paraview files.

    This method generates three types of visualization files for a multipatch B-spline geometry:
    - Interior mesh showing the B-spline surface/volume
    - Element borders showing the mesh structure
    - Control points mesh showing the control structure

    All files are saved in VTU format, with an optional PVD file to group them for Paraview.

    Parameters
    ----------
    splines : Iterable[BSpline]
        List of B-spline patches to save.
    separated_ctrl_pts : Iterable[np.ndarray[np.floating]]
        Control points for each patch in separated representation.
    path : str
        Directory path where the files will be saved.
    name : str
        Base name for the output files.
    n_step : int, optional
        Number of time steps to save. By default, 1.
    n_eval_per_elem : Union[int, Iterable[int]], optional
        Number of evaluation points per element for each isoparametric dimension.
        If an `int` is provided, the same number is used for all dimensions.
        If an `Iterable` is provided, each value corresponds to a different dimension.
        By default, 10.
    unique_fields : dict, optional
        Fields in unique representation to save. By default, `{}`.
        Keys are field names, values are arrays (not callables nor FE fields).
    separated_fields : Union[list[dict], None], optional
        Fields to visualize at each time step.
        List of `self.nb_patchs` dictionaries (one per patch) of format:
        {
            "field_name": `field_value`
        }
        where `field_value` can be either:

        1. A numpy array with shape (`n_step`, `field_size`, `self.shape_by_patch[patch]`) where:
            - `n_step`: Number of time steps
            - `field_size`: Size of the field at each point (1 for scalar, 3 for vector)
            - `self.shape_by_patch[patch]`: Same shape as the patch's control points grid (excluding `NPh`)

        2. A numpy array with shape (`n_step`, `field_size`, `*grid_shape`) where:
            - `n_step`: Number of time steps
            - `field_size`: Size of the field at each point (1 for scalar, 3 for vector)
            - `*grid_shape`: Shape of the evaluation grid (number of points along each isoparametric axis)

        3. A function that computes field values (`np.ndarray[np.floating]`) at given
            points from the `BSpline` instance and `XI`, the tuple of arrays containing evaluation
            points for each dimension (`tuple[np.ndarray[np.floating], ...]`).
            The result should be an array of shape (`n_step`, `n_points`, `field_size`) where:
            - `n_step`: Number of time steps
            - `n_points`: Number of evaluation points (n_xi × n_eta × ...)
            - `field_size`: Size of the field at each point (1 for scalar, 3 for vector)

        By default, None.
    XI_list : Iterable[tuple[np.ndarray[np.floating], ...]], optional
        Parametric coordinates at which to evaluate the B-spline patches and fields.
        If not `None`, overrides the `n_eval_per_elem` parameter.
        If `None`, regular grids are generated according to `n_eval_per_elem`.
    groups : Union[dict[str, dict[str, Union[str, int]]], None], optional
        Nested dictionary specifying file groups for PVD organization. Format:
        {
            "group_name": {
                "ext": str,   # File extension (e.g., "vtu")
                "npart": int, # Number of parts in the group
                "nstep": int  # Number of timesteps
            }
        }
        If provided, existing groups are updated; if `None`, groups are created automatically.
        By default, `None`.
    make_pvd : bool, optional
        Whether to create a PVD file grouping all VTU files. By default, `True`.
    verbose : bool, optional
        Whether to print progress information. By default, `True`.
    fields_on_interior_only : Union[bool, Literal['auto'], list[str]], optional
        Whether to include fields only on the interior mesh (`True`), on all meshes (`False`),
        or on specified field names.
        If set to `'auto'`, fields named `'u'`, `'U'`, `'displacement'` or `'displ'`
        are included on all meshes while others are only included on the interior mesh.
        By default, 'auto'.
    disable_parallel : bool, optional
        Whether to disable the parallel execution. By default, False.

    Returns
    -------
    groups : dict[str, dict[str, Union[str, int]]]
        Updated groups dictionary with information about saved files.

    Raises
    ------
    NotImplementedError
        If a callable is passed in `unique_fields`.
    ValueError
        If the multiprocessing pool is not running and cannot be restarted.

    Notes
    -----
    - Creates three types of VTU files for each time step:
        - {name}_interior_{part}_{step}.vtu
        - {name}_elements_borders_{part}_{step}.vtu
        - {name}_control_points_{part}_{step}.vtu
    - If `make_pvd=True`, creates a PVD file named {name}.pvd.
    - Fields can be visualized as scalars or vectors in Paraview.
    - The method supports time-dependent visualization through `n_step`.
    - Fields in `unique_fields` must be arrays; to use callables, use `separated_fields`.

    Examples
    --------
    Save a multipatch B-spline visualization:
    >>> connectivity.save_paraview(splines, separated_ctrl_pts, "./output", "multipatch")

    Save with a custom separated field on a 2 patches multipatch:
    >>> fields = [{"temperature": np.random.rand(1, 4, 4)}, {"temperature": np.random.rand(1, 7, 3)}]
    >>> connectivity.save_paraview(splines, separated_ctrl_pts, "./output", "multipatch", separated_fields=fields)
    """

    if groups is None:
        groups = {}

    def _register_group(group_name: str) -> None:
        # One more part for an existing group, or a fresh single-part group.
        if group_name in groups:
            groups[group_name]["npart"] += 1
        else:
            groups[group_name] = {"ext": "vtu", "npart": 1, "nstep": n_step}

    def _write_group(group_name: str, meshes) -> None:
        # Write one VTU file per time step, numbered by the part just added.
        prefix = os.path.join(
            path, f"{name}_{group_name}_{groups[group_name]['npart'] - 1}"
        )
        for time_step, mesh in enumerate(meshes):
            mesh.write(f"{prefix}_{time_step}.vtu")
        if verbose:
            print(group_name, "saved")

    interior = "interior"
    elements_borders = "elements_borders"
    control_points = "control_points"
    for group_name in (interior, elements_borders, control_points):
        _register_group(group_name)

    elem_interior_meshes, elem_separator_meshes, control_poly_meshes = (
        self.make_all_meshes(
            splines,
            separated_ctrl_pts,
            n_step,
            n_eval_per_elem,
            unique_fields,
            separated_fields,
            XI_list,
            verbose,
            fields_on_interior_only,
            disable_parallel=disable_parallel,
        )
    )

    _write_group(interior, elem_interior_meshes)
    _write_group(elements_borders, elem_separator_meshes)
    _write_group(control_points, control_poly_meshes)

    if make_pvd:
        writePVD(os.path.join(path, name), groups)

    return groups
|
|
1255
|
+
|
|
1256
|
+
|
|
1257
|
+
# def save_YETI(self, splines, separated_ctrl_pts, path, name):
|
|
1258
|
+
# if self.npa==2:
|
|
1259
|
+
# el_type = "U3"
|
|
1260
|
+
# elif self.npa==3:
|
|
1261
|
+
# el_type = "U1"
|
|
1262
|
+
# else:
|
|
1263
|
+
# raise NotImplementedError("Can only save surfaces or volumes !")
|
|
1264
|
+
# objects_list = []
|
|
1265
|
+
# for patch in range(self.nb_patchs):
|
|
1266
|
+
# geomdl_patch = splines[patch].getGeomdl(separated_ctrl_pts[patch])
|
|
1267
|
+
# obj = Domain.DefaultDomain(geometry=geomdl_patch,
|
|
1268
|
+
# id_dom=patch,
|
|
1269
|
+
# elem_type=el_type)
|
|
1270
|
+
# objects_list.append(obj)
|
|
1271
|
+
# write.write_files(objects_list, os.path.join(path, name))
|
|
1272
|
+
|
|
1273
|
+
|
|
1274
|
+
# %%
|
|
1275
|
+
class CouplesBSplineBorder:
|
|
1276
|
+
|
|
1277
|
+
def __init__(
    self,
    spline1_inds,
    spline2_inds,
    axes1,
    axes2,
    front_sides1,
    front_sides2,
    transpose_2_to_1,
    flip_2_to_1,
    NPa,
):
    """
    Store couples of B-spline patch borders and the axis permutation /
    mirroring that maps each border 2 onto its border 1.

    Parameters
    ----------
    spline1_inds, spline2_inds : array-like of int
        Patch index of the first / second member of each couple.
    axes1, axes2 : array-like of int
        Parametric axis orthogonal to the shared border on each side.
    front_sides1, front_sides2 : array-like of bool
        Whether each border lies on the front (last) side of its axis
        rather than the back (first) side.
    transpose_2_to_1 : array-like of int
        Axis permutations mapping border 2's parametric axes onto border 1's.
    flip_2_to_1 : numpy.ndarray of bool
        Per-axis flags: whether border 2 must be flipped along each axis to
        match border 1's orientation. Must be a 2D array; its first axis
        gives the number of couples.
    NPa : int
        Dimension of the patches' parametric space.

    NOTE(review): the parameter descriptions above are inferred from how
    the attributes are consumed by the `extract_border_*` and
    `transpose_and_flip*` class methods — confirm against callers.
    """
    self.spline1_inds = spline1_inds
    self.spline2_inds = spline2_inds
    self.axes1 = axes1
    self.axes2 = axes2
    self.front_sides1 = front_sides1
    self.front_sides2 = front_sides2
    self.transpose_2_to_1 = transpose_2_to_1
    self.flip_2_to_1 = flip_2_to_1
    self.NPa = NPa
    # Number of (border1, border2) couples stored.
    self.nb_couples = self.flip_2_to_1.shape[0]
|
|
1299
|
+
|
|
1300
|
+
@classmethod
def extract_border_pts(cls, field, axis, front_side, field_dim=1, offset=0):
    """
    Extract the hyperface of `field` at one end of a parametric axis.

    Parameters
    ----------
    field : numpy.ndarray
        Array whose first `field_dim` axes are field/component axes and
        whose remaining `npa` axes are the parametric grid axes.
    axis : int
        Parametric axis orthogonal to the border to extract.
    front_side : bool
        If True, take the last layer along `axis`; otherwise the first.
    field_dim : int, optional
        Number of leading non-parametric axes of `field`. By default, 1.
    offset : int, optional
        Distance from the end of the axis: 0 is the outermost layer,
        1 the next one inwards, etc. By default, 0.

    Returns
    -------
    border_field : numpy.ndarray
        Layer of `field` with the remaining parametric axes reordered
        cyclically as (axis+1, ..., npa-1, 0, ..., axis-1); that order is
        reversed when `front_side` is False (presumably so opposite
        borders of adjacent patches get matching orientations — confirm).
    """
    npa = field.ndim - field_dim
    # Cyclic order of the remaining parametric axes, starting after `axis`.
    base_face = np.hstack((np.arange(axis + 1, npa), np.arange(axis)))
    if not front_side:
        base_face = base_face[::-1]
    # Move `axis` to the front (past the field axes), then index the
    # requested layer: -(1+offset) from the end on the front side,
    # `offset` from the start on the back side.
    border_field = field.transpose(
        axis + field_dim, *np.arange(field_dim), *(base_face + field_dim)
    )[(-(1 + offset) if front_side else offset)]
    return border_field
|
|
1310
|
+
|
|
1311
|
+
@classmethod
def extract_border_spline(cls, spline, axis, front_side):
    """
    Build the (NPa - 1)-dimensional B-spline of the border orthogonal to
    `axis`, with its parametric axes ordered like `extract_border_pts`.
    """
    # Cyclic order of the remaining parametric axes, starting after `axis`;
    # reversed on the back side to match the border-points orientation.
    remaining_axes = np.hstack((np.arange(axis + 1, spline.NPa), np.arange(axis)))
    if not front_side:
        remaining_axes = remaining_axes[::-1]
    all_degrees = spline.getDegrees()
    all_knots = spline.getKnots()
    return BSpline(
        [all_degrees[ax] for ax in remaining_axes],
        [all_knots[ax] for ax in remaining_axes],
    )
|
|
1322
|
+
|
|
1323
|
+
@classmethod
|
|
1324
|
+
def transpose_and_flip(cls, field, transpose, flip, field_dim=1):
|
|
1325
|
+
field = field.transpose(*np.arange(field_dim), *(transpose + field_dim))
|
|
1326
|
+
for i in range(flip.size):
|
|
1327
|
+
if flip[i]:
|
|
1328
|
+
field = np.flip(field, axis=(i + field_dim))
|
|
1329
|
+
return field
|
|
1330
|
+
|
|
1331
|
+
@classmethod
|
|
1332
|
+
def transpose_and_flip_knots(cls, knots, spans, transpose, flip):
|
|
1333
|
+
new_knots = []
|
|
1334
|
+
for i in range(flip.size):
|
|
1335
|
+
if flip[i]:
|
|
1336
|
+
new_knots.append(sum(spans[i]) - knots[transpose[i]][::-1])
|
|
1337
|
+
else:
|
|
1338
|
+
new_knots.append(knots[transpose[i]])
|
|
1339
|
+
return new_knots
|
|
1340
|
+
|
|
1341
|
+
@classmethod
|
|
1342
|
+
def transpose_and_flip_back_knots(cls, knots, spans, transpose, flip):
|
|
1343
|
+
transpose_back = np.argsort(transpose)
|
|
1344
|
+
flip_back = flip[transpose_back]
|
|
1345
|
+
return cls.transpose_and_flip_knots(knots, spans, transpose_back, flip_back)
|
|
1346
|
+
|
|
1347
|
+
@classmethod
def transpose_and_flip_spline(cls, spline, transpose, flip):
    """
    Apply an axis permutation and per-axis mirroring to a spline's bases,
    IN PLACE, and return the modified spline.

    Basis i of the result is the basis previously at `transpose[i]`, with
    its knot vector mirrored inside the span when `flip[i]` is set.

    NOTE(review): `spline.bases` is mutated in place — the caller's spline
    object is modified, not copied. Also, a flipped knot vector uses
    `spans[i]` (new axis index) while the knots come from `transpose[i]`;
    confirm this is intended when spans differ between axes.
    """
    spans = spline.getSpans()
    knots = spline.getKnots()
    degrees = spline.getDegrees()
    for i in range(flip.size):
        p = degrees[transpose[i]]
        knot = knots[transpose[i]]
        if flip[i]:
            # Mirror the knot vector inside its span: t -> a + b - t.
            knot = sum(spans[i]) - knot[::-1]
        spline.bases[i] = BSplineBasis(p, knot)
    return spline
|
|
1359
|
+
|
|
1360
|
+
@classmethod
def from_splines(cls, separated_ctrl_pts, splines):
    """
    Detect every glued border couple between a set of B-spline patches.

    Two patch borders are considered glued when, for some pair of axes,
    pair of sides (front/back), transposition and flip pattern of the
    border axes, their degrees, border shapes, knot vectors and border
    control points all coincide.

    Parameters
    ----------
    separated_ctrl_pts : list of np.ndarray
        One control-point array per patch; axis 0 is the physical
        dimension, shared by every patch.
    splines : list of BSpline
        One spline per patch; all must share the same parametric dimension.

    Returns
    -------
    An instance of `cls` built from the arrays describing every detected
    border couple.
    """
    NPa = splines[0].NPa
    assert np.all(
        [sp.NPa == NPa for sp in splines]
    ), "Every patch should have the same parametric space dimension !"
    NPh = separated_ctrl_pts[0].shape[0]
    assert np.all(
        [ctrl_pts.shape[0] == NPh for ctrl_pts in separated_ctrl_pts]
    ), "Every patch should have the same physical space dimension !"
    npatch = len(splines)
    # Every flip pattern of the NPa-1 border axes, as boolean rows:
    # unpack the bits of 0 .. 2**(NPa-1)-1. The negative count trims the
    # unpacked 8-bit rows down to NPa-1 columns (numpy keeps
    # 8 + count elements when count < 0).
    all_flip = np.unpackbits(
        np.arange(2 ** (NPa - 1), dtype="uint8")[:, None],
        axis=1,
        count=(NPa - 1 - 8),
        bitorder="little",
    )[:, ::-1].astype("bool")
    # Every permutation of the NPa-1 border axes.
    all_transpose = np.array(list(permutations(np.arange(NPa - 1))))
    spline1_inds = []
    spline2_inds = []
    axes1 = []
    axes2 = []
    front_sides1 = []
    front_sides2 = []
    transpose_2_to_1 = []
    flip_2_to_1 = []
    # Compare every unordered pair of distinct patches.
    for spline1_ind in range(npatch):
        spline1 = splines[spline1_ind]
        ctrl_pts1 = separated_ctrl_pts[spline1_ind]
        for spline2_ind in range(spline1_ind + 1, npatch):
            spline2 = splines[spline2_ind]
            ctrl_pts2 = separated_ctrl_pts[spline2_ind]
            for axis1 in range(spline1.NPa):
                # Degrees/knots of patch 1's border axes, rotated so the
                # axes after axis1 come first (same rotation as
                # extract_border_pts uses).
                degrees1 = np.hstack(
                    (
                        spline1.getDegrees()[(axis1 + 1) :],
                        spline1.getDegrees()[:axis1],
                    )
                )
                knots1 = (
                    spline1.getKnots()[(axis1 + 1) :] + spline1.getKnots()[:axis1]
                )
                for axis2 in range(spline2.NPa):
                    degrees2 = np.hstack(
                        (
                            spline2.getDegrees()[(axis2 + 1) :],
                            spline2.getDegrees()[:axis2],
                        )
                    )
                    knots2 = (
                        spline2.getKnots()[(axis2 + 1) :]
                        + spline2.getKnots()[:axis2]
                    )
                    spans2 = (
                        spline2.getSpans()[(axis2 + 1) :]
                        + spline2.getSpans()[:axis2]
                    )
                    for front_side1 in [False, True]:
                        pts1 = cls.extract_border_pts(ctrl_pts1, axis1, front_side1)
                        for front_side2 in [False, True]:
                            pts2 = cls.extract_border_pts(
                                ctrl_pts2, axis2, front_side2
                            )
                            for transpose in all_transpose:
                                # Cheap filters first: degrees, border
                                # shapes and knot-vector sizes must match
                                # under this transposition before the
                                # expensive value comparisons run.
                                if (
                                    degrees1 == [degrees2[i] for i in transpose]
                                ).all():
                                    if list(pts1.shape[1:]) == [
                                        pts2.shape[1:][i] for i in transpose
                                    ]:
                                        if np.all(
                                            [
                                                knots1[i].size
                                                == knots2[transpose[i]].size
                                                for i in range(NPa - 1)
                                            ]
                                        ):
                                            for flip in all_flip:
                                                # Exact knot match once
                                                # patch 2's border knots
                                                # are transposed/flipped.
                                                if np.all(
                                                    [
                                                        (k1 == k2).all()
                                                        for k1, k2 in zip(
                                                            knots1,
                                                            cls.transpose_and_flip_knots(
                                                                knots2,
                                                                spans2,
                                                                transpose,
                                                                flip,
                                                            ),
                                                        )
                                                    ]
                                                ):
                                                    pts2_turned = (
                                                        cls.transpose_and_flip(
                                                            pts2, transpose, flip
                                                        )
                                                    )
                                                    # Geometric match of the
                                                    # border control points.
                                                    if np.allclose(
                                                        pts1, pts2_turned
                                                    ):
                                                        spline1_inds.append(
                                                            spline1_ind
                                                        )
                                                        spline2_inds.append(
                                                            spline2_ind
                                                        )
                                                        axes1.append(axis1)
                                                        axes2.append(axis2)
                                                        front_sides1.append(
                                                            front_side1
                                                        )
                                                        front_sides2.append(
                                                            front_side2
                                                        )
                                                        transpose_2_to_1.append(
                                                            transpose
                                                        )
                                                        flip_2_to_1.append(flip)
    spline1_inds = np.array(spline1_inds, dtype="int")
    spline2_inds = np.array(spline2_inds, dtype="int")
    axes1 = np.array(axes1, dtype="int")
    axes2 = np.array(axes2, dtype="int")
    front_sides1 = np.array(front_sides1, dtype="bool")
    front_sides2 = np.array(front_sides2, dtype="bool")
    transpose_2_to_1 = np.array(transpose_2_to_1, dtype="int")
    flip_2_to_1 = np.array(flip_2_to_1, dtype="bool")
    return cls(
        spline1_inds,
        spline2_inds,
        axes1,
        axes2,
        front_sides1,
        front_sides2,
        transpose_2_to_1,
        flip_2_to_1,
        NPa,
    )
def append(self, other):
    """Merge another connectivity's couples into this one, in place.

    All per-couple arrays are stacked along their first (couple) axis and
    the couple counter is updated.

    Raises
    ------
    ValueError
        If the two operands have different parametric space dimensions.
    """
    if self.NPa != other.NPa:
        raise ValueError(
            f"operands could not be concatenated with parametric spaces of dimensions {self.NPa} and {other.NPa}"
        )
    for name in (
        "spline1_inds",
        "spline2_inds",
        "axes1",
        "axes2",
        "front_sides1",
        "front_sides2",
        "transpose_2_to_1",
        "flip_2_to_1",
    ):
        merged = np.concatenate((getattr(self, name), getattr(other, name)), axis=0)
        setattr(self, name, merged)
    self.nb_couples += other.nb_couples
def get_operator_allxi1_to_allxi2(self, spans1, spans2, couple_ind):
    """Return (A, b) such that xi2 = A @ xi1 + b maps full parametric
    coordinates of patch 1 onto those of patch 2 for the given couple.

    A is a signed axis-permutation matrix composed with the affine span
    rescalings of both patches; b carries the side/flip offsets.
    """
    ax1 = self.axes1[couple_ind]
    ax2 = self.axes2[couple_ind]
    front1 = self.front_sides1[couple_ind]
    front2 = self.front_sides2[couple_ind]
    transpose = self.transpose_2_to_1[couple_ind]
    flip = self.flip_2_to_1[couple_ind]

    # Signed permutation in the unit reference frame: the glued axis maps
    # with sign -1 when both sides face the same way, the border axes map
    # through the transposition with sign -1 where flipped.
    A = np.zeros((self.NPa, self.NPa), dtype="float")
    A[ax2, ax1] = 1.0 if front1 != front2 else -1.0
    axis_ids = np.arange(self.NPa)
    border_axes1 = np.hstack((axis_ids[(ax1 + 1) :], axis_ids[:ax1]))
    border_axes2 = np.hstack((axis_ids[(ax2 + 1) :], axis_ids[:ax2]))
    A[border_axes2[transpose], border_axes1] = np.where(flip, -1.0, 1.0)
    b = np.zeros(self.NPa, dtype="float")
    b[ax2] = (int(front1) + int(front2)) * (1 if front2 else -1)
    b[border_axes2[transpose]] = flip.astype("float")

    # Compose with the span rescalings: spans1 -> unit cube -> spans2.
    alpha1, beta1 = np.array(spans1).T
    scale1 = np.diag(1 / (beta1 - alpha1))
    shift1 = -alpha1 / (beta1 - alpha1)
    alpha2, beta2 = np.array(spans2).T
    scale2 = np.diag(beta2 - alpha2)
    b = alpha2 + scale2 @ b + scale2 @ A @ shift1
    A = scale2 @ A @ scale1

    return A, b
def get_connectivity(self, shape_by_patch):
    """Pair up the global node indices lying on both sides of every couple
    and build a `MultiPatchBSplineConnectivity` from those pairs.

    Nodes are numbered patch after patch in C order; for each couple the
    border index grids of both patches are extracted and patch 2's grid is
    reoriented into patch 1's border frame before pairing.
    """
    klass = self.__class__
    # One global-index grid per patch, numbered consecutively.
    patch_node_grids = []
    offset = 0
    for shp in shape_by_patch:
        n = int(np.prod(shp))
        patch_node_grids.append(np.arange(offset, offset + n).reshape(shp))
        offset += n
    couples = []
    for i in range(self.nb_couples):
        side1 = klass.extract_border_pts(
            patch_node_grids[self.spline1_inds[i]],
            self.axes1[i],
            self.front_sides1[i],
            field_dim=0,
        )
        side2 = klass.extract_border_pts(
            patch_node_grids[self.spline2_inds[i]],
            self.axes2[i],
            self.front_sides2[i],
            field_dim=0,
        )
        side2_aligned = klass.transpose_and_flip(
            side2, self.transpose_2_to_1[i], self.flip_2_to_1[i], field_dim=0
        )
        couples.append(
            np.hstack((side1.reshape((-1, 1)), side2_aligned.reshape((-1, 1))))
        )
    if len(couples) > 0:
        couples = np.vstack(couples)
    return MultiPatchBSplineConnectivity.from_nodes_couples(couples, shape_by_patch)
def get_borders_couples(self, separated_field, offset=0):
    """For every couple, extract the field values on both borders, with
    patch 2's border reoriented into patch 1's border frame.

    The number of leading field-component axes is inferred from the first
    patch's array; `offset` is forwarded to `extract_border_pts`.
    """
    field_dim = separated_field[0].ndim - self.NPa
    klass = self.__class__
    borders1 = []
    borders2_aligned = []
    for i in range(self.nb_couples):
        borders1.append(
            klass.extract_border_pts(
                separated_field[self.spline1_inds[i]],
                self.axes1[i],
                self.front_sides1[i],
                offset=offset,
                field_dim=field_dim,
            )
        )
        raw_border2 = klass.extract_border_pts(
            separated_field[self.spline2_inds[i]],
            self.axes2[i],
            self.front_sides2[i],
            offset=offset,
            field_dim=field_dim,
        )
        borders2_aligned.append(
            klass.transpose_and_flip(
                raw_border2,
                self.transpose_2_to_1[i],
                self.flip_2_to_1[i],
                field_dim=field_dim,
            )
        )
    return borders1, borders2_aligned
def get_borders_couples_splines(self, splines):
    """For every couple, extract both border splines, with patch 2's
    border spline reoriented into patch 1's border frame."""
    klass = self.__class__
    borders1 = []
    borders2_aligned = []
    for i in range(self.nb_couples):
        borders1.append(
            klass.extract_border_spline(
                splines[self.spline1_inds[i]], self.axes1[i], self.front_sides1[i]
            )
        )
        raw_border2 = klass.extract_border_spline(
            splines[self.spline2_inds[i]], self.axes2[i], self.front_sides2[i]
        )
        borders2_aligned.append(
            klass.transpose_and_flip_spline(
                raw_border2, self.transpose_2_to_1[i], self.flip_2_to_1[i]
            )
        )
    return borders1, borders2_aligned
def compute_border_couple_DN(
    self,
    couple_ind: int,
    splines: list[BSpline],
    XI1_border: list[np.ndarray],
    k1: list[int],
):
    """Evaluate the derivative operators of both patches of a couple at
    matching border parametric points.

    NOTE(review): this definition is immediately shadowed by the
    identically-named method defined right after it, so this version is
    dead code — confirm which implementation is intended and remove the
    other.

    `XI1_border` holds the (NPa-1) border coordinates in patch 1's border
    frame; `k1` holds the derivative order per parametric axis of patch 1.
    """
    spline1 = splines[self.spline1_inds[couple_ind]]
    ax1 = self.axes1[couple_ind]
    front1 = self.front_sides1[couple_ind]
    spline2 = splines[self.spline2_inds[couple_ind]]
    ax2 = self.axes2[couple_ind]
    front2 = self.front_sides2[couple_ind]
    # Lift the border coordinates into patch 1's full parametric space:
    # re-insert axis ax1, pinned at its front/back span end, at its
    # rotated position.
    XI1 = (
        XI1_border[(self.NPa - 1 - ax1) :]
        + [np.array([spline1.bases[ax1].span[int(front1)]])]
        + XI1_border[: (self.NPa - 1 - ax1)]
    )
    # Map the border coordinates into patch 2's border frame (inverse
    # permutation, then span-mirroring on flipped axes).
    transpose_back = np.argsort(self.transpose_2_to_1[couple_ind])
    flip_back = self.flip_2_to_1[couple_ind][transpose_back]
    spans = spline2.getSpans()[(ax2 + 1) :] + spline2.getSpans()[:ax2]
    XI2_border = [
        (
            (sum(spans[i]) - XI1_border[transpose_back[i]])
            if flip_back[i]
            else XI1_border[transpose_back[i]]
        )
        for i in range(self.NPa - 1)
    ]
    XI2 = (
        XI2_border[(self.NPa - 1 - ax2) :]
        + [np.array([spline2.bases[ax2].span[int(front2)]])]
        + XI2_border[: (self.NPa - 1 - ax2)]
    )
    # Reorder the derivative orders accordingly.
    # NOTE(review): k2 drops k1 at ax1 without the border-axis rotation
    # applied to XI1/XI2 above — verify the permutation convention.
    k2 = k1[: self.axes1[couple_ind]] + k1[(self.axes1[couple_ind] + 1) :]
    k2 = [k2[i] for i in transpose_back]
    k2 = (
        k2[: self.axes2[couple_ind]]
        + [k1[self.axes1[couple_ind]]]
        + k2[self.axes2[couple_ind] :]
    )
    DN1 = spline1.DN(XI1, k=k1)
    DN2 = spline2.DN(XI2, k=k2)
    return DN1, DN2
def compute_border_couple_DN(
    self,
    couple_ind: int,
    splines: list[BSpline],
    XI1_border: list[np.ndarray],
    k1: list[int],
):
    """Evaluate the derivative operators of both patches of a couple at
    the same physical border points.

    `XI1_border` holds the (NPa-1) border coordinates in patch 1's border
    frame; `k1` holds the derivative order per parametric axis of patch 1.
    Patch 2's operator is evaluated at the mapped coordinates and chain-
    ruled back into patch 1's parametric frame, so DN1 and DN2 are
    directly comparable.

    NOTE(review): this method shadows the identically-named definition
    right above it; this is the live implementation. Fixed here: removed
    the unused locals (ax2, front2, transpose) left over from the other
    version.
    """
    spline1 = splines[self.spline1_inds[couple_ind]]
    spans1 = spline1.getSpans()
    ax1 = self.axes1[couple_ind]
    front1 = self.front_sides1[couple_ind]
    # Lift the border coordinates into patch 1's full parametric space:
    # re-insert axis ax1, pinned at its front/back span end, at its
    # rotated position.
    XI1 = (
        XI1_border[(self.NPa - 1 - ax1) :]
        + [np.array([spline1.bases[ax1].span[int(front1)]])]
        + XI1_border[: (self.NPa - 1 - ax1)]
    )
    DN1 = spline1.DN(XI1, k=k1)

    spline2 = splines[self.spline2_inds[couple_ind]]
    spans2 = spline2.getSpans()
    # Affine map xi2 = A @ xi1 + b between the two parametric frames.
    A, b = self.get_operator_allxi1_to_allxi2(spans1, spans2, couple_ind)
    XI2 = []
    for i in range(self.NPa):
        # Each row of A has a single nonzero entry (signed permutation),
        # so xi2_i depends on exactly one xi1_j.
        j = np.argmax(np.abs(A[i]))
        XI2.append(A[i, j] * XI1[j] + b[i])

    k = int(sum(k1))
    DN2 = spline2.DN(XI2, k=k)
    if k != 0:
        # Chain rule: contract the k-th order derivative tensor of
        # patch 2 with k copies of A, then select the component matching
        # the requested mixed derivative orders k1.
        AT = 1
        for i in range(k):
            AT = np.tensordot(AT, A, 0)
        AT = AT.transpose(*2 * np.arange(k), *(2 * np.arange(k) + 1))
        DN2 = np.tensordot(DN2, AT, k)
        i1 = np.repeat(np.arange(self.NPa), k1)
        DN2 = DN2[tuple(i1.tolist())]
    return DN1, DN2