-
Notifications
You must be signed in to change notification settings - Fork 83
/
mesh.py
1046 lines (876 loc) · 35.1 KB
/
mesh.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import logging
import importlib
from dataclasses import dataclass, replace
from typing import Callable, Dict, List, Optional, Tuple, Type, Union
import numpy as np
from numpy import ndarray
from ..element import BOUNDARY_ELEMENT_MAP, Element
from ..generic_utils import OrientedBoundary
logger = logging.getLogger(__name__)
@dataclass(repr=False)
class Mesh:
"""A mesh defined by :class:`~skfem.element.Element` class.
:class:`~skfem.mesh.Mesh` is defined as a combination of elements/cells by
specifying the spatial locations of the finite element nodes.
"""
doflocs: ndarray #: The locations of the finite element nodes
t: ndarray #: The connectivity of the elements/cells
_boundaries: Optional[Dict[str, ndarray]] = None
_subdomains: Optional[Dict[str, ndarray]] = None
elem: Type[Element] = Element
affine: bool = False #: Use :class:`~skfem.mapping.MappingAffine`?
# Some parts of the library, most notably the normal vector construction in
# ElementGlobal._eval_dofs, assume that the element indices are ascending
# because this leads to consistent normal vectors for both elements sharing
# a facet. Therefore, the element indices are sorted in a triangle mesh.
# However, some algorithms (e.g., adaptive refinement) require switching
# off this behaviour and, hence, this flag exists.
sort_t: bool = False
validate: bool = True # run validation check if log_level<=DEBUG
@property
def p(self):
return self.doflocs
@property
def dofs(self):
from skfem.assembly import Dofs
if not hasattr(self, '_dofs'):
self._dofs = Dofs(self, self.elem())
return self._dofs
@property
def refdom(self):
return self.elem.refdom
@property
def brefdom(self):
return self.elem.refdom.brefdom
@property
def bndelem(self):
if self.elem in BOUNDARY_ELEMENT_MAP:
return BOUNDARY_ELEMENT_MAP[self.elem]()
return None
@property
def nelements(self):
return self.t.shape[1]
@property
def nvertices(self):
return np.max(self.t) + 1
@property
def nfacets(self):
return self.facets.shape[1]
@property
def nedges(self):
return self.edges.shape[1]
@property
def nnodes(self):
return self.t.shape[0]
@property
def subdomains(self):
return self._subdomains
@property
def boundaries(self):
return self._boundaries
@property
def facets(self):
if not hasattr(self, '_facets'):
self._init_facets()
return self._facets
@property
def t2f(self):
if not hasattr(self, '_t2f'):
self._init_facets()
return self._t2f
@property
def f2t(self):
if not hasattr(self, '_f2t'):
self._f2t = self.build_inverse(self.t, self.t2f)
return self._f2t
@property
def edges(self):
if not hasattr(self, '_edges'):
self._init_edges()
return self._edges
@property
def t2e(self):
if not hasattr(self, '_t2e'):
self._init_edges()
return self._t2e
def dim(self):
return self.elem.refdom.dim()
def boundary_facets(self) -> ndarray:
"""Return an array of boundary facet indices."""
return np.nonzero(self.f2t[1] == -1)[0]
def boundary_edges(self) -> ndarray:
"""Return an array of boundary edge indices."""
facets = self.boundary_facets()
boundary_edges = np.sort(np.hstack(
tuple([np.vstack((self.facets[itr, facets],
self.facets[(itr + 1) % self.facets.shape[0],
facets]))
for itr in range(self.facets.shape[0])])).T, axis=1)
edge_candidates = np.unique(self.t2e[:, self.f2t[0, facets]])
A = self.edges[:, edge_candidates].T
B = boundary_edges
dims = A.max(0) + 1
ix = np.where(np.in1d(
np.ravel_multi_index(A.T, dims), # type: ignore
np.ravel_multi_index(B.T, dims), # type: ignore
))[0]
return edge_candidates[ix]
def with_boundaries(self,
boundaries: Dict[str, Union[Callable[[ndarray],
ndarray],
ndarray]],
boundaries_only: bool = True):
"""Return a copy of the mesh with named boundaries.
Parameters
----------
boundaries
A dictionary of index arrays or lambda functions with the names of
the boundaries as keys. The midpoint of the facet should return
``True`` for the corresponding lambda function if the facet belongs
to the boundary.
boundaries_only
If ``True``, consider only facets on the boundary of the domain.
"""
return replace(
self,
_boundaries={
**({} if self._boundaries is None else self._boundaries),
**{name: self.facets_satisfying(test_or_set, boundaries_only)
if callable(test_or_set) else test_or_set
for name, test_or_set in boundaries.items()}
},
)
def with_subdomains(self,
subdomains: Dict[str, Callable[[ndarray], ndarray]]):
"""Return a copy of the mesh with named subdomains.
Parameters
----------
boundaries
A dictionary of lambda functions with the names of the subdomains
as keys. The midpoint of the element should return ``True`` for
the corresponding lambda function if the element belongs to the
subdomain.
"""
return replace(
self,
_subdomains={
**({} if self._subdomains is None else self._subdomains),
**{name: self.elements_satisfying(test)
for name, test in subdomains.items()},
},
)
def _encode_point_data(self) -> Dict[str, List[ndarray]]:
subdomains = {} if self._subdomains is None else self._subdomains
boundaries = {} if self._boundaries is None else self._boundaries
def indicator(ix):
ind = np.zeros(self.nvertices)
ind[ix] = 1
return ind
return {
**{
f"skfem:s:{name}": indicator(np.unique(self.t[:, subdomain]
.flatten()))
for name, subdomain in subdomains.items()
},
**{
f"skfem:b:{name}": indicator(np.unique(self.facets[:, boundary]
.flatten()))
for name, boundary in boundaries.items()
},
}
def _encode_cell_data(self) -> Dict[str, List[ndarray]]:
subdomains = {} if self._subdomains is None else self._subdomains
boundaries = {} if self._boundaries is None else self._boundaries
def encode_boundary(boundary: Union[ndarray, OrientedBoundary]) -> ndarray:
if hasattr(boundary, "ori"):
ori = boundary.ori.astype(bool)
cells = np.vstack([np.isin(self.t2f, boundary[o]) for o in [ori, ~ori]])
else:
cells = np.isin(self.t2f, boundary)
return (1 << np.arange(cells.shape[0])) @ cells
return {
**{
f"skfem:s:{name}": [
np.isin(np.arange(self.t.shape[1]), subdomain).astype(int)
]
for name, subdomain in subdomains.items()
},
**{
f"skfem:b:{name}": [encode_boundary(boundary)]
for name, boundary in boundaries.items()
},
}
def _decode_cell_data(self, cell_data: Dict[str, List[ndarray]]):
subdomains = {}
boundaries = {}
for name, data in cell_data.items():
subnames = name.split(":")
if subnames[0] != "skfem":
continue
if subnames[1] == "s":
subdomains[subnames[2]] = np.nonzero(data[0])[0]
elif subnames[1] == "b":
if data[0].max() > (2 ** self.t2f.shape[0] - 1): # oriented
raise NotImplementedError
else:
boundaries[subnames[2]] = np.sort(self.t2f[
(1 << np.arange(self.t2f.shape[0]))[:, None]
& data[0].astype(np.int64) > 0
])
return boundaries, subdomains
def boundary_nodes(self) -> ndarray:
"""Return an array of boundary node indices."""
return np.unique(self.facets[:, self.boundary_facets()])
def interior_nodes(self) -> ndarray:
"""Return an array of interior node indices."""
return np.setdiff1d(np.arange(0, self.p.shape[1]),
self.boundary_nodes())
def nodes_satisfying(self,
test: Callable[[ndarray], ndarray],
boundaries_only: bool = False) -> ndarray:
"""Return nodes that satisfy some condition.
Parameters
----------
test
A function which returns ``True`` for the set of nodes that are to
be included in the return set.
boundaries_only
If ``True``, include only boundary facets.
"""
nodes = np.nonzero(test(self.p))[0]
if boundaries_only:
nodes = np.intersect1d(nodes, self.boundary_nodes())
return nodes
def facets_satisfying(self,
test: Callable[[ndarray], ndarray],
boundaries_only: bool = False,
normal: Optional[ndarray] = None) -> ndarray:
"""Return facets whose midpoints satisfy some condition.
Parameters
----------
test
A function which returns ``True`` for the facet midpoints that are
to be included in the return set.
boundaries_only
If ``True``, include only boundary facets.
normal
If given, used to orient the set of facets.
"""
midp = self.p[:, self.facets].mean(axis=1)
facets = np.nonzero(test(midp))[0]
if boundaries_only:
facets = np.intersect1d(facets, self.boundary_facets())
if normal is not None:
tind = self.f2t[0, facets]
mapping = self._mapping()
normals = mapping.normals(np.zeros((self.dim(), 1)),
tind,
facets,
self.t2f).T[0].T
ori = 1 * (np.dot(normal, normals) < 0)
return OrientedBoundary(facets, ori)
return facets
def facets_around(self, elements, flip=False) -> OrientedBoundary:
"""Return the oriented set of facets around a set of elements.
Parameters
----------
elements
An array of element indices or, alternatively, something that can
be cast into one via ``Mesh.normalize_elements``.
flip
If ``True``, use traces outside the subdomain and inward normals.
"""
elements = self.normalize_elements(elements)
facets, counts = np.unique(self.t2f[:, elements], return_counts=True)
facets = facets[counts == 1]
if flip:
ori = np.nonzero(~np.isin(self.f2t[:, facets], elements).T)[1]
else:
ori = np.nonzero(np.isin(self.f2t[:, facets], elements).T)[1]
return OrientedBoundary(facets, ori)
def elements_satisfying(self,
test: Callable[[ndarray], ndarray]) -> ndarray:
"""Return elements whose midpoints satisfy some condition.
Parameters
----------
test
A function which returns ``True`` for the element midpoints that
are to be included in the return set.
"""
midp = self.p[:, self.t].mean(axis=1)
return np.nonzero(test(midp))[0]
def _expand_facets(self, ix: ndarray) -> Tuple[ndarray, ndarray]:
"""Return vertices and edges corresponding to given facet indices.
Parameters
----------
ix
An array of facet indices.
"""
vertices = np.unique(self.facets[:, ix].flatten())
if self.dim() == 3:
edge_candidates = self.t2e[:, self.f2t[0, ix]].flatten()
# subset of edges that share all points with the given facets
subset = np.nonzero(
np.prod(np.isin(self.edges[:, edge_candidates],
self.facets[:, ix].flatten()),
axis=0)
)[0]
edges = np.intersect1d(self.boundary_edges(),
edge_candidates[subset])
else:
edges = np.array([], dtype=np.int64)
return vertices, edges
def _mapping(self):
"""Return a default reference mapping for the mesh."""
from skfem.mapping import MappingAffine, MappingIsoparametric
if not hasattr(self, '_cached_mapping'):
if self.affine:
self._cached_mapping = MappingAffine(self)
else:
self._cached_mapping = MappingIsoparametric(
self,
self.elem(),
self.bndelem,
)
return self._cached_mapping
def _init_facets(self):
"""Initialize ``self.facets``."""
self._facets, self._t2f = self.build_entities(
self.t,
self.elem.refdom.facets,
)
def _init_edges(self):
"""Initialize ``self.edges``."""
self._edges, self._t2e = self.build_entities(
self.t,
self.elem.refdom.edges,
)
def __post_init__(self):
"""Support node orders used in external formats.
We expect ``self.doflocs`` to be ordered based on the
degrees-of-freedom in :class:`skfem.assembly.Dofs`. External formats
for high order meshes commonly use a less strict ordering scheme and
the extra nodes are described as additional rows in ``self.t``. This
method attempts to accommodate external formas by reordering
``self.doflocs`` and changing the indices in ``self.t``.
"""
if self.sort_t:
self.t = np.sort(self.t, axis=0)
self.doflocs = np.asarray(self.doflocs, dtype=np.float64, order="K")
self.t = np.asarray(self.t, dtype=np.int64, order="K")
M = self.elem.refdom.nnodes
if self.nnodes > M:
# reorder DOFs to the expected format: vertex DOFs are first
p, t = self.doflocs, self.t
t_nodes = t[:M]
uniq, ix = np.unique(t_nodes, return_inverse=True)
self.t = (np.arange(len(uniq), dtype=np.int64)[ix]
.reshape(t_nodes.shape))
doflocs = np.hstack((
p[:, uniq],
np.zeros((p.shape[0], np.max(t) + 1 - len(uniq))),
))
doflocs[:, self.dofs.element_dofs[M:].flatten('F')] =\
p[:, t[M:].flatten('F')]
self.doflocs = doflocs
# C_CONTIGUOUS is more performant in dimension-based slices
if not self.doflocs.flags['C_CONTIGUOUS']:
if self.doflocs.shape[1] > 1e3:
logger.warning("Transforming over 1000 vertices "
"to C_CONTIGUOUS.")
self.doflocs = np.ascontiguousarray(self.doflocs)
if not self.t.flags['C_CONTIGUOUS']:
if self.t.shape[1] > 1e3:
logger.warning("Transforming over 1000 elements "
"to C_CONTIGUOUS.")
self.t = np.ascontiguousarray(self.t)
# add boundary tags for the default cubes
if ((self._boundaries is None
and self.doflocs.shape[1] == 2 ** self.dim()
and self.dim() <= 2)):
boundaries = {}
# default boundary names along the dimensions
minnames = ['left', 'bottom', 'front']
maxnames = ['right', 'top', 'back']
for d in range(self.doflocs.shape[0]):
dmin = np.min(self.doflocs[d])
ix = self.facets_satisfying(lambda x: x[d] == dmin)
if len(ix) >= 1:
boundaries[minnames[d]] = ix
for d in range(self.doflocs.shape[0]):
dmax = np.max(self.doflocs[d])
ix = self.facets_satisfying(lambda x: x[d] == dmax)
if len(ix) >= 1:
boundaries[maxnames[d]] = ix
if len(boundaries) > 0:
self._boundaries = boundaries
# run validation
if self.validate and logger.getEffectiveLevel() <= logging.DEBUG:
self.is_valid()
def is_valid(self, raise_=False) -> bool:
"""Perform expensive mesh validation.
Parameters
----------
raise_: raise an exception if the mesh is invalid.
Returns
-------
bool: True if the mesh is valid.
"""
logger.debug("Running mesh validation.")
# check that the shape of doflocs and t are correct
if self.doflocs.shape[0] != self.elem.refdom.dim():
msg = "Mesh.doflocs, the point array, has incorrect shape."
if raise_:
raise ValueError(msg)
logger.debug(msg)
return False
if self.t.shape[0] != self.elem.refdom.nnodes:
msg = "Mesh.t, the element connectivity, has incorrect shape."
if raise_:
raise ValueError(msg)
logger.debug(msg)
return False
# check that there are no duplicate points
tmp = np.ascontiguousarray(self.p.T)
p_unique = np.unique(tmp.view([('', tmp.dtype)] * tmp.shape[1]))
if self.p.shape[1] != p_unique.shape[0]:
msg = "Mesh contains duplicate vertices."
if raise_:
raise ValueError(msg)
logger.debug(msg)
return False
# check that all points are at least in some element
if len(np.setdiff1d(np.arange(self.p.shape[1]),
np.unique(self.t))) > 0:
msg = "Mesh contains a vertex not belonging to any element."
if raise_:
raise ValueError(msg)
logger.debug(msg)
return False
logger.debug("Mesh validation completed with no warnings.")
return True
def __rmatmul__(self, other):
out = self.__matmul__(other)
return out[1:] + out[0:1]
def __matmul__(self, other):
"""Join meshes with different types; return a list of meshes."""
cls = type(self)
if isinstance(other, Mesh):
other = [other]
if isinstance(other, list):
p = np.hstack((self.p,) + tuple([mesh.p for mesh in other]))
pT = np.ascontiguousarray(p.T)
_, ixa, ixb = np.unique(pT.view([('', pT.dtype)] * pT.shape[1]),
return_index=True, return_inverse=True)
p = p[:, ixa]
return [
cls(p, ixb[self.t]),
*[type(m)(p, ixb[m.t + self.p.shape[1]])
for i, m in enumerate(other)],
]
raise NotImplementedError
def __add__(self, other):
"""Join two meshes."""
cls = type(self)
if not isinstance(other, cls):
raise TypeError("Can only join meshes with same type.")
p = np.hstack((self.p.round(decimals=8),
other.p.round(decimals=8)))
t = np.hstack((self.t, other.t + self.p.shape[1]))
tmp = np.ascontiguousarray(p.T)
tmp, ixa, ixb = np.unique(tmp.view([('', tmp.dtype)] * tmp.shape[1]),
return_index=True, return_inverse=True)
p = p[:, ixa]
t = ixb[t]
return cls(p, t)
def __repr__(self):
rep = ""
rep += "<skfem {} object>\n".format(type(self).__name__)
rep += " Number of elements: {}\n".format(self.nelements)
rep += " Number of vertices: {}\n".format(self.nvertices)
rep += " Number of nodes: {}".format(self.p.shape[1])
if self.subdomains is not None:
rep += "\n Named subdomains [# elements]: {}".format(
', '.join(
map(lambda k: '{} [{}]'.format(k, len(self.subdomains[k])),
list(self.subdomains.keys()))
)
)
if self.boundaries is not None:
rep += "\n Named boundaries [# facets]: {}".format(
', '.join(
map(lambda k: '{} [{}]'.format(k, len(self.boundaries[k])),
list(self.boundaries.keys()))
)
)
return rep
def __str__(self):
return self.__repr__()
def save(self,
filename: str,
point_data: Optional[Dict[str, ndarray]] = None,
cell_data: Optional[Dict[str, ndarray]] = None,
**kwargs) -> None:
"""Export the mesh and fields using meshio.
Parameters
----------
filename
The output filename, with suffix determining format;
e.g. .msh, .vtk, .xdmf
point_data
Data related to the vertices of the mesh.
cell_data
Data related to the elements of the mesh.
"""
from skfem.io.meshio import to_file
return to_file(self,
filename,
point_data,
cell_data,
**kwargs)
@classmethod
def load(cls,
filename: str,
out: Optional[List[str]] = None,
**kwargs):
"""Load a mesh using meshio.
Parameters
----------
filename
The filename of the mesh file.
out
Optional list of ``meshio.Mesh`` attribute names, overwrite with
the corresponding data. E.g., ``['point_data', 'cell_data']``.
"""
from skfem.io.meshio import from_file
return from_file(filename,
out,
**kwargs)
@classmethod
def from_dict(cls, d):
from skfem.io.json import from_dict
return from_dict(cls, d)
def to_dict(self):
from skfem.io.json import to_dict
return to_dict(self)
@classmethod
def from_mesh(cls, mesh, t: Optional[ndarray] = None):
"""Reuse an existing mesh by adding the higher order nodes.
Parameters
----------
mesh
The mesh used in the initialization. Connectivity of the new mesh
will match ``mesh.t`` unless ``t`` is given
t
Optionally specify new connectivity for the resulting mesh.
"""
from skfem.assembly import Dofs
mapping = mesh._mapping()
nelem = cls.elem
dofs = Dofs(mesh, nelem())
locs = mapping.F(nelem.doflocs.T)
doflocs = np.zeros((locs.shape[0], dofs.N))
assert dofs.element_dofs is not None
# match mapped dofs and global dof numbering
for itr in range(locs.shape[0]):
for jtr in range(dofs.element_dofs.shape[0]):
doflocs[itr, dofs.element_dofs[jtr]] = locs[itr, :, jtr]
return cls(
doflocs=doflocs,
t=t if t is not None else mesh.t,
)
@classmethod
def init_refdom(cls):
"""Initialize a mesh corresponding to the reference domain."""
return cls(cls.elem.refdom.p, cls.elem.refdom.t, validate=False)
def morphed(self, *args):
"""Morph the mesh using functions."""
p = self.p.copy()
for i, arg in enumerate(args):
if arg is None:
continue
p[i] = arg(self.p)
return replace(self, doflocs=p)
def refined(self, times_or_ix: Union[int, ndarray] = 1):
"""Return a refined mesh.
Parameters
----------
times_or_ix
Either an integer giving the number of uniform refinements or an
array of element indices for adaptive refinement.
"""
m = self
has_boundaries = self.boundaries is not None
if self.subdomains is not None:
logger.warning("Named subdomains invalidated by a call to "
"Mesh.refined()")
if isinstance(times_or_ix, int):
for _ in range(times_or_ix):
m = m._uniform()
else:
m = m._adaptive(times_or_ix)
if has_boundaries and m.boundaries is None:
logger.warning("Named boundaries invalidated by a call to "
"Mesh.refined()")
return m
def scaled(self, factors):
"""Return a new mesh with scaled dimensions.
Parameters
----------
factors
Scale each dimension by a factor.
"""
if isinstance(factors, float):
# for backwards compatibility
factors = self.doflocs.shape[0] * [factors]
return replace(
self,
doflocs=np.array([self.doflocs[itr] * factors[itr]
for itr in range(len(factors))]),
)
def translated(self, diffs):
"""Return a new translated mesh.
Parameters
----------
diffs
Translate the mesh by a vector. Must have same size as the mesh
dimension.
"""
return replace(
self,
doflocs=np.array([self.doflocs[itr] + diffs[itr]
for itr in range(len(diffs))]),
)
def mirrored(self,
normal: Tuple[float, ...],
point: Optional[Tuple[float, ...]] = None):
"""Return a mesh mirrored with respect to a normal.
Meant to be combined with the other methods to build more general
meshes, e.g.,
>>> from skfem import MeshTet
>>> m1 = MeshTet()
>>> m2 = m1.mirrored((1, 0, 0))
>>> m3 = m1.mirrored((0, 1, 0))
>>> m4 = m1.mirrored((0, 0, 1))
>>> m = m1 + m2 + m3 + m4
>>> (m.nvertices, m.nelements)
(20, 20)
Parameters
----------
normal
The normal vector of the mirror plane.
point
An optional point through which the plane passes. By default, the
point corresponds to the origin.
"""
if point is None:
point = (0,) * self.dim()
p = self.p.copy()
p0 = np.array(point)
n = np.array(normal)
n = n / np.linalg.norm(n)
p = p - 2. * np.dot(n, p - p0[:, None]) * n[:, None]
return replace(
self,
doflocs=p,
)
def smoothed(self, fixed_nodes=None):
"""Laplacian smoothing.
Parameters
----------
fixed_nodes
A list of node indices that do not move. By default, the boundary
nodes are fixed.
"""
if fixed_nodes is None:
fixed_nodes = self.boundary_nodes()
p = np.zeros(self.doflocs.shape)
nv = p.shape[1]
edges = self.edges if p.shape[0] == 3 else self.facets
nneighbors = np.bincount(edges.reshape(-1), minlength=nv)
p += np.array([np.bincount(edges[0], pi, minlength=nv)
for pi in self.doflocs[:, edges[1]]])
p += np.array([np.bincount(edges[1], pi, minlength=nv)
for pi in self.doflocs[:, edges[0]]])
p /= nneighbors
p[:, fixed_nodes] = self.doflocs[:, fixed_nodes]
return replace(
self,
doflocs=p,
)
def _uniform(self):
"""Perform a single uniform refinement."""
raise NotImplementedError
def _adaptive(self, ix: ndarray):
"""Adaptively refine the given set of elements."""
raise NotImplementedError
def _splitref(self, nrefs: int = 1):
"""Split mesh into separate nonconnected elements and refine.
Used for visualization purposes.
Parameters
----------
nrefs
The number of refinements.
"""
cls = type(self)
m = cls.init_refdom().refined(nrefs)
X = m.p
x = self._mapping().F(m.p)
# create connectivity for the new mesh
nt = self.nelements
t = np.tile(m.t, (1, nt))
dt = np.max(t)
t += ((dt + 1)
* (np.tile(np.arange(nt), (m.t.shape[0] * m.t.shape[1], 1))
.flatten('F')
.reshape((-1, m.t.shape[0])).T))
if X.shape[0] == 1:
p = np.array([x.flatten()])
else:
p = x[0].flatten()
for itr in range(len(x) - 1):
p = np.vstack((p, x[itr + 1].flatten()))
# Always creates a mesh with duplicate verticies. Use validate=False to
# suppress confusing logger DEBUG messages.
return cls(p, t, validate=False)
@staticmethod
def build_entities(t, indices, sort=True):
"""Build low dimensional topological entities."""
if indices is None:
return None, None
indexing = np.hstack(tuple([t[ix] for ix in indices]))
sorted_indexing = np.sort(indexing, axis=0)
sorted_indexing, ixa, ixb = np.unique(sorted_indexing,
axis=1,
return_index=True,
return_inverse=True)
mapping = ixb.reshape((len(indices), t.shape[1]))
if sort:
return np.ascontiguousarray(sorted_indexing), mapping
return np.ascontiguousarray(indexing[:, ixa]), mapping
@staticmethod
def build_inverse(t, mapping):
"""Build inverse mapping from low dimensional topological entities."""
e = mapping.flatten(order='C')
tix = np.tile(np.arange(t.shape[1]), (1, t.shape[0]))[0]
e_first, ix_first = np.unique(e, return_index=True)
e_last, ix_last = np.unique(e[::-1], return_index=True)
ix_last = e.shape[0] - ix_last - 1
inverse = np.zeros((2, np.max(mapping) + 1), dtype=np.int64)
inverse[0, e_first] = tix[ix_first]
inverse[1, e_last] = tix[ix_last]
inverse[1, np.nonzero(inverse[0] == inverse[1])[0]] = -1
return inverse
@staticmethod
def strip_extra_coordinates(p: ndarray) -> ndarray:
"""Fallback for 3D meshes."""
return p
def param(self) -> float:
"""Return mesh parameter, viz the length of the longest edge."""
raise NotImplementedError
def _reix(self, ix: ndarray) -> Tuple[ndarray, ndarray]:
"""Connect ``self.p`` based on the indices ``ix``."""
ixuniq = np.unique(ix)
t = np.zeros(np.max(ix) + 1, dtype=np.int64)
t[ixuniq] = np.arange(len(ixuniq), dtype=np.int64)
return self.p[:, ixuniq], t[ix]
def remove_elements(self, element_indices: ndarray):
"""Construct a new mesh by removing elements.
Parameters
----------
element_indices
List of element indices to remove.
"""
p, t = self._reix(np.delete(self.t, element_indices, axis=1))
return replace(
self,
doflocs=p,
t=t,
)
def element_finder(self, mapping=None):
"""Return a function handle from location to element index.
Parameters
----------
mapping
The affine mapping for the mesh.
"""
raise NotImplementedError
def draw(self, visuals='matplotlib', **kwargs):
"""Convenience wrapper for skfem.visuals."""
mod = importlib.import_module('skfem.visuals.{}'.format(visuals))
return mod.draw(self, **kwargs)
def plot(self, x, visuals='matplotlib', **kwargs):
"""Convenience wrapper for skfem.visuals."""
mod = importlib.import_module('skfem.visuals.{}'.format(visuals))
return mod.plot(self, x, **kwargs)
def normalize_facets(self, facets) -> ndarray:
"""Generate an array of facet indices.
Parameters
----------
elements
Criteria of which facets to include. Function has different
behavior based on the type of this parameter.
"""
if isinstance(facets, int):
# Make normalize_facets([1,2,3]) have the same behavior as
# normalize_facets(np.array([1,2,3]))
return np.array([facets])
if isinstance(facets, ndarray):
# Assume the facets have already been normalized
return facets
if facets is None:
# Default behavior.
return self.boundary_facets()
elif isinstance(facets, (tuple, list, set)):
# Recurse over the list, building an array of all matching facets
return np.unique(
np.concatenate(
[self.normalize_facets(f) for f in facets]
)
)
elif callable(facets):
# The callable should accept an array of facet centers and return
# an boolean array with True for facets that should be included.