issue with symmetric grids #451

Merged (12 commits, Apr 2, 2023)

desc/grid.py: 203 changes (138 additions, 65 deletions)
@@ -53,12 +53,12 @@ def __init__(self, nodes, sort=True):

self._nodes, self._spacing = self._create_nodes(nodes)

dtheta_scale = self._enforce_symmetry()
scaled_idx, dtheta_scale = self._enforce_symmetry()
if sort:
self._sort_nodes()
self._find_axis()
self._count_nodes()
self._scale_weights(dtheta_scale)
self._scale_weights(scaled_idx, dtheta_scale)

def _enforce_symmetry(self):
"""Enforce stellarator symmetry.
@@ -70,33 +70,64 @@ def _enforce_symmetry(self):

Returns
-------
scaled_idx : ndarray
The indices of the nodes which had dtheta scaled by the factor dtheta_scale.
dtheta_scale : ndarray
The multiplicative factor to scale the theta spacing for each theta curve.
The multiplicative factor used to scale the theta spacing for each rho surface.
number of nodes / (number of nodes - number of nodes to delete)

"""
if self.sym:
non_sym_idx = np.where(self.nodes[:, 1] > np.pi)
__, inverse, nodes_per_rho_surf = np.unique(
self.nodes[:, 0], return_inverse=True, return_counts=True
# indices where theta coordinate is off the symmetry line of theta=0 or pi
off_sym_line_idx = np.where(self.nodes[:, 1] % np.pi != 0)[0]
__, inverse, off_sym_line_per_rho_surf_count = np.unique(
self.nodes[off_sym_line_idx, 0], return_inverse=True, return_counts=True
)
__, non_sym_per_rho_surf = np.unique(
self.nodes[non_sym_idx, 0], return_counts=True
# indices of nodes to be deleted
to_delete_idx = np.where(self.nodes[:, 1] > np.pi)[0]
__, to_delete_per_rho_surf_count = np.unique(
self.nodes[to_delete_idx, 0], return_counts=True
)
if len(nodes_per_rho_surf) > len(non_sym_per_rho_surf):
assert (
off_sym_line_per_rho_surf_count.size
>= to_delete_per_rho_surf_count.size
and 2 * np.pi not in self.nodes[:, 1]
)
if off_sym_line_per_rho_surf_count.size > to_delete_per_rho_surf_count.size:
# edge case where surfaces closest to axis lack theta > pi nodes
pad_count = len(nodes_per_rho_surf) - len(non_sym_per_rho_surf)
non_sym_per_rho_surf = np.pad(non_sym_per_rho_surf, (pad_count, 0))
# assumes number of theta nodes to delete is constant over zeta
scale = nodes_per_rho_surf / (nodes_per_rho_surf - non_sym_per_rho_surf)
# arrange scale factors to match spacing's arbitrary ordering
# The number of nodes to delete on those surfaces is zero.
pad_count = (
off_sym_line_per_rho_surf_count.size
- to_delete_per_rho_surf_count.size
)
to_delete_per_rho_surf_count = np.pad(
to_delete_per_rho_surf_count, (pad_count, 0)
)
# The computation of this scale factor assumes
# 1. number of nodes to delete is constant over zeta
# 2. number of nodes off symmetry line is constant over zeta
# 3. uniform theta spacing between nodes
# The first two assumptions let _per_theta_curve = _per_rho_surf.
# The third assumption lets the scale factor be constant over a
# particular theta curve, so that each node in the open interval
# (0, pi) has its spacing scaled up by the same factor.
# Nodes at endpoints 0, pi should not be scaled.
scale = off_sym_line_per_rho_surf_count / (
off_sym_line_per_rho_surf_count - to_delete_per_rho_surf_count
)
# Arrange scale factors to match spacing's arbitrary ordering.
scale = scale[inverse]

self._spacing[:, 1] *= scale
self._nodes = np.delete(self.nodes, non_sym_idx, axis=0)
self._spacing = np.delete(self.spacing, non_sym_idx, axis=0)
return np.delete(scale, non_sym_idx)
return 1
dtheta_scale = np.ones_like(self._spacing[:, 1])
dtheta_scale[off_sym_line_idx] = scale

# Scale up all nodes so that their spacing accounts for the node
# that is their reflection across the symmetry line.
self._spacing[off_sym_line_idx, 1] *= scale
self._nodes = np.delete(self.nodes, to_delete_idx, axis=0)
self._spacing = np.delete(self.spacing, to_delete_idx, axis=0)
scaled_idx = np.where(self.nodes[:, 1] % np.pi != 0)[0]
return scaled_idx, dtheta_scale[scaled_idx]
return np.arange(0), np.array([1])
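For intuition, here is a minimal standalone sketch of the scale-factor logic above, assuming a single rho surface with uniformly spaced theta nodes (hypothetical values, not the DESC API):

import numpy as np

# One rho surface with 8 uniformly spaced theta nodes on [0, 2*pi).
theta = np.linspace(0, 2 * np.pi, 8, endpoint=False)
dtheta = np.full_like(theta, 2 * np.pi / theta.size)

off_sym_line = theta % np.pi != 0   # nodes not on theta = 0 or pi
to_delete = theta > np.pi           # reflections that will be removed
scale = off_sym_line.sum() / (off_sym_line.sum() - to_delete.sum())

# Each surviving off-line node absorbs the spacing of its deleted
# reflection across the theta = pi line.
dtheta[off_sym_line & ~to_delete] *= scale
theta, dtheta = theta[~to_delete], dtheta[~to_delete]

# Total theta spacing is preserved.
assert np.isclose(dtheta.sum(), 2 * np.pi)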

def _sort_nodes(self):
"""Sort nodes for use with FFT."""
@@ -123,14 +123,16 @@ def _count_nodes(self):
self._num_theta = self._unique_theta_idx.size
self._num_zeta = self._unique_zeta_idx.size

def _scale_weights(self, dtheta_scale):
def _scale_weights(self, scaled_idx, dtheta_scale):
"""Scale weights sum to full volume and reduce weights for duplicated nodes.

Parameters
----------
scaled_idx : ndarray
The indices of the nodes which had dtheta scaled by the factor dtheta_scale.
dtheta_scale : ndarray
The multiplicative factor to scale the theta spacing for each theta curve.

The multiplicative factor used to scale the theta spacing for each rho surface.
number of nodes / (number of nodes - number of nodes to delete)
"""
nodes = self.nodes.copy().astype(float)
nodes[:, 1] %= 2 * np.pi
@@ -143,7 +176,7 @@ def _scale_weights(self, dtheta_scale):
temp_spacing = np.copy(self.spacing)
temp_spacing /= duplicates ** (1 / 3)
# assign weights pretending _enforce_symmetry didn't change theta spacing
temp_spacing[:, 1] /= dtheta_scale
temp_spacing[scaled_idx, 1] /= dtheta_scale
# scale weights sum to full volume
temp_spacing *= (4 * np.pi**2 / temp_spacing.prod(axis=1).sum()) ** (1 / 3)
self._weights = temp_spacing.prod(axis=1)
@@ -164,6 +197,7 @@ def _scale_weights(self, dtheta_scale):
# scale areas sum to full area
# The following operation is not a general solution to return the weight
# removed from the duplicate nodes back to the unique nodes.
# (For the 3 predefined grid types this line of code has no effect).
# For this reason, duplicates should typically be deleted rather than rescaled.
# Note we multiply each column by duplicates^(1/6) to account for the extra
# division by duplicates^(1/2) in one of the columns above.
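As a toy illustration of the weight bookkeeping above (made-up spacing values, not the DESC API): dividing every spacing column of a duplicated node by duplicates**(1/3) divides its weight, the product of the columns, by the duplicate count, and the final rescaling restores the 4*pi**2 sum.

import numpy as np

spacing = np.tile([0.5, np.pi, 2 * np.pi], (3, 1))
duplicates = np.array([1, 2, 2])  # rows 1 and 2 represent the same node

temp = spacing / duplicates[:, None] ** (1 / 3)
temp *= (4 * np.pi**2 / temp.prod(axis=1).sum()) ** (1 / 3)
weights = temp.prod(axis=1)

assert np.isclose(weights.sum(), 4 * np.pi**2)
assert np.isclose(weights[1], weights[0] / 2)  # duplicate carries half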
@@ -188,8 +222,8 @@ def _create_nodes(self, nodes):

"""
nodes = np.atleast_2d(nodes).reshape((-1, 3)).astype(float)
nodes[nodes[:, 1] > 2 * np.pi, 1] %= 2 * np.pi
nodes[nodes[:, 2] > 2 * np.pi / self.NFP, 2] %= 2 * np.pi / self.NFP
nodes[nodes[:, 1] != 2 * np.pi, 1] %= 2 * np.pi
nodes[nodes[:, 2] != 2 * np.pi / self.NFP, 2] %= 2 * np.pi / self.NFP
spacing = ( # make weights sum to 4pi^2
np.ones_like(nodes) * np.array([1, 2 * np.pi, 2 * np.pi]) / nodes.shape[0]
)
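A small example of the boundary-preserving wrap used here, assuming the intent is to map angles into [0, 2*pi] while keeping an exact 2*pi endpoint distinct from 0 (2*pi % 2*pi would collapse it onto the first node):

import numpy as np

theta = np.array([-np.pi / 2, np.pi, 3 * np.pi, 2 * np.pi])
theta[theta != 2 * np.pi] %= 2 * np.pi
# -> [3*pi/2, pi, pi, 2*pi]: negatives are wrapped too, and the endpoint
# survives, which the old `> 2*pi` condition did not guarantee.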
@@ -375,7 +409,7 @@ def __init__(
self._NFP = NFP
self._axis = axis
self._sym = sym
self._endpoint = endpoint
self._endpoint = bool(endpoint)
self._node_pattern = "linear"

self._nodes, self._spacing = self._create_nodes(
@@ -390,11 +424,11 @@ def __init__(
zeta=zeta,
)

dtheta_scale = self._enforce_symmetry()
# symmetry handled in _create_nodes()
self._sort_nodes()
self._find_axis()
self._count_nodes()
self._scale_weights(dtheta_scale)
self._scale_weights(scaled_idx=np.arange(0), dtheta_scale=np.array([1]))

def _create_nodes( # noqa: C901
self,
@@ -477,9 +511,21 @@ def _create_nodes( # noqa: C901
else:
self._M = len(np.atleast_1d(theta))
if np.isscalar(theta) and (int(theta) == theta) and theta > 0:
t = np.linspace(0, 2 * np.pi, int(theta), endpoint=endpoint)
if self.sym and t.size > 1:
theta = int(theta)
if self.sym and theta > 1:
# Enforce that no node lies on theta=0 or theta=2pi, so that
# each node has a symmetric counterpart, and
# that, for all i, t[i]-t[i-1] = 2 t[0] = 2 (pi - t[last node before pi]).
# Both conditions necessary to evenly space nodes with constant dt.
# This can be done by making (theta + endpoint) an even integer.
if (theta + endpoint) % 2 != 0:
theta += 1
t = np.linspace(0, 2 * np.pi, theta, endpoint=endpoint)
t += t[1] / 2
# delete theta > pi nodes
t = t[: np.searchsorted(t, np.pi, side="right")]
else:
t = np.linspace(0, 2 * np.pi, theta, endpoint=endpoint)
dt = 2 * np.pi / t.size * np.ones_like(t)
if (endpoint and not self.sym) and t.size > 1:
# increase node weight to account for duplicate node
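A standalone sketch of the symmetric theta construction above, using theta = 6 and endpoint = False (hypothetical inputs, not the DESC API):

import numpy as np

theta, endpoint = 6, False
if (theta + endpoint) % 2 != 0:
    theta += 1  # make (theta + endpoint) even, per the conditions above
t = np.linspace(0, 2 * np.pi, theta, endpoint=endpoint)
t += t[1] / 2  # half-step shift: no node lands on theta = 0 or 2*pi
t = t[: np.searchsorted(t, np.pi, side="right")]  # keep theta <= pi
# t == [pi/6, pi/2, 5*pi/6]; each deleted node (7*pi/6, 3*pi/2, 11*pi/6)
# is the reflection of a kept node across the theta = pi line.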
@@ -489,33 +535,60 @@ def _create_nodes( # noqa: C901
else:
t = np.atleast_1d(theta).astype(float)
# need to sort to compute correct spacing
t[t > 2 * np.pi] %= 2 * np.pi
SUP = 2 * np.pi # supremum
t[t != SUP] %= SUP
t = np.sort(t)
if self.sym:
# delete theta > pi nodes
t = t[: np.searchsorted(t, np.pi, side="right")]
dt = np.zeros_like(t)
if t.size > 1:
# choose dt to be half the cyclic distance of the surrounding two nodes
SUP = 2 * np.pi # supremum
dt[0] = t[1] + (SUP - (t[-1] % SUP)) % SUP
# choose dt to be the cyclic distance of the surrounding two nodes
dt[1:-1] = t[2:] - t[:-2]
dt[-1] = t[0] + (SUP - (t[-2] % SUP)) % SUP
dt /= 2
if t.size == 2:
dt[-1] = dt[0]
if t[0] == 0 and t[-1] == SUP:
# The cyclic distance algorithm above correctly weights
# the duplicate endpoint node spacing at theta = 0 and 2pi
# to be half the weight of the other nodes.
if not self.sym:
if not self.sym:
dt[0] = t[1] + (SUP - t[-1]) % SUP
dt[-1] = t[0] + (SUP - t[-2]) % SUP
dt /= 2 # choose dt to be half the cyclic distance
if t.size == 2:
assert dt[0] == np.pi and dt[-1] == 0
dt[-1] = dt[0]
if t[0] == 0 and t[-1] == SUP:
# The cyclic distance algorithm above correctly weights
# the duplicate endpoint node spacing at theta = 0 and 2pi
# to be half the weight of the other nodes.
# However, scale_weights() is not aware of this, so we
# counteract the reduction that will be done there.
dt[0] += dt[-1]
dt[-1] = dt[0]
else:
first_positive_idx = np.searchsorted(t, 0, side="right")
if first_positive_idx == 0:
# then there are no nodes at theta=0
dt[0] = t[0] + t[1]
else:
# Symmetry deletion will delete the duplicate node at 2pi.
# Need to move weight from non-duplicate nodes back to the
# node at theta = 0.
dt[0] += dt[-1]
dt *= (t.size - 1) / t.size
# total spacing of nodes at theta=0 should be half the
# distance between first positive node and its
# reflection across the theta=0 line.
dt[0] = t[first_positive_idx]
assert (first_positive_idx == 1) or (
dt[0] == dt[first_positive_idx - 1]
)
# If the first condition is false and the latter true, then both of those dt
# should be halved. The scale_weights() function will handle this.
first_pi_idx = np.searchsorted(t, np.pi, side="left")
if first_pi_idx == t.size:
# then there are no nodes at theta=pi
dt[-1] = (SUP - t[-1]) - t[-2]
else:
# total spacing of nodes at theta=pi should be half the
# distance between first node < pi and its
# reflection across the theta=pi line.
dt[-1] = (SUP - t[-1]) - t[first_pi_idx - 1]
assert (first_pi_idx == t.size - 1) or (
dt[first_pi_idx] == dt[-1]
)
# If the first condition is false and the latter true, then both of those dt
# should be halved. The scale_weights() function will handle this.
else:
dt = np.array([2 * np.pi])
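A minimal sketch of the half-cyclic-distance rule above for arbitrary sorted theta nodes (made-up values, no symmetry, no duplicate endpoints):

import numpy as np

SUP = 2 * np.pi
t = np.array([0.0, 1.0, 2.5, 5.0])  # already sorted
dt = np.zeros_like(t)
dt[0] = t[1] + (SUP - t[-1]) % SUP   # neighbor distance wraps through 0
dt[1:-1] = t[2:] - t[:-2]
dt[-1] = t[0] + (SUP - t[-2]) % SUP  # wraps through 2*pi
dt /= 2  # half the distance between each node's two cyclic neighbors

# The spacings tile the circle exactly once.
assert np.isclose(dt.sum(), 2 * np.pi)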

@@ -538,15 +611,15 @@ def _create_nodes( # noqa: C901
else:
z = np.atleast_1d(zeta).astype(float)
# need to sort to compute correct spacing
z[z > 2 * np.pi / self.NFP] %= 2 * np.pi / self.NFP
SUP = 2 * np.pi / self.NFP # supremum
z[z != SUP] %= SUP
z = np.sort(z)
dz = np.zeros_like(z)
if z.size > 1:
# choose dz to be half the cyclic distance of the surrounding two nodes
SUP = 2 * np.pi / self.NFP # supremum
dz[0] = z[1] + (SUP - (z[-1] % SUP)) % SUP
dz[0] = z[1] + (SUP - z[-1]) % SUP
dz[1:-1] = z[2:] - z[:-2]
dz[-1] = z[0] + (SUP - (z[-2] % SUP)) % SUP
dz[-1] = z[0] + (SUP - z[-2]) % SUP
dz /= 2
dz *= self.NFP
if z.size == 2:
@@ -561,8 +634,11 @@ def _create_nodes( # noqa: C901
else:
dz = np.array([2 * np.pi])

self._endpoint = (t[0] == 0 and t[-1] == 2 * np.pi) and (
z[0] == 0 and z[-1] == 2 * np.pi / self.NFP
self._endpoint = (
t.size > 0
and z.size > 0
and (t[0] == 0 and t[-1] == 2 * np.pi)
and (z[0] == 0 and z[-1] == 2 * np.pi / self.NFP)
)
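A quick check of the endpoint detection above, assuming NFP = 1 and small hand-built node arrays (hypothetical, not the DESC API):

import numpy as np

NFP = 1
t = np.array([0.0, np.pi, 2 * np.pi])
z = np.array([0.0, 2 * np.pi / NFP])
endpoint = (
    t.size > 0
    and z.size > 0
    and (t[0] == 0 and t[-1] == 2 * np.pi)
    and (z[0] == 0 and z[-1] == 2 * np.pi / NFP)
)
assert endpoint  # both coordinates span their full period
# Dropping the final 2*pi node from t would make this False, and the
# size guards keep empty node arrays from raising an IndexError.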

r, t, z = np.meshgrid(r, t, z, indexing="ij")
@@ -608,10 +684,10 @@ def change_resolution(self, L, M, N, NFP=None):
axis=len(self.axis) > 0,
endpoint=self.endpoint,
)
dtheta_scale = self._enforce_symmetry()
scaled_idx, dtheta_scale = self._enforce_symmetry()
self._sort_nodes()
self._find_axis()
self._scale_weights(dtheta_scale)
self._scale_weights(scaled_idx, dtheta_scale)

@property
def endpoint(self):
@@ -731,12 +807,9 @@ def change_resolution(self, L, M, N, NFP=None):
self._M = M
self._N = N
self._nodes, self._spacing = self._create_nodes(L=L, M=M, N=N, NFP=self.NFP)
dtheta_scale = self._enforce_symmetry()
self._sort_nodes()
self._find_axis()
temp_spacing = np.copy(self.spacing)
temp_spacing[:, 1] /= dtheta_scale
self._weights = temp_spacing.prod(axis=1) # instead of _scale_weights
self._weights = self.spacing.prod(axis=1) # instead of _scale_weights


class ConcentricGrid(Grid):
Expand Down Expand Up @@ -792,11 +865,11 @@ def __init__(self, L, M, N, NFP=1, sym=False, axis=False, node_pattern="jacobi")
node_pattern=self.node_pattern,
)

dtheta_scale = self._enforce_symmetry()
scaled_idx, dtheta_scale = self._enforce_symmetry()
self._sort_nodes()
self._find_axis()
self._count_nodes()
self._scale_weights(dtheta_scale)
self._scale_weights(scaled_idx, dtheta_scale)

def _create_nodes(self, L, M, N, NFP=1, axis=False, node_pattern="jacobi"):
"""Create grid nodes and weights.
@@ -937,10 +1010,10 @@ def change_resolution(self, L, M, N, NFP=None):
axis=len(self.axis) > 0,
node_pattern=self.node_pattern,
)
dtheta_scale = self._enforce_symmetry()
scaled_idx, dtheta_scale = self._enforce_symmetry()
self._sort_nodes()
self._find_axis()
self._scale_weights(dtheta_scale)
self._scale_weights(scaled_idx, dtheta_scale)


# these functions are currently unused ---------------------------------------