Merge pull request #242 from boutproject/interpolate-cartesian
Interpolate to a Cartesian grid
johnomotani authored Aug 3, 2022
2 parents ceb75b3 + da78317 commit fe0e8ee
Showing 7 changed files with 410 additions and 4 deletions.
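
In summary, the new feature is a pair of `bout` accessor methods, `BoutDataset.interpolate_to_cartesian()` and `BoutDataArray.interpolate_to_cartesian()`, together with `add_cartesian_coordinates()` helpers on both accessors. A minimal usage sketch follows; the data path, grid file and the variable name "n" are illustrative assumptions, not taken from this PR:

    from xbout import open_boutdataset

    # A toroidal geometry is needed so that R, Z and zeta coordinates exist;
    # the paths below are placeholders.
    ds = open_boutdataset(
        "data/BOUT.dmp.*.nc", gridfilepath="data/grid.nc", geometry="toroidal"
    )

    # Interpolate the whole Dataset onto a regular (X, Y, Z) grid (float32 by default)
    ds_cart = ds.bout.interpolate_to_cartesian(nX=300, nY=300, nZ=100)

    # Or a single variable; the DataArray method delegates to the Dataset one
    n_cart = ds["n"].bout.interpolate_to_cartesian(nX=300, nY=300, nZ=100)
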
2 changes: 1 addition & 1 deletion .github/workflows/pythonpackage.yml
@@ -55,7 +55,7 @@ jobs:
      matrix:
        python-version: [3.7, 3.8]
        pip-packages:
          - "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
          - "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==7.2.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
      fail-fast: false

    steps:
2 changes: 1 addition & 1 deletion .github/workflows/pythonpublish.yml
@@ -49,7 +49,7 @@ jobs:
      matrix:
        python-version: [3.7, 3.8]
        pip-packages:
          - "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
          - "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==7.2.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
      fail-fast: true

    steps:
53 changes: 52 additions & 1 deletion xbout/boutdataarray.py
@@ -15,7 +15,11 @@
from .plotting import plotfuncs
from .plotting.utils import _create_norm
from .region import _from_region
from .utils import _update_metadata_increased_resolution, _get_bounding_surfaces
from .utils import (
    _add_cartesian_coordinates,
    _update_metadata_increased_resolution,
    _get_bounding_surfaces,
)


@register_dataarray_accessor("bout")
@@ -381,6 +385,17 @@ def interpolate_parallel(

        return da

    def add_cartesian_coordinates(self):
        """
        Add Cartesian (X,Y,Z) coordinates.

        Returns
        -------
        DataArray with new coordinates added, which are named 'X_cartesian',
        'Y_cartesian', and 'Z_cartesian'
        """
        return _add_cartesian_coordinates(self.data)

    def remove_yboundaries(self, return_dataset=False, remove_extra_upper=False):
        """
        Remove y-boundary points, if present, from the DataArray
@@ -1001,6 +1016,42 @@ def interpolate_from_unstructured(

        return result

    def interpolate_to_cartesian(self, *args, **kwargs):
        """
        Interpolate the DataArray to a regular Cartesian grid.

        This method is intended to be used to produce data for visualisation, which
        normally does not require double-precision values, so by default the data is
        converted to `np.float32`. Pass `use_float32=False` to retain the original
        precision.

        Parameters
        ----------
        nX : int (default 300)
            Number of grid points in the X direction
        nY : int (default 300)
            Number of grid points in the Y direction
        nZ : int (default 100)
            Number of grid points in the Z direction
        use_float32 : bool (default True)
            Downgrade precision to `np.float32`?
        fill_value : float (default np.nan)
            Value to use for points outside the interpolation domain (passed to
            `scipy.RegularGridInterpolator`)

        See Also
        --------
        BoutDataset.interpolate_to_cartesian
        """
        da = self.data
        name = da.name
        ds = da.to_dataset()
        # Dataset needs geometry and metadata attributes, but these are not copied from
        # the DataArray by default
        ds.attrs["geometry"] = da.geometry
        ds.attrs["metadata"] = da.metadata
        return ds.bout.interpolate_to_cartesian(*args, **kwargs)[name]

    # BOUT-specific plotting functionality: methods that plot on a poloidal (R-Z) plane
    def contour(self, ax=None, **kwargs):
        """
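
For reference, the relation that the new 'X_cartesian', 'Y_cartesian' and 'Z_cartesian' coordinates satisfy in toroidal geometry (as asserted by test_add_cartesian_coordinates further down this page) is X = R cos(zeta), Y = R sin(zeta), Z = Z. Below is a standalone sketch of that mapping for plain NumPy arrays, assuming 2D R(x, theta) and Z(x, theta) arrays and a 1D toroidal angle zeta; the actual implementation is the `_add_cartesian_coordinates` helper imported from `.utils` above, whose diff is not shown on this page:

    import numpy as np

    def cartesian_from_toroidal(R, Z, zeta):
        # R, Z have shape (nx, ntheta); zeta has shape (nzeta,)
        X = R[:, :, np.newaxis] * np.cos(zeta)[np.newaxis, np.newaxis, :]
        Y = R[:, :, np.newaxis] * np.sin(zeta)[np.newaxis, np.newaxis, :]
        Z3d = np.broadcast_to(Z[:, :, np.newaxis], X.shape)
        return X, Y, Z3d
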
171 changes: 170 additions & 1 deletion xbout/boutdataset.py
@@ -27,7 +27,11 @@
    _parse_coord_option,
)
from .region import _from_region
from .utils import _get_bounding_surfaces, _split_into_restarts
from .utils import (
    _add_cartesian_coordinates,
    _get_bounding_surfaces,
    _split_into_restarts,
)


@xr.register_dataset_accessor("bout")
@@ -542,6 +546,171 @@ def interpolate_from_unstructured(

        return ds

    def interpolate_to_cartesian(
        self, nX=300, nY=300, nZ=100, *, use_float32=True, fill_value=np.nan
    ):
        """
        Interpolate the Dataset to a regular Cartesian grid.

        This method is intended to be used to produce data for visualisation, which
        normally does not require double-precision values, so by default the data is
        converted to `np.float32`. Pass `use_float32=False` to retain the original
        precision.

        Parameters
        ----------
        nX : int (default 300)
            Number of grid points in the X direction
        nY : int (default 300)
            Number of grid points in the Y direction
        nZ : int (default 100)
            Number of grid points in the Z direction
        use_float32 : bool (default True)
            Downgrade precision to `np.float32`?
        fill_value : float (default np.nan)
            Value to use for points outside the interpolation domain (passed to
            `scipy.RegularGridInterpolator`)

        See Also
        --------
        BoutDataArray.interpolate_to_cartesian
        """
        ds = self.data
        ds = ds.bout.add_cartesian_coordinates()

        if not isinstance(use_float32, bool):
            raise ValueError(f"use_float32 must be a bool, got '{use_float32}'")
        if use_float32:
            float_type = np.float32
            ds = ds.astype(float_type)
            for coord in ds.coords:
                # Coordinates are not converted by Dataset.astype, so convert explicitly
                ds[coord] = ds[coord].astype(float_type)
            fill_value = float_type(fill_value)
        else:
            float_type = ds[ds.data_vars[0]].dtype

        tdim = ds.metadata["bout_tdim"]
        zdim = ds.metadata["bout_zdim"]
        if tdim in ds.dims:
            nt = ds.sizes[tdim]
        n_toroidal = ds.sizes[zdim]

        # Create Cartesian grid to interpolate to
        Xmin = ds["X_cartesian"].min()
        Xmax = ds["X_cartesian"].max()
        Ymin = ds["Y_cartesian"].min()
        Ymax = ds["Y_cartesian"].max()
        Zmin = ds["Z_cartesian"].min()
        Zmax = ds["Z_cartesian"].max()
        newX_1d = xr.DataArray(np.linspace(Xmin, Xmax, nX), dims="X")
        newX = newX_1d.expand_dims({"Y": nY, "Z": nZ}, axis=[1, 2])
        newY_1d = xr.DataArray(np.linspace(Ymin, Ymax, nY), dims="Y")
        newY = newY_1d.expand_dims({"X": nX, "Z": nZ}, axis=[0, 2])
        newZ_1d = xr.DataArray(np.linspace(Zmin, Zmax, nZ), dims="Z")
        newZ = newZ_1d.expand_dims({"X": nX, "Y": nY}, axis=[0, 1])
        newR = np.sqrt(newX**2 + newY**2)
        newzeta = np.arctan2(newY, newX)
        # Define newzeta in range 0->2*pi
        newzeta = np.where(newzeta < 0.0, newzeta + 2.0 * np.pi, newzeta)

        from scipy.interpolate import (
            RegularGridInterpolator,
            griddata,
        )

        # Create Cylindrical coordinates for intermediate grid
        Rcyl_min = float_type(ds["R"].min())
        Rcyl_max = float_type(ds["R"].max())
        Zcyl_min = float_type(ds["Z"].min())
        Zcyl_max = float_type(ds["Z"].max())
        n_Rcyl = int(round(nZ * (Rcyl_max - Rcyl_min) / (Zcyl_max - Zcyl_min)))
        Rcyl = xr.DataArray(np.linspace(Rcyl_min, Rcyl_max, 2 * n_Rcyl), dims="r")
        Zcyl = xr.DataArray(np.linspace(Zcyl_min, Zcyl_max, 2 * nZ), dims="z")

        # Create Dataset for result
        result = xr.Dataset()
        result.attrs["metadata"] = ds.metadata

        # Interpolate in two stages for efficiency. Unstructured 3d interpolation is
        # very slow. Unstructured 2d interpolation onto Cartesian (R, Z) grids, followed
        # by structured 3d interpolation onto the (X, Y, Z) grid, is much faster.
        # Structured 3d interpolation straight from (psi, theta, zeta) to (X, Y, Z)
        # leaves artifacts in the output, because theta does not vary continuously
        # everywhere (has branch cuts).

        zeta_out = np.zeros(n_toroidal + 1)
        zeta_out[:-1] = ds[zdim].values
        zeta_out[-1] = zeta_out[-2] + ds["dz"].mean()

        def interp_single_time(da):
            print(" interpolate poloidal planes")

            da_cyl = da.bout.interpolate_from_unstructured(R=Rcyl, Z=Zcyl).transpose(
                "R", "Z", zdim, missing_dims="ignore"
            )

            if zdim not in da_cyl.dims:
                da_cyl = da_cyl.expand_dims({zdim: n_toroidal + 1}, axis=-1)
            else:
                # Impose toroidal periodicity by appending zdim=0 to end of array
                da_cyl = xr.concat((da_cyl, da_cyl.isel({zdim: 0})), zdim)

            print(" build 3d interpolator")
            interp = RegularGridInterpolator(
                (Rcyl.values, Zcyl.values, zeta_out),
                da_cyl.values,
                bounds_error=False,
                fill_value=fill_value,
            )

            print(" do 3d interpolation")
            return interp(
                (newR, newZ, newzeta),
                method="linear",
            )

        for name, da in ds.data_vars.items():
            print(f"\ninterpolating {name}")
            # order of dimensions does not really matter here - output only depends on
            # shape of newR, newZ, newzeta. Possibly more efficient to assign the 2d
            # results in the loop to the last two dimensions, so put zeta first. Can't
            # just use da.min().item() here (to get a scalar value instead of a
            # zero-size array) because .item() doesn't work for dask arrays (yet!).

            datamin = float_type(da.min().values)
            datamax = float_type(da.max().values)

            if tdim in da.dims:
                data_cartesian = np.zeros((nt, nX, nY, nZ), dtype=float_type)
                for tind in range(nt):
                    print(f" tind={tind}")
                    data_cartesian[tind, :, :, :] = interp_single_time(
                        da.isel({tdim: tind})
                    )
                result[name] = xr.DataArray(data_cartesian, dims=[tdim, "X", "Y", "Z"])
            else:
                data_cartesian = interp_single_time(da)
                result[name] = xr.DataArray(data_cartesian, dims=["X", "Y", "Z"])

            # Copy metadata to data variables, in case it is needed
            result[name].attrs["metadata"] = ds.metadata

        result = result.assign_coords(X=newX_1d, Y=newY_1d, Z=newZ_1d)

        return result

    def add_cartesian_coordinates(self):
        """
        Add Cartesian (X,Y,Z) coordinates.

        Returns
        -------
        Dataset with new coordinates added, which are named 'X_cartesian',
        'Y_cartesian', and 'Z_cartesian'
        """
        return _add_cartesian_coordinates(self.data)

    def remove_yboundaries(self, **kwargs):
        """
        Remove y-boundary points, if present, from the Dataset
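
Because the result lives on a regular (X, Y, Z) grid, with NaN (the default `fill_value`) outside the original domain, it can be sliced and plotted directly. A hedged sketch, reusing `ds_cart` from the example near the top of this page and assuming the usual time dimension name "t" and a variable "n":

    # Mid-plane toroidal slice at the last time point; points outside the
    # plasma are NaN, so they are masked automatically in the plot.
    ds_cart["n"].isel(t=-1, Z=ds_cart.sizes["Z"] // 2).plot(x="X", y="Y")
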
79 changes: 79 additions & 0 deletions xbout/tests/test_boutdataarray.py
@@ -954,6 +954,85 @@ def test_interpolate_parallel_toroidal_points_list(self, bout_xyt_example_files)

        xrt.assert_identical(n_highres_truncated, n_highres.isel(zeta=points_list))

    def test_interpolate_to_cartesian(self, bout_xyt_example_files):
        dataset_list = bout_xyt_example_files(
            None, lengths=(2, 16, 17, 18), nxpe=1, nype=1, nt=1
        )
        with pytest.warns(UserWarning):
            ds = open_boutdataset(
                datapath=dataset_list, inputfilepath=None, keep_xboundaries=False
            )

        ds["psixy"] = ds["g11"].copy(deep=True)
        ds["Rxy"] = ds["g11"].copy(deep=True)
        ds["Zxy"] = ds["g11"].copy(deep=True)

        r = np.linspace(1.0, 2.0, ds.metadata["nx"])
        theta = np.linspace(0.0, 2.0 * np.pi, ds.metadata["ny"])
        R = r[:, np.newaxis] * np.cos(theta[np.newaxis, :])
        Z = r[:, np.newaxis] * np.sin(theta[np.newaxis, :])
        ds["Rxy"].values[:] = R
        ds["Zxy"].values[:] = Z

        ds = apply_geometry(ds, "toroidal")

        da = ds["n"]
        da.values[:] = 1.0

        nX = 30
        nY = 30
        nZ = 10
        da_cartesian = da.bout.interpolate_to_cartesian(nX, nY, nZ)

        # Check a point inside the original grid
        npt.assert_allclose(
            da_cartesian.isel(t=0, X=round(nX * 4 / 5), Y=nY // 2, Z=nZ // 2).item(),
            1.0,
            rtol=1.0e-15,
            atol=1.0e-15,
        )
        # Check a point outside the original grid
        assert np.isnan(da_cartesian.isel(t=0, X=0, Y=0, Z=0).item())
        # Check output is float32
        assert da_cartesian.dtype == np.float32

    def test_add_cartesian_coordinates(self, bout_xyt_example_files):
        dataset_list = bout_xyt_example_files(None, nxpe=1, nype=1, nt=1)
        with pytest.warns(UserWarning):
            ds = open_boutdataset(
                datapath=dataset_list, inputfilepath=None, keep_xboundaries=False
            )

        ds["psixy"] = ds["g11"].copy(deep=True)
        ds["Rxy"] = ds["g11"].copy(deep=True)
        ds["Zxy"] = ds["g11"].copy(deep=True)

        r = np.linspace(1.0, 2.0, ds.metadata["nx"])
        theta = np.linspace(0.0, 2.0 * np.pi, ds.metadata["ny"])
        R = r[:, np.newaxis] * np.cos(theta[np.newaxis, :])
        Z = r[:, np.newaxis] * np.sin(theta[np.newaxis, :])
        ds["Rxy"].values[:] = R
        ds["Zxy"].values[:] = Z

        ds = apply_geometry(ds, "toroidal")

        zeta = ds["zeta"].values

        da = ds["n"].bout.add_cartesian_coordinates()

        npt.assert_allclose(
            da["X_cartesian"],
            R[:, :, np.newaxis] * np.cos(zeta[np.newaxis, np.newaxis, :]),
        )
        npt.assert_allclose(
            da["Y_cartesian"],
            R[:, :, np.newaxis] * np.sin(zeta[np.newaxis, np.newaxis, :]),
        )
        npt.assert_allclose(
            da["Z_cartesian"],
            Z[:, :, np.newaxis] * np.ones(ds.metadata["nz"])[np.newaxis, np.newaxis, :],
        )

    def test_ddx(self, bout_xyt_example_files):

        nx = 64
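
As a quick sanity check of the index choices in test_interpolate_to_cartesian above (a back-of-envelope sketch, not part of the test itself): with r in [1, 2] and the poloidal and toroidal angles covering a full circle, the Cartesian extents are roughly X, Y, Z in [-2, 2], so the sampled points fall where the test expects:

    import numpy as np

    nX, nY, nZ = 30, 30, 10
    X = np.linspace(-2.0, 2.0, nX)[round(nX * 4 / 5)]  # ~1.31
    Y = np.linspace(-2.0, 2.0, nY)[nY // 2]            # ~0.07
    Z = np.linspace(-2.0, 2.0, nZ)[nZ // 2]            # ~0.22
    # Poloidal cross-section is the annulus 1 <= sqrt(R_cyl**2 + Z**2) <= 2
    print(np.sqrt(np.hypot(X, Y) ** 2 + Z**2))  # ~1.33, inside -> expect 1.0
    # The origin (X=0, Y=0, Z=0) has cylindrical radius 0 < 1 -> outside -> expect NaN
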