Update filesystems.py
gmaze committed Oct 25, 2024
1 parent dca8524 commit 23f5f52
Showing 1 changed file with 25 additions and 6 deletions.
31 changes: 25 additions & 6 deletions argopy/stores/filesystems.py
@@ -775,7 +775,13 @@ def make_request(
return data

def open_dataset(
self, url, errors: str = "raise", lazy: bool = False, dwn_opts: dict = {}, xr_opts: dict = {}, **kwargs
self,
url,
errors: str = "raise",
lazy: bool = False,
dwn_opts: dict = {},
xr_opts: dict = {},
**kwargs,
) -> xr.Dataset:
"""Create a :class:`xarray.Dataset` from an url pointing to a netcdf file
@@ -822,7 +828,8 @@ def open_dataset(
--------
:func:`httpstore.open_mfdataset`
"""
def load_in_memory(url, errors='raise', dwn_opts={}, xr_opts={}):

def load_in_memory(url, errors="raise", dwn_opts={}, xr_opts={}):
data = self.download_url(url, **dwn_opts)
if data is None:
if errors == "raise":
@@ -848,7 +855,7 @@ def load_in_memory(url, errors='raise', dwn_opts={}, xr_opts={}):

return data, xr_opts

def load_lazily(url, errors='raise', dwn_opts={}, xr_opts={}):
def load_lazily(url, errors="raise", dwn_opts={}, xr_opts={}):
from . import ArgoKerchunker

if "ak" not in kwargs:
@@ -874,12 +881,18 @@ def load_lazily(url, errors='raise', dwn_opts={}, xr_opts={}):
warnings.warn(
"This url does not support byte range requests so we cannot load lazily, hence falling back on loading in memory"
)
return load_in_memory(url, errors=errors, dwn_opts=dwn_opts, xr_opts=xr_opts)
return load_in_memory(
url, errors=errors, dwn_opts=dwn_opts, xr_opts=xr_opts
)

if not lazy:
target, _ = load_in_memory(url, errors=errors, dwn_opts=dwn_opts, xr_opts=xr_opts)
target, _ = load_in_memory(
url, errors=errors, dwn_opts=dwn_opts, xr_opts=xr_opts
)
else:
target, xr_opts = load_lazily(url, errors=errors, dwn_opts=dwn_opts, xr_opts=xr_opts)
target, xr_opts = load_lazily(
url, errors=errors, dwn_opts=dwn_opts, xr_opts=xr_opts
)

ds = xr.open_dataset(target, **xr_opts)

@@ -1191,6 +1204,12 @@ def open_mfdataset(

urls = [self.curateurl(url) for url in urls]

if "lazy" in open_dataset_opts and open_dataset_opts["lazy"] and concat:
warnings.warn(
"Lazy openning and concatenate multiple netcdf files is not yet supported. Ignoring the 'lazy' option."
)
open_dataset_opts["lazy"] = False

results = []
failed = []


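For context, a hedged usage sketch of the `lazy` option this commit adds to `httpstore.open_dataset`. It assumes that `httpstore` is importable from `argopy.stores` and can be constructed with defaults, and the netcdf URL shown is purely illustrative; only the `url`, `errors`, `lazy`, `dwn_opts`, `xr_opts` signature comes from the diff above.

    # Usage sketch only: the store construction and the URL are illustrative
    # assumptions, not part of this commit.
    from argopy.stores import httpstore

    fs = httpstore()  # assumes a default-constructed HTTP file store

    # Hypothetical netcdf file served over HTTP:
    url = "https://data-argo.ifremer.fr/dac/coriolis/6902746/6902746_prof.nc"

    # Default behaviour: download the file and open it in memory.
    ds_eager = fs.open_dataset(url)

    # New in this commit: lazy opening (kerchunk-based, via ArgoKerchunker);
    # falls back to in-memory loading, with a warning, if the server does not
    # support byte range requests.
    ds_lazy = fs.open_dataset(url, lazy=True)

Per the `open_mfdataset` hunk above, combining `lazy=True` with concatenation of multiple files is not yet supported: the option is ignored with a warning and the files are loaded in memory.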