This repository has been archived by the owner on Aug 29, 2023. It is now read-only.

Commit

#393 added error handling for local config reading
Krzysztof (Chris) Bernat committed Sep 25, 2017
1 parent 347dce5 commit 163bd3a
Showing 2 changed files with 63 additions and 16 deletions.
28 changes: 28 additions & 0 deletions cate/core/ds.py
@@ -81,6 +81,7 @@
import glob
from abc import ABCMeta, abstractmethod
from math import ceil, sqrt
import sys
from typing import Sequence, Optional, Union, Tuple, Any

import xarray as xr
@@ -421,6 +422,33 @@ def _repr_html_(self):
DATA_STORE_REGISTRY = DataStoreRegistry()


# noinspection PyArgumentList
class DataSourceInitializationError(Exception):
"""
Exceptions produced by Cate's DataStore during DataSource initialization.
"""
def __init__(self, cause, *args, **kwargs):
if isinstance(cause, Exception):
super(DataSourceInitializationError, self).__init__(str(cause), *args, **kwargs)
_, _, traceback = sys.exc_info()
self.with_traceback(traceback)
elif isinstance(cause, str):
super(DataSourceInitializationError, self).__init__(cause, *args, **kwargs)
else:
super(DataSourceInitializationError, self).__init__(*args, **kwargs)
self._cause = cause

@property
def cause(self):
return self._cause


class DataSourceInitializationWarning(UserWarning):
"""
Warnings produced by Cate's DataStore during DataSource initialization.
"""


def find_data_sources(data_stores: Union[DataStore, Sequence[DataStore]] = None,
ds_id: str = None,
query_expr: str = None) -> Sequence[DataSource]:
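
For reference, a minimal sketch of how the new exception carries its cause (assuming cate.core.ds from this commit is importable; the ValueError and the path in the message are purely illustrative):

import sys

from cate.core.ds import DataSourceInitializationError

# Constructed from a plain message: .cause is simply that string.
error = DataSourceInitializationError('Data source config does not exist, /tmp/ds.json')
print(error.cause)

# Constructed from another exception inside an except block: the message and
# the traceback of the original error are picked up via sys.exc_info().
try:
    int('not a number')
except ValueError as original:
    wrapped = DataSourceInitializationError(original)
    print(wrapped.cause is original)                    # True
    print(wrapped.__traceback__ is sys.exc_info()[2])   # True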
51 changes: 35 additions & 16 deletions cate/ds/local.py
@@ -42,6 +42,7 @@
import psutil
import shutil
import uuid
import warnings
import xarray as xr
from collections import OrderedDict
from datetime import datetime
@@ -53,7 +54,8 @@

from cate.conf import get_config_value, get_data_stores_path
from cate.conf.defaults import NETCDF_COMPRESSION_LEVEL
from cate.core.ds import DATA_STORE_REGISTRY, DataStore, DataSource, open_xarray_dataset
from cate.core.ds import DATA_STORE_REGISTRY, DataStore, DataSource, DataSourceInitializationError, \
DataSourceInitializationWarning, open_xarray_dataset
from cate.core.types import Polygon, PolygonLike, TimeRange, TimeRangeLike, VarNames, VarNamesLike
from cate.util.monitor import Monitor

Expand Down Expand Up @@ -653,16 +655,18 @@ def __init__(self, ds_id: str, store_dir: str):

def add_pattern(self, data_source_id: str, files: Union[str, Sequence[str]] = None) -> 'DataSource':
data_source = self.create_data_source(data_source_id)
if isinstance(files, str):
if isinstance(files, str) and len(files) > 0:
files = [files]
is_first_file = True
if files:
for file in files:
if is_first_file:
data_source.add_dataset(file, extract_meta_info=True)
is_first_file = False
else:
data_source.add_dataset(file)
if not files:
raise ValueError("files pattern cannot be empty")
for file in files:
if is_first_file:
data_source.add_dataset(file, extract_meta_info=True)
is_first_file = False
else:
data_source.add_dataset(file)

self.register_ds(data_source)
return data_source
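
The new empty-pattern guard in add_pattern() can be exercised on its own; normalize_files below is a name introduced here purely for illustration (not part of the code base), and the paths are made up:

from typing import Sequence, Union


def normalize_files(files: Union[str, Sequence[str]] = None) -> Sequence[str]:
    # Mirrors the check added to add_pattern(): a non-empty string becomes a
    # one-element list, while None, '' and [] are all rejected up front.
    if isinstance(files, str) and len(files) > 0:
        files = [files]
    if not files:
        raise ValueError("files pattern cannot be empty")
    return files


print(normalize_files('/data/*.nc'))   # ['/data/*.nc']
try:
    normalize_files('')
except ValueError as error:
    print(error)                       # files pattern cannot be empty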

@@ -787,17 +791,28 @@ def _repr_html_(self):
rows.append('<tr><td><strong>%s</strong></td><td>%s</td></tr>' % (row_count, data_source._repr_html_()))
return '<p>Contents of LocalFilePatternDataStore "%s"</p><table>%s</table>' % (self.id, '\n'.join(rows))

def _init_data_sources(self):
def _init_data_sources(self, skip_broken: bool=True):
"""
        :param skip_broken: If a data source is broken, skip loading it and issue a warning instead of raising an error.
:return:
"""
if self._data_sources:
return
os.makedirs(self._store_dir, exist_ok=True)
json_files = [f for f in os.listdir(self._store_dir)
if os.path.isfile(os.path.join(self._store_dir, f)) and f.endswith('.json')]
self._data_sources = []
for json_file in json_files:
data_source = self._load_data_source(os.path.join(self._store_dir, json_file))
if data_source:
self._data_sources.append(data_source)
try:
data_source = self._load_data_source(os.path.join(self._store_dir, json_file))
if data_source:
self._data_sources.append(data_source)
except DataSourceInitializationError as e:
if skip_broken:
warnings.warn(e.cause, DataSourceInitializationWarning, stacklevel=0)
else:
raise e

def save_data_source(self, data_source, unlock: bool = False):
self._save_data_source(data_source)
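
A minimal sketch of the skip_broken behaviour introduced above (load_or_warn and broken_loader are illustrative stand-ins, not part of the code base; the import assumes this commit's cate.core.ds):

import warnings

from cate.core.ds import DataSourceInitializationError, DataSourceInitializationWarning


def load_or_warn(path, loader, skip_broken=True):
    # Same branch structure as _init_data_sources(): a broken data source is
    # reported as a warning when skip_broken is set, otherwise it propagates.
    try:
        return loader(path)
    except DataSourceInitializationError as error:
        if skip_broken:
            warnings.warn(error.cause, DataSourceInitializationWarning, stacklevel=0)
            return None
        raise


def broken_loader(path):
    # Stand-in for _load_data_source() failing on a malformed config file.
    raise DataSourceInitializationError('Cannot load data source config, {}'.format(path))


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    load_or_warn('broken.json', broken_loader)

print([str(w.message) for w in caught])   # ['Cannot load data source config, broken.json']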
@@ -822,9 +837,13 @@ def _load_data_source(self, json_path):
@staticmethod
def _load_json_file(json_path: str):
if os.path.isfile(json_path):
with open(json_path) as fp:
return json.load(fp=fp) or {}
return None
try:
with open(json_path) as fp:
return json.load(fp=fp) or {}
except json.decoder.JSONDecodeError:
raise DataSourceInitializationError("Cannot load data source config, {}".format(json_path))
else:
raise DataSourceInitializationError("Data source config does not exists, {}".format(json_path))

@staticmethod
def _json_default_serializer(obj):
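
End to end, the two failure modes handled by the updated _load_json_file() can be reproduced with a throw-away config file (a sketch; read_config mirrors the method above, and the file contents are illustrative):

import json
import os
import tempfile

from cate.core.ds import DataSourceInitializationError


def read_config(json_path: str):
    # Same control flow as the updated _load_json_file(): a malformed file and
    # a missing file both surface as DataSourceInitializationError rather than
    # an unhandled JSONDecodeError or a silent None.
    if os.path.isfile(json_path):
        try:
            with open(json_path) as fp:
                return json.load(fp=fp) or {}
        except json.decoder.JSONDecodeError:
            raise DataSourceInitializationError('Cannot load data source config, {}'.format(json_path))
    else:
        raise DataSourceInitializationError('Data source config does not exist, {}'.format(json_path))


with tempfile.NamedTemporaryFile('w', suffix='.json', delete=False) as tmp:
    tmp.write('{"name": "local.demo"')   # note the missing closing brace

for path in (tmp.name, tmp.name + '.missing'):
    try:
        read_config(path)
    except DataSourceInitializationError as error:
        print(error.cause)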
