diff --git a/cate/ds/esa_cci_odp.py b/cate/ds/esa_cci_odp.py
index 1847279bb..a77c6e0cf 100644
--- a/cate/ds/esa_cci_odp.py
+++ b/cate/ds/esa_cci_odp.py
@@ -898,6 +898,9 @@ def make_local(self,
local_ds = local_store.create_data_source(local_name, region, _REFERENCE_DATA_SOURCE_TYPE, self.id,
time_range, var_names, meta_info=local_meta_info, lock_file=True)
self._make_local(local_ds, time_range, region, var_names, monitor=monitor)
+ if local_ds.is_empty:
+ local_store.remove_data_source(local_ds)
+ return None
return local_ds
def _init_file_list(self, monitor: Monitor = Monitor.NONE):
diff --git a/cate/ds/local.py b/cate/ds/local.py
index 3ca1f9f2c..31ca38d4f 100644
--- a/cate/ds/local.py
+++ b/cate/ds/local.py
@@ -352,6 +352,9 @@ def make_local(self,
local_ds = local_store.create_data_source(local_name, region, _REFERENCE_DATA_SOURCE_TYPE, self.id,
meta_info=self.meta_info)
self._make_local(local_ds, time_range, region, var_names, monitor)
+ if local_ds.is_empty:
+ local_store.remove_data_source(local_ds)
+ return None
return local_ds
def update_local(self,
@@ -485,6 +487,14 @@ def variables_info(self):
def info_string(self):
return 'Files: %s' % (' '.join(self._files))
+ @property
+ def is_empty(self) -> bool:
+ """
+ Return True if this data source contains no files, False otherwise.
+
+ """
+ return not self._files
+
def _repr_html_(self):
import html
return '
\n' \
@@ -570,11 +580,12 @@ def add_pattern(self, data_source_id: str, files: Union[str, Sequence[str]] = No
data_source.add_dataset(file)
return data_source
- def remove_data_source(self, data_source_id: str, remove_files: bool = True):
- data_sources = self.query(id=data_source_id)
- if not data_sources or len(data_sources) != 1:
- return
- data_source = data_sources[0]
+ def remove_data_source(self, data_source: Union[str, DataSource], remove_files: bool = True):
+ if isinstance(data_source, str):
+ data_sources = self.query(id=data_source)
+ if not data_sources or len(data_sources) != 1:
+ return
+ data_source = data_sources[0]
file_name = os.path.join(self._store_dir, data_source.id + '.json')
os.remove(file_name)
if remove_files:
diff --git a/test/ds/test_esa_cci_odp.py b/test/ds/test_esa_cci_odp.py
index df51d60f8..eb9d90502 100644
--- a/test/ds/test_esa_cci_odp.py
+++ b/test/ds/test_esa_cci_odp.py
@@ -61,7 +61,7 @@ def setUp(self):
oc_data_sources = self.data_store.query(query_expr='OC')
self.assertIsNotNone(oc_data_sources)
self.assertIsNotNone(oc_data_sources[0])
- self.fist_oc_data_source = oc_data_sources[0]
+ self.first_oc_data_source = oc_data_sources[0]
self.tmp_dir = tempfile.mkdtemp()
self._existing_local_data_store = DATA_STORE_REGISTRY.get_data_store('local')
@@ -125,9 +125,9 @@ def build_file_item(item_name: str, date_from: datetime, date_to: datetime, size
with unittest.mock.patch('cate.ds.esa_cci_odp.EsaCciOdpDataSource._find_files', find_files_mock):
with unittest.mock.patch.object(EsaCciOdpDataStore, 'query', return_value=[]):
try:
- new_ds = self.fist_oc_data_source.make_local('local_ds_test', None,
- (datetime.datetime(1978, 11, 14, 0, 0),
- datetime.datetime(1978, 11, 15, 23, 59)))
+ new_ds = self.first_oc_data_source.make_local('local_ds_test', None,
+ (datetime.datetime(1978, 11, 14, 0, 0),
+ datetime.datetime(1978, 11, 15, 23, 59)))
except:
raise ValueError(reference_path, os.listdir(reference_path))
@@ -136,31 +136,31 @@ def build_file_item(item_name: str, date_from: datetime, date_to: datetime, size
(datetime.datetime(1978, 11, 14, 0, 0),
datetime.datetime(1978, 11, 15, 23, 59)))
- self.fist_oc_data_source.update_local(new_ds.id, (datetime.datetime(1978, 11, 15, 00, 00),
- datetime.datetime(1978, 11, 16, 23, 59)))
+ self.first_oc_data_source.update_local(new_ds.id, (datetime.datetime(1978, 11, 15, 00, 00),
+ datetime.datetime(1978, 11, 16, 23, 59)))
self.assertEqual(new_ds.temporal_coverage(), TimeRangeLike.convert(
(datetime.datetime(1978, 11, 15, 0, 0),
datetime.datetime(1978, 11, 16, 23, 59))))
- self.fist_oc_data_source.update_local(new_ds.id, (datetime.datetime(1978, 11, 14, 00, 00),
- datetime.datetime(1978, 11, 15, 23, 59)))
+ self.first_oc_data_source.update_local(new_ds.id, (datetime.datetime(1978, 11, 14, 00, 00),
+ datetime.datetime(1978, 11, 15, 23, 59)))
self.assertEqual(new_ds.temporal_coverage(), TimeRangeLike.convert(
(datetime.datetime(1978, 11, 14, 0, 0),
datetime.datetime(1978, 11, 15, 23, 59))))
with self.assertRaises(ValueError) as context:
- self.fist_oc_data_source.update_local("wrong_ds_name", (datetime.datetime(1978, 11, 15, 00, 00),
- datetime.datetime(1978, 11, 16, 23, 59)))
+ self.first_oc_data_source.update_local("wrong_ds_name", (datetime.datetime(1978, 11, 15, 00, 00),
+ datetime.datetime(1978, 11, 16, 23, 59)))
self.assertTrue("Couldn't find local DataSource", context.exception.args[0])
- new_ds_w_one_variable = self.fist_oc_data_source.make_local(
+ new_ds_w_one_variable = self.first_oc_data_source.make_local(
'local_ds_test_2', None, (datetime.datetime(1978, 11, 14, 0, 0),
datetime.datetime(1978, 11, 15, 23, 59)), None, ['sm'])
self.assertEqual(new_ds_w_one_variable.id, 'local.local_ds_test_2')
ds = new_ds_w_one_variable.open_dataset()
self.assertSetEqual(set(ds.variables), {'sm', 'lat', 'lon', 'time'})
- new_ds_w_region = self.fist_oc_data_source.make_local(
+ new_ds_w_region = self.first_oc_data_source.make_local(
'from_local_to_local_region', None, (datetime.datetime(1978, 11, 14, 0, 0),
datetime.datetime(1978, 11, 15, 23, 59)),
"10,10,20,20", ['sm']) # type: LocalDataSource
@@ -169,32 +169,38 @@ def build_file_item(item_name: str, date_from: datetime, date_to: datetime, size
data_set = new_ds_w_region.open_dataset()
self.assertSetEqual(set(data_set.variables), {'sm', 'lat', 'lon', 'time'})
+ no_data = self.first_oc_data_source.make_local(
+ 'empty_ds', None, (datetime.datetime(2017, 12, 1, 0, 0),
+ datetime.datetime(2017, 12, 31, 23, 59)),
+ )
+ self.assertIsNone(no_data)
+
def test_data_store(self):
- self.assertIs(self.fist_oc_data_source.data_store,
+ self.assertIs(self.first_oc_data_source.data_store,
self.data_store)
def test_id(self):
- self.assertEqual(self.fist_oc_data_source.id,
+ self.assertEqual(self.first_oc_data_source.id,
'esacci.OC.day.L3S.K_490.multi-sensor.multi-platform.MERGED.1-0.r2')
def test_schema(self):
- self.assertEqual(self.fist_oc_data_source.schema,
+ self.assertEqual(self.first_oc_data_source.schema,
None)
@unittest.skip(reason='outdated info string')
def test_info_string(self):
self.assertIn('product_string: MERGED\n',
- self.fist_oc_data_source.info_string)
+ self.first_oc_data_source.info_string)
def test_variables_info_string(self):
self.assertIn('kd_490 (m-1):\n',
- self.fist_oc_data_source.variables_info_string)
+ self.first_oc_data_source.variables_info_string)
self.assertIn('Long name: Downwelling attenuation coefficient at 490nm',
- self.fist_oc_data_source.variables_info_string)
+ self.first_oc_data_source.variables_info_string)
@unittest.skip(reason='ssl error on windows')
def test_temporal_coverage(self):
- self.assertEqual(self.fist_oc_data_source.temporal_coverage(),
+ self.assertEqual(self.first_oc_data_source.temporal_coverage(),
(datetime.datetime(1997, 9, 4, 0, 0), datetime.datetime(2000, 6, 24, 0, 0)))
def assert_tf(self, filename: str, expected_time_format: str):