diff --git a/satpy/dataset.py b/satpy/dataset.py
index a1ffe38e54..18621e537b 100644
--- a/satpy/dataset.py
+++ b/satpy/dataset.py
@@ -15,8 +15,7 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Dataset objects.
-"""
+"""Dataset objects."""
 
 import sys
 import logging
@@ -171,6 +170,7 @@ class DatasetID(DatasetID):
     """
 
     def __new__(cls, *args, **kwargs):
+        """Create new DatasetID."""
         ret = super(DatasetID, cls).__new__(cls, *args, **kwargs)
         if ret.modifiers is not None and not isinstance(ret.modifiers, tuple):
             raise TypeError("'DatasetID' modifiers must be a tuple or None, "
@@ -269,6 +269,26 @@ def _to_trimmed_dict(self):
                 if getattr(self, key) is not None}
 
 
+def create_filtered_dsid(dataset_key, **dfilter):
+    """Create a DatasetID matching *dataset_key* and *dfilter*.
+
+    If a property is specified in both *dataset_key* and *dfilter*, the former
+    has priority.
+
+    """
+    try:
+        ds_dict = dataset_key.to_dict()
+    except AttributeError:
+        if isinstance(dataset_key, str):
+            ds_dict = {'name': dataset_key}
+        elif isinstance(dataset_key, numbers.Number):
+            ds_dict = {'wavelength': dataset_key}
+    for key, value in dfilter.items():
+        if value is not None:
+            ds_dict.setdefault(key, value)
+    return DatasetID.from_dict(ds_dict)
+
+
 def dataset_walker(datasets):
     """Walk through *datasets* and their ancillary data.
 
diff --git a/satpy/node.py b/satpy/node.py
index 3faaaa4992..c815b4d0de 100644
--- a/satpy/node.py
+++ b/satpy/node.py
@@ -20,6 +20,7 @@
 from satpy import DatasetDict, DatasetID, DATASET_KEYS
 from satpy.readers import TooManyResults
 from satpy.utils import get_logger
+from satpy.dataset import create_filtered_dsid
 
 LOG = get_logger(__name__)
 # Empty leaf used for marking composites with no prerequisites
@@ -410,7 +411,7 @@ def _find_compositor(self, dataset_key, **dfilter):
             compositor = self.get_compositor(dataset_key)
         except KeyError:
             raise KeyError("Can't find anything called {}".format(str(dataset_key)))
-        dataset_key = compositor.id
+        dataset_key = create_filtered_dsid(compositor.id, **dfilter)
         root = Node(dataset_key, data=(compositor, [], []))
         if src_node is not None:
             self.add_child(root, src_node)
@@ -434,16 +435,7 @@ def _find_compositor(self, dataset_key, **dfilter):
 
     def get_filtered_item(self, dataset_key, **dfilter):
         """Get the item matching *dataset_key* and *dfilter*."""
-        try:
-            ds_dict = dataset_key.to_dict()
-        except AttributeError:
-            if isinstance(dataset_key, str):
-                ds_dict = {'name': dataset_key}
-            elif isinstance(dataset_key, float):
-                ds_dict = {'wavelength': dataset_key}
-        clean_filter = {key: value for key, value in dfilter.items() if value is not None}
-        ds_dict.update(clean_filter)
-        dsid = DatasetID.from_dict(ds_dict)
+        dsid = create_filtered_dsid(dataset_key, **dfilter)
         return self[dsid]
 
     def _find_dependencies(self, dataset_key, **dfilter):
@@ -463,7 +455,8 @@ def _find_dependencies(self, dataset_key, **dfilter):
 
         # 0 check if the *exact* dataset is already loaded
         try:
-            node = self.getitem(dataset_key)
+            dsid = create_filtered_dsid(dataset_key, **dfilter)
+            node = self.getitem(dsid)
             LOG.trace("Found exact dataset already loaded: {}".format(node.name))
             return node, set()
         except KeyError:
diff --git a/satpy/tests/test_scene.py b/satpy/tests/test_scene.py
index 8081c97496..2aec7f1997 100644
--- a/satpy/tests/test_scene.py
+++ b/satpy/tests/test_scene.py
@@ -1047,6 +1047,28 @@ def test_load_multiple_resolutions(self, cri, cl):
         self.assertEqual(loaded_ids[1].name, 'comp25')
         self.assertEqual(loaded_ids[1].resolution, 1000)
 
+    @mock.patch('satpy.composites.CompositorLoader.load_compositors')
+    @mock.patch('satpy.scene.Scene.create_reader_instances')
+    def test_load_same_subcomposite(self, cri, cl):
+        """Test loading a composite and one of its subcomposites at the same time."""
+        import satpy.scene
+        from satpy.tests.utils import FakeReader, test_composites
+        cri.return_value = {'fake_reader': FakeReader(
+            'fake_reader', 'fake_sensor')}
+        comps, mods = test_composites('fake_sensor')
+        cl.return_value = (comps, mods)
+        scene = satpy.scene.Scene(filenames=['bla'],
+                                  base_dir='bli',
+                                  reader='fake_reader')
+
+        scene.load(['comp24', 'comp25'], resolution=500)
+        loaded_ids = list(scene.datasets.keys())
+        self.assertEqual(len(loaded_ids), 2)
+        self.assertEqual(loaded_ids[0].name, 'comp24')
+        self.assertEqual(loaded_ids[0].resolution, 500)
+        self.assertEqual(loaded_ids[1].name, 'comp25')
+        self.assertEqual(loaded_ids[1].resolution, 500)
+
     @mock.patch('satpy.composites.CompositorLoader.load_compositors')
     @mock.patch('satpy.scene.Scene.create_reader_instances')
     def test_load_comp5(self, cri, cl):
@@ -1733,8 +1755,8 @@ def _test(self, sensor_names):
         scene.load(['comp11', 'comp23'])
         comp11_node = scene.dep_tree['comp11']
         comp23_node = scene.dep_tree['comp23']
-        self.assertEqual(comp11_node.data[1][-1].name, 'ds10')
-        self.assertEqual(comp23_node.data[1][0].name, 'ds8')
+        self.assertEqual(comp11_node.data[1][-1].name.name, 'ds10')
+        self.assertEqual(comp23_node.data[1][0].name.name, 'ds8')
         loaded_ids = list(scene.datasets.keys())
         self.assertEqual(len(loaded_ids), 2)
         self.assertIn('comp11', scene.datasets)
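
A quick sketch (not part of the patch) of how the new helper is expected to behave, assuming the DatasetID namedtuple API in satpy.dataset at this revision. Because create_filtered_dsid fills in filter values with setdefault, properties already present on the key take priority over the filter:

    from satpy.dataset import DatasetID, create_filtered_dsid

    # A plain string key: the filter supplies the unset properties.
    dsid = create_filtered_dsid('comp25', resolution=500)
    assert dsid.name == 'comp25' and dsid.resolution == 500

    # A key that already carries a resolution keeps it; the filter
    # only fills in properties the key leaves unset.
    key = DatasetID(name='comp25', resolution=1000)
    assert create_filtered_dsid(key, resolution=500).resolution == 1000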