Skip to content

Commit

Permalink
Merge pull request #836 from mraspaud/fix-dep-tree-resolution
Browse files Browse the repository at this point in the history
Fix composites not being recorded with desired resolution in deptree
  • Loading branch information
mraspaud authored Jul 2, 2019
2 parents 5def3dc + 3e91628 commit 44239a0
Show file tree
Hide file tree
Showing 3 changed files with 51 additions and 16 deletions.
24 changes: 22 additions & 2 deletions satpy/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Dataset objects.
"""
"""Dataset objects."""

import sys
import logging
Expand Down Expand Up @@ -171,6 +170,7 @@ class DatasetID(DatasetID):
"""

def __new__(cls, *args, **kwargs):
"""Create new DatasetID."""
ret = super(DatasetID, cls).__new__(cls, *args, **kwargs)
if ret.modifiers is not None and not isinstance(ret.modifiers, tuple):
raise TypeError("'DatasetID' modifiers must be a tuple or None, "
Expand Down Expand Up @@ -269,6 +269,26 @@ def _to_trimmed_dict(self):
if getattr(self, key) is not None}


def create_filtered_dsid(dataset_key, **dfilter):
    """Create a DatasetID matching *dataset_key* and *dfilter*.

    If a property is specified in both *dataset_key* and *dfilter*, the
    former has priority.

    Args:
        dataset_key: A DatasetID-like object (anything providing
            ``to_dict``), a dataset name (str), or a wavelength (number).
        **dfilter: Additional DatasetID properties to fill in; entries
            whose value is ``None`` are ignored.

    Returns:
        DatasetID: Built from *dataset_key* with *dfilter* merged in.

    Raises:
        TypeError: If *dataset_key* is not DatasetID-like, a str, or a
            number.
    """
    try:
        ds_dict = dataset_key.to_dict()
    except AttributeError:
        if isinstance(dataset_key, str):
            ds_dict = {'name': dataset_key}
        elif isinstance(dataset_key, numbers.Number):
            ds_dict = {'wavelength': dataset_key}
        else:
            # Previously an unsupported key type fell through and crashed
            # below with an UnboundLocalError; fail fast and clearly instead.
            raise TypeError("Don't know how to interpret a dataset_key of "
                            "type {}".format(type(dataset_key)))
    for key, value in dfilter.items():
        if value is not None:
            # setdefault: properties already in *dataset_key* win over the
            # filter values.
            ds_dict.setdefault(key, value)
    return DatasetID.from_dict(ds_dict)


def dataset_walker(datasets):
"""Walk through *datasets* and their ancillary data.
Expand Down
17 changes: 5 additions & 12 deletions satpy/node.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
from satpy import DatasetDict, DatasetID, DATASET_KEYS
from satpy.readers import TooManyResults
from satpy.utils import get_logger
from satpy.dataset import create_filtered_dsid

LOG = get_logger(__name__)
# Empty leaf used for marking composites with no prerequisites
Expand Down Expand Up @@ -410,7 +411,7 @@ def _find_compositor(self, dataset_key, **dfilter):
compositor = self.get_compositor(dataset_key)
except KeyError:
raise KeyError("Can't find anything called {}".format(str(dataset_key)))
dataset_key = compositor.id
dataset_key = create_filtered_dsid(compositor.id, **dfilter)
root = Node(dataset_key, data=(compositor, [], []))
if src_node is not None:
self.add_child(root, src_node)
Expand All @@ -434,16 +435,7 @@ def _find_compositor(self, dataset_key, **dfilter):

def get_filtered_item(self, dataset_key, **dfilter):
"""Get the item matching *dataset_key* and *dfilter*."""
try:
ds_dict = dataset_key.to_dict()
except AttributeError:
if isinstance(dataset_key, str):
ds_dict = {'name': dataset_key}
elif isinstance(dataset_key, float):
ds_dict = {'wavelength': dataset_key}
clean_filter = {key: value for key, value in dfilter.items() if value is not None}
ds_dict.update(clean_filter)
dsid = DatasetID.from_dict(ds_dict)
dsid = create_filtered_dsid(dataset_key, **dfilter)
return self[dsid]

def _find_dependencies(self, dataset_key, **dfilter):
Expand All @@ -463,7 +455,8 @@ def _find_dependencies(self, dataset_key, **dfilter):

# 0 check if the *exact* dataset is already loaded
try:
node = self.getitem(dataset_key)
dsid = create_filtered_dsid(dataset_key, **dfilter)
node = self.getitem(dsid)
LOG.trace("Found exact dataset already loaded: {}".format(node.name))
return node, set()
except KeyError:
Expand Down
26 changes: 24 additions & 2 deletions satpy/tests/test_scene.py
Original file line number Diff line number Diff line change
Expand Up @@ -1047,6 +1047,28 @@ def test_load_multiple_resolutions(self, cri, cl):
self.assertEqual(loaded_ids[1].name, 'comp25')
self.assertEqual(loaded_ids[1].resolution, 1000)

@mock.patch('satpy.composites.CompositorLoader.load_compositors')
@mock.patch('satpy.scene.Scene.create_reader_instances')
def test_load_same_subcomposite(self, cri, cl):
"""Test loading a composite and one of its subcomposites at the same time."""
import satpy.scene
from satpy.tests.utils import FakeReader, test_composites
# Fake reader/compositor setup so no real files or configs are needed.
cri.return_value = {'fake_reader': FakeReader(
'fake_reader', 'fake_sensor')}
comps, mods = test_composites('fake_sensor')
cl.return_value = (comps, mods)
scene = satpy.scene.Scene(filenames=['bla'],
base_dir='bli',
reader='fake_reader')

# Both the composite and its subcomposite should come back at the
# requested resolution (regression test for deptree resolution recording).
scene.load(['comp24', 'comp25'], resolution=500)
loaded_ids = list(scene.datasets.keys())
self.assertEqual(len(loaded_ids), 2)
self.assertEqual(loaded_ids[0].name, 'comp24')
self.assertEqual(loaded_ids[0].resolution, 500)
self.assertEqual(loaded_ids[1].name, 'comp25')
self.assertEqual(loaded_ids[1].resolution, 500)

@mock.patch('satpy.composites.CompositorLoader.load_compositors')
@mock.patch('satpy.scene.Scene.create_reader_instances')
def test_load_comp5(self, cri, cl):
Expand Down Expand Up @@ -1733,8 +1755,8 @@ def _test(self, sensor_names):
scene.load(['comp11', 'comp23'])
comp11_node = scene.dep_tree['comp11']
comp23_node = scene.dep_tree['comp23']
self.assertEqual(comp11_node.data[1][-1].name, 'ds10')
self.assertEqual(comp23_node.data[1][0].name, 'ds8')
self.assertEqual(comp11_node.data[1][-1].name.name, 'ds10')
self.assertEqual(comp23_node.data[1][0].name.name, 'ds8')
loaded_ids = list(scene.datasets.keys())
self.assertEqual(len(loaded_ids), 2)
self.assertIn('comp11', scene.datasets)
Expand Down

0 comments on commit 44239a0

Please sign in to comment.