Skip to content
This repository has been archived by the owner on Aug 29, 2023. It is now read-only.

Commit

Permalink
#277 master merge, tests fix
Browse files Browse the repository at this point in the history
  • Loading branch information
Krzysztof (Chris) Bernat committed Sep 7, 2017
2 parents 7e4d361 + 99290ec commit 8ee621c
Show file tree
Hide file tree
Showing 33 changed files with 280 additions and 182 deletions.
4 changes: 3 additions & 1 deletion CHANGES.md
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
## Changes in version 0.9.0.dev7 (unrelease)
## Changes in version 0.9.0.dev7 (unreleased)

### Improvements and new Features

### Issues Fixed/Resolved

* Filter 't0' in the `make_local` step of **SOILMOISTURE** data sources to make the data usable
[#326](https://github.com/CCI-Tools/cate-core/issues/326)
* Use only tags from a predefined set (maybe module name && list in developers' guide)
[#280](https://github.com/CCI-Tools/cate-core/issues/280)

## Changes in version 0.9.0.dev6

Expand Down
7 changes: 3 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -89,10 +89,9 @@ To install Cate for development and for the current user, use

$ python3 setup.py develop --user

Unfortunately, the installation fails on many platforms. In most cases the failure will be caused by the
`h5py` module dependency, which expects pre-installed HDF-5 C-libraries to be present on your computer.

On Windows, you may get around this by pre-installing the Cate dependencies (which you'll find in `setup.py`)
There is a **known issue on Windows** when installing into an existing Python environment. Installation may
fail due to an unresolved dependency to the `h5py` package, which expects pre-installed
HDF-5 C-libraries to be present on your computer. You may get around this by pre-installing the Cate dependencies (which you'll find in `setup.py`)
on your own, for example by using Christoph Gohlke's
[Unofficial Windows Binaries for Python Extension Packages](http://www.lfd.uci.edu/~gohlke/pythonlibs/).

Expand Down
17 changes: 13 additions & 4 deletions cate/ds/esa_cci_odp.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@
from cate.conf import get_config_value
from cate.conf.defaults import NETCDF_COMPRESSION_LEVEL
from cate.core.ds import DATA_STORE_REGISTRY, DataStore, DataSource, Schema, \
open_xarray_dataset, get_data_stores_path, find_data_sources
open_xarray_dataset, get_data_stores_path
from cate.core.types import PolygonLike, TimeRange, TimeRangeLike, VarNamesLike, VarNames
from cate.ds.local import add_to_data_store_registry, LocalDataSource, LocalDataStore
from cate.util.monitor import Monitor
Expand Down Expand Up @@ -591,7 +591,14 @@ def update_local(self,
time_range: TimeRangeLike.TYPE,
monitor: Monitor = Monitor.NONE) -> bool:

data_sources = find_data_sources(id=local_id) # type: Sequence['DataSource']
local_store = DATA_STORE_REGISTRY.get_data_store('local')
if not local_store:
add_to_data_store_registry()
local_store = DATA_STORE_REGISTRY.get_data_store('local')
if not local_store:
raise ValueError('Cannot initialize `local` DataStore')

data_sources = local_store.query(id=local_id) # type: Sequence['DataSource']
data_source = next((ds for ds in data_sources if isinstance(ds, LocalDataSource) and
ds.id == local_id), None) # type: LocalDataSource
if not data_source:
Expand Down Expand Up @@ -621,9 +628,11 @@ def update_local(self,
if to_add:
for time_range_to_add in to_add:
self._make_local(data_source, time_range_to_add, None, data_source.variables_info, monitor)
data_source.meta_info['temporal_coverage_start'] = time_range[0]
data_source.meta_info['temporal_coverage_end'] = time_range[1]
data_source.update_temporal_coverage(time_range)

# TODO (chris): forman added False (?) to make signature happy
return False
return bool(to_remove or to_add)

def delete_local(self, time_range: TimeRangeLike.TYPE) -> int:

Expand Down
31 changes: 29 additions & 2 deletions cate/ds/local.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@

from cate.conf import get_config_value
from cate.conf.defaults import NETCDF_COMPRESSION_LEVEL
from cate.core.ds import DATA_STORE_REGISTRY, DataStore, DataSource, open_xarray_dataset, find_data_sources
from cate.core.ds import DATA_STORE_REGISTRY, DataStore, DataSource, open_xarray_dataset
from cate.core.ds import get_data_stores_path
from cate.core.types import Polygon, PolygonLike, TimeRange, TimeRangeLike, VarNames, VarNamesLike
from cate.util.monitor import Monitor
Expand Down Expand Up @@ -373,13 +373,21 @@ def make_local(self,

local_store.register_ds(local_ds)
return local_ds
return None

def update_local(self,
local_id: str,
time_range: TimeRangeLike.TYPE,
monitor: Monitor = Monitor.NONE) -> bool:

data_sources = find_data_sources(id=local_id) # type: Sequence['DataSource']
local_store = DATA_STORE_REGISTRY.get_data_store('local')
if not local_store:
add_to_data_store_registry()
local_store = DATA_STORE_REGISTRY.get_data_store('local')
if not local_store:
raise ValueError('Cannot initialize `local` DataStore')

data_sources = local_store.query(id=local_id) # type: Sequence['DataSource']
data_source = next((ds for ds in data_sources if isinstance(ds, LocalDataSource) and
ds.id == local_id), None) # type: LocalDataSource
if not data_source:
Expand Down Expand Up @@ -408,6 +416,10 @@ def update_local(self,
if to_add:
for time_range_to_add in to_add:
self._make_local(data_source, time_range_to_add, None, data_source.variables_info, monitor)
data_source.meta_info['temporal_coverage_start'] = time_range[0]
data_source.meta_info['temporal_coverage_end'] = time_range[1]
data_source.update_temporal_coverage(time_range)

return bool(to_remove or to_add)

def add_dataset(self, file, time_coverage: TimeRangeLike.TYPE = None, update: bool = False,
Expand Down Expand Up @@ -441,6 +453,15 @@ def _extend_temporal_coverage(self, time_range: TimeRangeLike.TYPE):
self._temporal_coverage = tuple([time_range[0], self._temporal_coverage[1]])
else:
self._temporal_coverage = time_range
self.save()

def update_temporal_coverage(self, time_range: TimeRangeLike.TYPE):
    """
    Extend this data source's temporal coverage to include *time_range*.

    Thin public wrapper around ``_extend_temporal_coverage``, which widens
    the stored coverage bounds and persists the change via ``save()``.

    :param time_range: Time range to be merged into the data source's
        temporal coverage.
    :return: None
    """
    self._extend_temporal_coverage(time_range)

def _reduce_temporal_coverage(self, time_range: TimeRangeLike.TYPE):
"""
Expand Down Expand Up @@ -676,6 +697,12 @@ def create_data_source(self, data_source_id: str, region: PolygonLike.TYPE = Non
del meta_info['uuid']

lock_filepath = os.path.join(self._store_dir, '{}.lock'.format(data_source_id))

if not data_source_id.startswith('%s.' % self.id):
data_source_id = '%s.%s' % (self.id, data_source_id)
lock_filename = '{}.lock'.format(data_source_id)
lock_filepath = os.path.join(self._store_dir, lock_filename)

data_source = None
for ds in self._data_sources:
if ds.id == data_source_id:
Expand Down
6 changes: 2 additions & 4 deletions cate/ops/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,13 +57,12 @@ def cate_init():
from .resampling import resample_2d, downsample_2d, upsample_2d
from .subset import subset_spatial, subset_temporal, subset_temporal_index
from .timeseries import tseries_point, tseries_mean
from .utility import sel, from_dataframe, identity, literal
from .utility import sel, from_dataframe, identity, literal, pandas_fillna
from .aggregate import long_term_average, temporal_aggregation
from .arithmetics import ds_arithmetics, diff
from .anomaly import anomaly_internal, anomaly_external
from .index import enso, enso_nino34, oni
from .outliers import detect_outliers
from .pandas import pandas_fillna


__all__ = [
Expand Down Expand Up @@ -111,6 +110,7 @@ def cate_init():
'from_dataframe',
'identity',
'literal',
'pandas_fillna',
# .aggregate
'long_term_average',
'temporal_aggregation',
Expand All @@ -126,6 +126,4 @@ def cate_init():
'oni',
# .outliers
'detect_outliers',
# .pandas
'pandas_fillna'
]
4 changes: 2 additions & 2 deletions cate/ops/aggregate.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@
from cate.ops.normalize import adjust_temporal_attrs


@op(tags=['aggregate'], version='1.0')
@op(tags=['aggregate', 'temporal'], version='1.0')
@op_input('ds', data_type=DatasetLike)
@op_input('var', value_set_source='ds', data_type=VarNamesLike)
@op_return(add_history=True)
Expand Down Expand Up @@ -132,7 +132,7 @@ def _mean(ds: xr.Dataset, monitor: Monitor, step: float):
return retset


@op(tags=['aggregate'], version='1.0')
@op(tags=['aggregate', 'temporal'], version='1.0')
@op_input('method', value_set=['mean', 'max', 'median', 'prod', 'sum', 'std',
'var', 'argmax', 'argmin', 'first', 'last'])
@op_input('ds', data_type=DatasetLike)
Expand Down
2 changes: 1 addition & 1 deletion cate/ops/anomaly.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
_ALL_FILE_FILTER = dict(name='All Files', extensions=['*'])


@op(tags=['anomaly', 'climatology'], version='1.0')
@op(tags=['anomaly'], version='1.0')
@op_input('file', file_open_mode='w', file_filters=[dict(name='NetCDF', extensions=['nc']), _ALL_FILE_FILTER])
@op_return(add_history=True)
def anomaly_external(ds: xr.Dataset,
Expand Down
2 changes: 1 addition & 1 deletion cate/ops/coregistration.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@
from cate.ops.normalize import adjust_spatial_attrs


@op(tags=['geometric', 'coregistration', 'geom', 'global', 'resampling'],
@op(tags=['geometric', 'coregistration'],
version='1.0')
@op_input('method_us', value_set=['nearest', 'linear'])
@op_input('method_ds', value_set=['first', 'last', 'mean', 'mode', 'var', 'std'])
Expand Down
2 changes: 1 addition & 1 deletion cate/ops/index.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@
_ALL_FILE_FILTER = dict(name='All Files', extensions=['*'])


@op(tags=['index', 'nino34'])
@op(tags=['index'])
@op_input('file', file_open_mode='r', file_filters=[dict(name='NetCDF', extensions=['nc']), _ALL_FILE_FILTER])
@op_input('var', value_set_source='ds', data_type=VarName)
def enso_nino34(ds: xr.Dataset,
Expand Down
2 changes: 1 addition & 1 deletion cate/ops/outliers.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
from cate import __version__


@op(version='1.0')
@op(tags=['filter'], version='1.0')
@op_input('ds', data_type=DatasetLike)
@op_input('var', value_set_source='ds', data_type=VarNamesLike)
@op_return(add_history=True)
Expand Down
71 changes: 0 additions & 71 deletions cate/ops/pandas.py

This file was deleted.

2 changes: 1 addition & 1 deletion cate/ops/plot.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@
PLOT_FILE_FILTER = dict(name='Plot Outputs', extensions=PLOT_FILE_EXTENSIONS)


@op(tags=['plot', 'map'])
@op(tags=['plot'])
@op_input('ds')
@op_input('var', value_set_source='ds', data_type=VarName)
@op_input('indexers', data_type=DictLike)
Expand Down
4 changes: 2 additions & 2 deletions cate/ops/select.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
from cate.core.types import VarNamesLike


@op(tags=['select', 'subset', 'filter', 'var'])
@op(tags=['filter'])
@op_input('ds')
@op_input('var', value_set_source='ds', data_type=VarNamesLike)
def select_var(ds: xr.Dataset, var: VarNamesLike.TYPE = None) -> xr.Dataset:
Expand Down Expand Up @@ -68,7 +68,7 @@ def select_var(ds: xr.Dataset, var: VarNamesLike.TYPE = None) -> xr.Dataset:
return ds.drop(dropped_var_names)


@op(tags=['select', 'filter', 'var'])
@op(tags=['filter'])
def select_features(df: gpd.GeoDataFrame, var: dict = None) -> gpd.GeoDataFrame:
"""
Filter the dataframe, by leaving only the desired features in it. The original dataframe
Expand Down
6 changes: 3 additions & 3 deletions cate/ops/subset.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@
from cate.ops.normalize import adjust_spatial_attrs, adjust_temporal_attrs


@op(tags=['geometric', 'subset', 'spatial', 'geom'], version='1.0')
@op(tags=['geometric', 'spatial', 'subset'], version='1.0')
@op_input('region', data_type=PolygonLike)
@op_return(add_history=True)
def subset_spatial(ds: xr.Dataset,
Expand Down Expand Up @@ -175,7 +175,7 @@ def _crosses_antimeridian(region: PolygonLike.TYPE) -> bool:
return False


@op(tags=['subset', 'temporal'], version='1.0')
@op(tags=['subset', 'temporal', 'filter'], version='1.0')
@op_input('ds', data_type=DatasetLike)
@op_input('time_range', data_type=TimeRangeLike)
@op_return(add_history=True)
Expand All @@ -202,7 +202,7 @@ def subset_temporal(ds: xr.Dataset,
' dataset may help'.format(ds.time.dtype))


@op(tags=['subset', 'temporal'], version='1.0')
@op(tags=['subset', 'temporal', 'filter', 'utility'], version='1.0')
@op_input('ds', data_type=DatasetLike)
@op_return(add_history=True)
def subset_temporal_index(ds: xr.Dataset,
Expand Down
4 changes: 2 additions & 2 deletions cate/ops/timeseries.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
from cate.core.types import VarNamesLike, PointLike


@op(tags=['timeseries', 'temporal', 'point'])
@op(tags=['timeseries', 'temporal', 'filter', 'point'])
@op_input('point', data_type=PointLike)
@op_input('method', value_set=['nearest', 'ffill', 'bfill'])
@op_input('var', value_set_source='ds', data_type=VarNamesLike)
Expand Down Expand Up @@ -75,7 +75,7 @@ def tseries_point(ds: xr.Dataset,
return retset.sel(method=method, **indexers)


@op(tags=['timeseries', 'temporal', 'aggregate', 'mean'])
@op(tags=['timeseries', 'temporal'])
@op_input('ds')
@op_input('var', value_set_source='ds', data_type=VarNamesLike)
def tseries_mean(ds: xr.Dataset,
Expand Down
Loading

0 comments on commit 8ee621c

Please sign in to comment.