Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature/wrapper_functions_api #500

Merged
merged 12 commits into from
Jul 6, 2022
Merged
8 changes: 4 additions & 4 deletions climada/test/test_api_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,8 +140,8 @@ def test_get_exposures(self):
dump_dir=DATA_DIR)
self.assertEqual(len(exposures.gdf), 5782)
self.assertEqual(np.unique(exposures.gdf.region_id), 40)
self.assertTrue('[0, 1]' in exposures.tag.description)
self.assertTrue('pop' in exposures.tag.description)
self.assertIn('(0, 1)', exposures.tag.description)
self.assertIn('pop', exposures.tag.description)
exposures

def test_get_exposures_fails(self):
Expand Down Expand Up @@ -189,9 +189,9 @@ def test_get_hazard_fails(self):
dump_dir=DATA_DIR)
self.assertIn('there are several datasets meeting the requirements:', str(cm.exception))

def test_get_litpop_default(self):
def test_get_litpop(self):
client = Client()
litpop = client.get_litpop_default(country='LUX', dump_dir=DATA_DIR)
litpop = client.get_litpop(country='LUX', dump_dir=DATA_DIR)
self.assertEqual(len(litpop.gdf), 188)
self.assertEqual(np.unique(litpop.gdf.region_id), 442)
self.assertTrue('[1, 1]' in litpop.tag.description)
Expand Down
79 changes: 66 additions & 13 deletions climada/util/api_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@

from climada import CONFIG
from climada.entity import Exposures
from climada.hazard import Hazard
from climada.hazard import Hazard, Centroids
from climada.util.constants import SYSTEM_DIR

LOGGER = logging.getLogger(__name__)
Expand Down Expand Up @@ -430,7 +430,10 @@ def get_dataset_info(self, data_type=None, name=None, version=None, properties=N
raise Client.AmbiguousResult("there are several datasets meeting the requirements:"
f" {jarr}")
if len(jarr) < 1:
raise Client.NoResult("there is no dataset meeting the requirements")
data_info = self.list_dataset_infos(data_type)
properties = self.get_property_values(data_info)
raise Client.NoResult("there is no dataset meeting the requirements, the following"
f" property values are available for {data_type}: {properties}")
return jarr[0]

def get_dataset_info_by_uuid(self, uuid):
Expand Down Expand Up @@ -532,12 +535,19 @@ def _tracked_download(self, remote_url, local_path):
raise Exception("tracked download requires a path to a file not a directory")
path_as_str = str(local_path.absolute())
try:
dlf = Download.create(url=remote_url, path=path_as_str, startdownload=datetime.utcnow())
dlf = Download.create(url=remote_url,
path=path_as_str,
startdownload=datetime.utcnow())
except IntegrityError as ierr:
dlf = Download.get(Download.path==path_as_str)
dlf = Download.get(Download.path==path_as_str) # path is the table's one unique column
if not Path(path_as_str).is_file(): # in case the file has been removed
dlf.delete_instance() # delete entry from database
return self._tracked_download(remote_url, local_path) # and try again
if dlf.url != remote_url:
raise Exception("this file has been downloaded from another url, "
"please purge the entry from data base before trying again") from ierr
raise Exception(f"this file ({path_as_str}) has been downloaded from another url"
f" ({dlf.url}), possibly because it belongs to a dataset with a"
" recent version update. Please remove the file or purge the entry"
" from data base before trying again") from ierr
return dlf
try:
self._download(url=remote_url, path=local_path, replace=True)
Expand Down Expand Up @@ -726,7 +736,6 @@ def to_hazard(self, dataset, dump_dir=SYSTEM_DIR):
"""
target_dir = self._organize_path(dataset, dump_dir) \
if dump_dir == SYSTEM_DIR else dump_dir

hazard_list = [
Hazard.from_hdf5(self._download_file(target_dir, dsf))
for dsf in dataset.files
Expand Down Expand Up @@ -813,7 +822,7 @@ def to_exposures(self, dataset, dump_dir=SYSTEM_DIR):
exposures_concat.check()
return exposures_concat

def get_litpop_default(self, country=None, dump_dir=SYSTEM_DIR):
def get_litpop(self, country=None, exponents=(1,1), dump_dir=SYSTEM_DIR):
"""Get a LitPop instance on a 150arcsec grid with the default parameters:
exponents = (1,1) and fin_mode = 'pc'.

Expand All @@ -822,6 +831,11 @@ def get_litpop_default(self, country=None, dump_dir=SYSTEM_DIR):
country : str or list, optional
List of country name or iso3 codes for which to create the LitPop object.
If None is given, a global LitPop instance is created. Default is None
exponents : tuple of two integers, optional
Defining power with which lit (nightlights) and pop (gpw) go into LitPop. To get
nightlights^3 without population count: (3, 0).
To use population count alone: (0, 1).
Default: (1, 1)
dump_dir : str
directory where the files should be downloaded. Default: SYSTEM_DIR

Expand All @@ -831,9 +845,7 @@ def get_litpop_default(self, country=None, dump_dir=SYSTEM_DIR):
default litpop Exposures object
"""
properties = {
'exponents': '(1,1)',
'fin_mode': 'pc'
}
'exponents': "".join(['(',str(exponents[0]),',',str(exponents[1]),')'])}
if country is None:
properties['spatial_coverage'] = 'global'
elif isinstance(country, str):
Expand All @@ -844,6 +856,49 @@ def get_litpop_default(self, country=None, dump_dir=SYSTEM_DIR):
raise ValueError("country must be string or list of strings")
return self.get_exposures(exposures_type='litpop', dump_dir=dump_dir, properties=properties)

def get_centroids(self, res_arcsec_land=150, res_arcsec_ocean=1800,
                  extent=(-180, 180, -60, 60), country=None,
                  dump_dir=SYSTEM_DIR):
    """Get centroids from the API.

    The global centroids dataset matching the requested resolutions is
    downloaded (or reused from a previous download) and then subset locally
    to the given country and/or extent.

    Parameters
    ----------
    res_arcsec_land : int
        resolution for land centroids in arcsec. Default is 150
    res_arcsec_ocean : int
        resolution for ocean centroids in arcsec. Default is 1800
    extent : tuple
        Format (min_lon, max_lon, min_lat, max_lat) tuple.
        If min_lon > max_lon, the extent crosses the antimeridian and is
        [max_lon, 180] + [-180, min_lon].
        Borders are inclusive. Default is (-180, 180, -60, 60).
    country : str
        country name, numeric code or iso code based on pycountry.
        Default is None (global).
    dump_dir : str
        directory where the files should be downloaded. Default: SYSTEM_DIR

    Returns
    -------
    climada.hazard.centroids.Centroids
        Centroids from the api
    """
    # Always request the full global dataset; country/extent subsetting is
    # done locally below, so the same downloaded file can be reused.
    properties = {
        'res_arcsec_land': str(res_arcsec_land),
        'res_arcsec_ocean': str(res_arcsec_ocean),
        'extent': '(-180, 180, -90, 90)'
    }
    dataset = self.get_dataset_info('centroids', properties=properties)
    target_dir = self._organize_path(dataset, dump_dir) \
        if dump_dir == SYSTEM_DIR else dump_dir
    centroids = Centroids.from_hdf5(self._download_file(target_dir, dataset.files[0]))
    if country:
        # reg_id in the centroids dataset is the ISO 3166-1 numeric code
        reg_id = pycountry.countries.lookup(country).numeric
        # a single select call applies both filters; no need to select
        # by extent a second time afterwards
        return centroids.select(reg_id=int(reg_id), extent=extent)
    if extent:
        centroids = centroids.select(extent=extent)

    return centroids

@staticmethod
def get_property_values(dataset_infos, known_property_values=None,
exclude_properties=None):
Expand Down Expand Up @@ -874,8 +929,6 @@ def get_property_values(dataset_infos, known_property_values=None,
if known_property_values:
for key, val in known_property_values.items():
ppdf = ppdf[ppdf[key] == val]
if len(ppdf) == 0:
raise Client.NoResult("there is no dataset meeting the requirements")

property_values = dict()
for col in ppdf.columns:
Expand Down
164 changes: 149 additions & 15 deletions doc/tutorial/climada_util_api_client.ipynb

Large diffs are not rendered by default.