MNT: Update code for Python 3
Found with pyupgrade 2.29.1
dopplershift committed Nov 18, 2021
1 parent 8b7e353 commit 3316bb6
Showing 23 changed files with 79 additions and 91 deletions.
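Every hunk below is a mechanical Python 3 cleanup of the kind pyupgrade flags: dropping from __future__ imports, calling super() with no arguments, removing the redundant object base class, and replacing str.format() and u'' literals with f-strings. The following is a minimal, hypothetical before/after sketch of those patterns (OldStyle and NewStyle are illustrative names, not siphon code):

# Hypothetical illustration only; OldStyle/NewStyle are not part of siphon.

# Before: Python 2/3-compatible idioms
class OldStyle(object):
    def __init__(self, name):
        super(OldStyle, self).__init__()
        self.label = u'name: {0}'.format(name)


# After: the Python 3-only equivalents
class NewStyle:
    def __init__(self, name):
        super().__init__()
        self.label = f'name: {name}'


print(OldStyle('gfs').label == NewStyle('gfs').label)  # True

Behavior is unchanged; only the syntax is modernized, which is why none of the hunks below touch program logic.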
2 changes: 0 additions & 2 deletions examples/Basic_Usage.py
@@ -13,8 +13,6 @@
"""

# This is currently a placeholder for a better example
-from __future__ import print_function
-
from siphon.catalog import TDSCatalog
from siphon.http_util import session_manager

2 changes: 1 addition & 1 deletion examples/ncss/NCSS_Cartopy_Example.py
@@ -111,7 +111,7 @@
plt.colorbar(cf, ax=ax, fraction=0.032)

# Make a title with the time value
-ax.set_title(u'Temperature forecast (\u00b0F) for {0:%d %B %Y %H:%MZ}'.format(time_val),
+ax.set_title(f'Temperature forecast (\u00b0F) for {time_val:%d %B %Y %H:%MZ}',
fontsize=20)

# Plot markers for each lat/long to show grid points for 0.25 deg GFS
6 changes: 3 additions & 3 deletions examples/ncss/NCSS_Example.py
@@ -72,12 +72,12 @@
# Now we can plot these up using matplotlib.
fig, ax = plt.subplots(1, 1, figsize=(9, 8))
ax.plot(temp_filtered[:].squeeze(), press_vals, 'r', linewidth=2)
-ax.set_xlabel('{} ({})'.format(temp.standard_name, temp.units))
-ax.set_ylabel('{} ({})'.format(press.standard_name, press.units))
+ax.set_xlabel(f'{temp.standard_name} ({temp.units})')
+ax.set_ylabel(f'{press.standard_name} ({press.units})')

# Create second plot with shared y-axis
ax2 = plt.twiny(ax)
ax2.plot(relh_filtered[:].squeeze(), press_vals, 'g', linewidth=2)
-ax2.set_xlabel('{} ({})'.format(relh.standard_name, relh.units))
+ax2.set_xlabel(f'{relh.standard_name} ({relh.units})')
ax.set_ylim(press_vals.max(), press_vals.min())
ax.grid(True)
2 changes: 1 addition & 1 deletion examples/ncss/NCSS_Timeseries_Examples.py
@@ -68,6 +68,6 @@
# objects.
fig, ax = plt.subplots(1, 1, figsize=(9, 8))
ax.plot(time_vals, temp[:].squeeze(), 'r', linewidth=2)
-ax.set_ylabel('{} ({})'.format(temp.standard_name, temp.units))
+ax.set_ylabel(f'{temp.standard_name} ({temp.units})')
ax.set_xlabel('Forecast Time (UTC)')
ax.grid(True)
37 changes: 19 additions & 18 deletions src/siphon/catalog.py
@@ -33,7 +33,7 @@ def __getitem__(self, item):
"""Return an item either by index or name."""
try:
item + '' # Raises if item not a string
-return super(IndexableMapping, self).__getitem__(item)
+return super().__getitem__(item)
except TypeError:
return list(self.values())[item]

@@ -209,42 +209,42 @@ class CaseInsensitiveDict(dict):

def __init__(self, *args, **kwargs):
"""Create a dict with a set of lowercase keys."""
-super(CaseInsensitiveDict, self).__init__(*args, **kwargs)
+super().__init__(*args, **kwargs)
self._keys_to_lower()

def __eq__(self, other):
"""Return true if other is case-insensitive equal to self."""
-return super(CaseInsensitiveDict, self).__eq__(CaseInsensitiveDict(other))
+return super().__eq__(CaseInsensitiveDict(other))

def __getitem__(self, key):
"""Return value from case-insensitive lookup of ``key``."""
-return super(CaseInsensitiveDict, self).__getitem__(CaseInsensitiveStr(key))
+return super().__getitem__(CaseInsensitiveStr(key))

def __setitem__(self, key, value):
"""Set value with lowercase ``key``."""
-super(CaseInsensitiveDict, self).__setitem__(CaseInsensitiveStr(key), value)
+super().__setitem__(CaseInsensitiveStr(key), value)

def __delitem__(self, key):
"""Delete value associated with case-insensitive lookup of ``key``."""
-return super(CaseInsensitiveDict, self).__delitem__(CaseInsensitiveStr(key))
+return super().__delitem__(CaseInsensitiveStr(key))

def __contains__(self, key):
"""Return true if key set includes case-insensitive ``key``."""
-return super(CaseInsensitiveDict, self).__contains__(CaseInsensitiveStr(key))
+return super().__contains__(CaseInsensitiveStr(key))

def pop(self, key, *args, **kwargs):
"""Remove and return the value associated with case-insensitive ``key``."""
-return super(CaseInsensitiveDict, self).pop(CaseInsensitiveStr(key))
+return super().pop(CaseInsensitiveStr(key))

def _keys_to_lower(self):
"""Convert key set to lowercase."""
for k in list(self.keys()):
-val = super(CaseInsensitiveDict, self).__getitem__(k)
-super(CaseInsensitiveDict, self).__delitem__(k)
+val = super().__getitem__(k)
+super().__delitem__(k)
self.__setitem__(CaseInsensitiveStr(k), val)


-class TDSCatalog(object):
+class TDSCatalog:
"""
Parse information from a THREDDS Client Catalog.
@@ -330,8 +330,8 @@ def __init__(self, catalog_url):
elif (tag_type == 'metadata') or (tag_type == ''):
self._process_metadata(child, tag_type)
elif tag_type == 'service':
-if CaseInsensitiveStr(child.attrib['serviceType'])\
-!= CaseInsensitiveStr('Compound'):
+if (CaseInsensitiveStr(child.attrib['serviceType'])
+!= CaseInsensitiveStr('Compound')):
# we do not want to process single services if they
# are already contained within a compound service, so
# we need to skip over those cases.
@@ -380,8 +380,9 @@ def _process_datasets(self):
# check to see if dataset needs to have access urls created, if not,
# remove the dataset
has_url_path = self.datasets[ds_name].url_path is not None
-is_ds_with_access_elements_to_process = \
+is_ds_with_access_elements_to_process = (
ds_name in self.ds_with_access_elements_to_process
+)
if has_url_path or is_ds_with_access_elements_to_process:
self.datasets[ds_name].make_access_urls(
self.base_tds_url, self.services, metadata=self.metadata)
@@ -400,7 +401,7 @@ def latest(self):
__repr__ = __str__


-class CatalogRef(object):
+class CatalogRef:
"""
An object for holding catalog references obtained from a THREDDS Client Catalog.
@@ -452,7 +453,7 @@ def follow(self):
__repr__ = __str__


-class Dataset(object):
+class Dataset:
"""
An object for holding Datasets obtained from a THREDDS Client Catalog.
@@ -755,7 +756,7 @@ def access_with_service(self, service, use_xarray=None):
__repr__ = __str__


-class SimpleService(object):
+class SimpleService:
"""Hold information about an access service enabled on a dataset.
Attributes
@@ -790,7 +791,7 @@ def is_resolver(self):
return self.service_type == 'Resolver'


-class CompoundService(object):
+class CompoundService:
"""Hold information about compound services.
Attributes
4 changes: 2 additions & 2 deletions src/siphon/cdmr/cdmremote.py
@@ -14,7 +14,7 @@ class CDMRemote(HTTPEndPoint):

def __init__(self, url):
"""Initialize access to a particular url."""
-super(CDMRemote, self).__init__(url)
+super().__init__(url)
self.deflate = 0

def _fetch(self, query):
@@ -54,7 +54,7 @@ def query(self):
The created query.
"""
q = super(CDMRemote, self).query()
q = super().query()

# Turn on compression if it's been set on the object
if self.deflate:
10 changes: 5 additions & 5 deletions src/siphon/cdmr/coveragedataset.py
@@ -27,7 +27,7 @@ class CoverageDataset(AttributeContainer):

def __init__(self, url):
"""Initialize CoverageDataset from a url pointing to CDMRemoteFeature endpoint."""
-super(CoverageDataset, self).__init__()
+super().__init__()
warnings.warn('CoverageDataset is in early development, unsupported, and API may '
'change at any time.')
self.cdmrf = CDMRemoteFeature(url)
@@ -74,9 +74,9 @@ def __str__(self):
if self.name:
print_groups.append(self.name + ' (' + str(self.type) + ')')

-print_groups.append('Lon/Lat Domain: {0}'.format(self.lon_lat_domain))
-print_groups.append('Projected Domain: {0}'.format(self.proj_domain))
-print_groups.append('Date Range: {0}'.format(self.date_range))
+print_groups.append(f'Lon/Lat Domain: {self.lon_lat_domain}')
+print_groups.append(f'Projected Domain: {self.proj_domain}')
+print_groups.append(f'Date Range: {self.date_range}')

indent = ' ' * 4
if self.axes:
@@ -102,5 +102,5 @@ def __str__(self):
if self.ncattrs():
print_groups.append('Attributes:')
for att in self.ncattrs():
-print_groups.append('{0}{1}: {2}'.format(indent, att, getattr(self, att)))
+print_groups.append(f'{indent}{att}: {getattr(self, att)}')
return '\n'.join(print_groups)
24 changes: 11 additions & 13 deletions src/siphon/cdmr/dataset.py
@@ -3,8 +3,6 @@
# SPDX-License-Identifier: BSD-3-Clause
"""Provide a netCDF4-like interface on top of CDMRemote and NCStream."""

-from __future__ import print_function
-
from collections import OrderedDict
import enum
import logging
@@ -16,7 +14,7 @@
log = logging.getLogger(__name__)


-class AttributeContainer(object):
+class AttributeContainer:
"""Unpack and provide access to attributes."""

def __init__(self):
@@ -39,7 +37,7 @@ class Group(AttributeContainer):

def __init__(self, parent=None):
"""Initialize a Group."""
-super(Group, self).__init__()
+super().__init__()
self.groups = OrderedDict()
self.variables = OrderedDict()
self.dimensions = OrderedDict()
@@ -98,7 +96,7 @@ def __str__(self):
print_groups.append('Groups:')
for group in self.groups.values():
print_groups.append(str(group))
-print_groups.append(str('---end group---'))
+print_groups.append('---end group---')

if self.dimensions:
print_groups.append('Dimensions:')
@@ -118,7 +116,7 @@ def __str__(self):
if self.ncattrs():
print_groups.append('Attributes:')
for att in self.ncattrs():
-print_groups.append('\t{}: {}'.format(att, getattr(self, att)))
+print_groups.append(f'\t{att}: {getattr(self, att)}')
return '\n'.join(print_groups)

__repr__ = __str__
@@ -129,7 +127,7 @@ class Dataset(Group):

def __init__(self, url):
"""Initialize the dataset."""
-super(Dataset, self).__init__()
+super().__init__()
self.cdmr = CDMRemote(url)
self.url = url
self._read_header()
@@ -143,7 +141,7 @@ def _read_header(self):

def __str__(self):
"""Return a string representation of the Dataset and all contained members."""
-return self.url + '\n' + super(Dataset, self).__str__()
+return self.url + '\n' + super().__str__()

__repr__ = __str__

@@ -153,7 +151,7 @@ class Variable(AttributeContainer):

def __init__(self, group, name):
"""Initialize the Variable."""
-super(Variable, self).__init__()
+super().__init__()
self._group = group
self.name = name
self.dimensions = ()
@@ -317,7 +315,7 @@ def __str__(self):
groups.append('{} {}({})'.format(self.datatype, self.name,
', '.join(self.dimensions)))
for att in self.ncattrs():
-groups.append('\t{}: {}'.format(att, getattr(self, att)))
+groups.append(f'\t{att}: {getattr(self, att)}')
if self.ndim:
if self.ndim > 1:
shape_str = '(' + ', '.join(str(s) for s in self.shape) + ')'
@@ -327,7 +325,7 @@ def __str__(self):
return '\n'.join(groups)


-class Dimension(object):
+class Dimension:
"""Hold information about dimensions shared between variables."""

def __init__(self, group, name, size=None):
@@ -362,7 +360,7 @@ def __len__(self):

def __str__(self):
"""Return a string representation of the Dimension information."""
-grps = ['{} '.format(type(self))]
+grps = [f'{type(self)} ']
if self.unlimited:
grps.append('(unlimited): ')

@@ -374,7 +372,7 @@ def __str__(self):
if self.vlen:
grps.append(', (vlen)')
else:
-grps.append(', size = {0}'.format(self.size))
+grps.append(f', size = {self.size}')

return ''.join(grps)

10 changes: 3 additions & 7 deletions src/siphon/cdmr/ncstream.py
@@ -3,8 +3,6 @@
# SPDX-License-Identifier: BSD-3-Clause
"""Handle binary stream returns in NCStream format."""

-from __future__ import print_function
-
from collections import OrderedDict
import itertools
import logging
@@ -62,8 +60,7 @@ def read_ncstream_data(fobj):
bin_data = zlib.decompress(bin_data)
assert len(bin_data) == data.uncompressedSize
elif data.compress != stream.NONE:
-raise NotImplementedError('Compression type {0} not implemented!'.format(
-data.compress))
+raise NotImplementedError(f'Compression type {data.compress} not implemented!')

# Turn bytes into an array
return reshape_array(data, np.frombuffer(bin_data, dtype=dt))
@@ -87,8 +84,7 @@ def read_ncstream_data(fobj):
magic = read_magic(fobj)
return data, blocks
else:
-raise NotImplementedError("Don't know how to handle data type: {0}".format(
-data.dataType))
+raise NotImplementedError(f"Don't know how to handle data type: {data.dataType}")


def read_ncstream_data2(fobj):
@@ -144,7 +140,7 @@ def read_messages(fobj, magic_table):
if func is not None:
messages.append(func(fobj))
else:
-log.error('Unknown magic: ' + str(' '.join('{0:02x}'.format(b)
+log.error('Unknown magic: ' + str(' '.join(f'{b:02x}'
for b in bytearray(magic))))

return messages