diff --git a/examples/Basic_Usage.py b/examples/Basic_Usage.py
index d86a8aa66..f917bdd9c 100644
--- a/examples/Basic_Usage.py
+++ b/examples/Basic_Usage.py
@@ -13,7 +13,6 @@
 """
 # This is currently a placeholder for a better example
-from __future__ import print_function
 
 from siphon.catalog import TDSCatalog
 from siphon.http_util import session_manager
diff --git a/examples/ncss/NCSS_Cartopy_Example.py b/examples/ncss/NCSS_Cartopy_Example.py
index 57eb560c0..1ba80cc9a 100644
--- a/examples/ncss/NCSS_Cartopy_Example.py
+++ b/examples/ncss/NCSS_Cartopy_Example.py
@@ -111,7 +111,7 @@
 plt.colorbar(cf, ax=ax, fraction=0.032)
 
 # Make a title with the time value
-ax.set_title(u'Temperature forecast (\u00b0F) for {0:%d %B %Y %H:%MZ}'.format(time_val),
+ax.set_title(f'Temperature forecast (\u00b0F) for {time_val:%d %B %Y %H:%MZ}',
              fontsize=20)
 
 # Plot markers for each lat/long to show grid points for 0.25 deg GFS
diff --git a/examples/ncss/NCSS_Example.py b/examples/ncss/NCSS_Example.py
index 11c11d36b..0411aac83 100644
--- a/examples/ncss/NCSS_Example.py
+++ b/examples/ncss/NCSS_Example.py
@@ -72,12 +72,12 @@
 # Now we can plot these up using matplotlib.
 fig, ax = plt.subplots(1, 1, figsize=(9, 8))
 ax.plot(temp_filtered[:].squeeze(), press_vals, 'r', linewidth=2)
-ax.set_xlabel('{} ({})'.format(temp.standard_name, temp.units))
-ax.set_ylabel('{} ({})'.format(press.standard_name, press.units))
+ax.set_xlabel(f'{temp.standard_name} ({temp.units})')
+ax.set_ylabel(f'{press.standard_name} ({press.units})')
 
 # Create second plot with shared y-axis
 ax2 = plt.twiny(ax)
 ax2.plot(relh_filtered[:].squeeze(), press_vals, 'g', linewidth=2)
-ax2.set_xlabel('{} ({})'.format(relh.standard_name, relh.units))
+ax2.set_xlabel(f'{relh.standard_name} ({relh.units})')
 ax.set_ylim(press_vals.max(), press_vals.min())
 ax.grid(True)
diff --git a/examples/ncss/NCSS_Timeseries_Examples.py b/examples/ncss/NCSS_Timeseries_Examples.py
index 52de3dd4c..b3ce05b4e 100644
--- a/examples/ncss/NCSS_Timeseries_Examples.py
+++ b/examples/ncss/NCSS_Timeseries_Examples.py
@@ -68,6 +68,6 @@
 # objects.
 fig, ax = plt.subplots(1, 1, figsize=(9, 8))
 ax.plot(time_vals, temp[:].squeeze(), 'r', linewidth=2)
-ax.set_ylabel('{} ({})'.format(temp.standard_name, temp.units))
+ax.set_ylabel(f'{temp.standard_name} ({temp.units})')
 ax.set_xlabel('Forecast Time (UTC)')
 ax.grid(True)
diff --git a/src/siphon/catalog.py b/src/siphon/catalog.py
index 1bbdb302d..6dbc2dfb4 100644
--- a/src/siphon/catalog.py
+++ b/src/siphon/catalog.py
@@ -33,7 +33,7 @@ def __getitem__(self, item):
         """Return an item either by index or name."""
         try:
             item + ''  # Raises if item not a string
-            return super(IndexableMapping, self).__getitem__(item)
+            return super().__getitem__(item)
         except TypeError:
             return list(self.values())[item]
@@ -209,42 +209,42 @@ class CaseInsensitiveDict(dict):
 
     def __init__(self, *args, **kwargs):
        """Create a dict with a set of lowercase keys."""
-        super(CaseInsensitiveDict, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self._keys_to_lower()
 
     def __eq__(self, other):
         """Return true if other is case-insensitive equal to self."""
-        return super(CaseInsensitiveDict, self).__eq__(CaseInsensitiveDict(other))
+        return super().__eq__(CaseInsensitiveDict(other))
 
     def __getitem__(self, key):
         """Return value from case-insensitive lookup of ``key``."""
-        return super(CaseInsensitiveDict, self).__getitem__(CaseInsensitiveStr(key))
+        return super().__getitem__(CaseInsensitiveStr(key))
 
     def __setitem__(self, key, value):
         """Set value with lowercase ``key``."""
-        super(CaseInsensitiveDict, self).__setitem__(CaseInsensitiveStr(key), value)
+        super().__setitem__(CaseInsensitiveStr(key), value)
 
     def __delitem__(self, key):
         """Delete value associated with case-insensitive lookup of ``key``."""
-        return super(CaseInsensitiveDict, self).__delitem__(CaseInsensitiveStr(key))
+        return super().__delitem__(CaseInsensitiveStr(key))
 
     def __contains__(self, key):
         """Return true if key set includes case-insensitive ``key``."""
-        return super(CaseInsensitiveDict, self).__contains__(CaseInsensitiveStr(key))
+        return super().__contains__(CaseInsensitiveStr(key))
 
     def pop(self, key, *args, **kwargs):
         """Remove and return the value associated with case-insensitive ``key``."""
-        return super(CaseInsensitiveDict, self).pop(CaseInsensitiveStr(key))
+        return super().pop(CaseInsensitiveStr(key))
 
     def _keys_to_lower(self):
         """Convert key set to lowercase."""
         for k in list(self.keys()):
-            val = super(CaseInsensitiveDict, self).__getitem__(k)
-            super(CaseInsensitiveDict, self).__delitem__(k)
+            val = super().__getitem__(k)
+            super().__delitem__(k)
             self.__setitem__(CaseInsensitiveStr(k), val)
 
 
-class TDSCatalog(object):
+class TDSCatalog:
     """
     Parse information from a THREDDS Client Catalog.
 
@@ -330,8 +330,8 @@ def __init__(self, catalog_url):
             elif (tag_type == 'metadata') or (tag_type == ''):
                 self._process_metadata(child, tag_type)
             elif tag_type == 'service':
-                if CaseInsensitiveStr(child.attrib['serviceType'])\
-                        != CaseInsensitiveStr('Compound'):
+                if (CaseInsensitiveStr(child.attrib['serviceType'])
+                        != CaseInsensitiveStr('Compound')):
                     # we do not want to process single services if they
                     # are already contained within a compound service, so
                     # we need to skip over those cases.
@@ -380,8 +380,9 @@ def _process_datasets(self):
             # check to see if dataset needs to have access urls created, if not,
             # remove the dataset
             has_url_path = self.datasets[ds_name].url_path is not None
-            is_ds_with_access_elements_to_process = \
-                ds_name in self.ds_with_access_elements_to_process
+            is_ds_with_access_elements_to_process = (
+                ds_name in self.ds_with_access_elements_to_process
+            )
             if has_url_path or is_ds_with_access_elements_to_process:
                 self.datasets[ds_name].make_access_urls(
                     self.base_tds_url, self.services, metadata=self.metadata)
@@ -400,7 +401,7 @@ def latest(self):
     __repr__ = __str__
 
 
-class CatalogRef(object):
+class CatalogRef:
     """
     An object for holding catalog references obtained from a THREDDS Client Catalog.
 
@@ -452,7 +453,7 @@ def follow(self):
     __repr__ = __str__
 
 
-class Dataset(object):
+class Dataset:
     """
     An object for holding Datasets obtained from a THREDDS Client Catalog.
 
@@ -755,7 +756,7 @@ def access_with_service(self, service, use_xarray=None):
     __repr__ = __str__
 
 
-class SimpleService(object):
+class SimpleService:
     """Hold information about an access service enabled on a dataset.
 
     Attributes
@@ -790,7 +791,7 @@ def is_resolver(self):
         return self.service_type == 'Resolver'
 
 
-class CompoundService(object):
+class CompoundService:
     """Hold information about compound services.
 
     Attributes
diff --git a/src/siphon/cdmr/cdmremote.py b/src/siphon/cdmr/cdmremote.py
index cf38e0c47..17a4af025 100644
--- a/src/siphon/cdmr/cdmremote.py
+++ b/src/siphon/cdmr/cdmremote.py
@@ -14,7 +14,7 @@ class CDMRemote(HTTPEndPoint):
 
     def __init__(self, url):
         """Initialize access to a particular url."""
-        super(CDMRemote, self).__init__(url)
+        super().__init__(url)
         self.deflate = 0
 
     def _fetch(self, query):
@@ -54,7 +54,7 @@ def query(self):
             The created query.
 
         """
-        q = super(CDMRemote, self).query()
+        q = super().query()
 
         # Turn on compression if it's been set on the object
         if self.deflate:
diff --git a/src/siphon/cdmr/coveragedataset.py b/src/siphon/cdmr/coveragedataset.py
index ef58ac22e..7d885a1a1 100644
--- a/src/siphon/cdmr/coveragedataset.py
+++ b/src/siphon/cdmr/coveragedataset.py
@@ -27,7 +27,7 @@ class CoverageDataset(AttributeContainer):
 
     def __init__(self, url):
         """Initialize CoverageDataset from a url pointing to CDMRemoteFeature endpoint."""
-        super(CoverageDataset, self).__init__()
+        super().__init__()
         warnings.warn('CoverageDataset is in early development, unsupported, and API may '
                       'change at any time.')
         self.cdmrf = CDMRemoteFeature(url)
@@ -74,9 +74,9 @@ def __str__(self):
         if self.name:
             print_groups.append(self.name + ' (' + str(self.type) + ')')
 
-        print_groups.append('Lon/Lat Domain: {0}'.format(self.lon_lat_domain))
-        print_groups.append('Projected Domain: {0}'.format(self.proj_domain))
-        print_groups.append('Date Range: {0}'.format(self.date_range))
+        print_groups.append(f'Lon/Lat Domain: {self.lon_lat_domain}')
+        print_groups.append(f'Projected Domain: {self.proj_domain}')
+        print_groups.append(f'Date Range: {self.date_range}')
 
         indent = ' ' * 4
         if self.axes:
@@ -102,5 +102,5 @@ def __str__(self):
         if self.ncattrs():
             print_groups.append('Attributes:')
             for att in self.ncattrs():
-                print_groups.append('{0}{1}: {2}'.format(indent, att, getattr(self, att)))
+                print_groups.append(f'{indent}{att}: {getattr(self, att)}')
         return '\n'.join(print_groups)
diff --git a/src/siphon/cdmr/dataset.py b/src/siphon/cdmr/dataset.py
index d83b144e0..db181299e 100644
--- a/src/siphon/cdmr/dataset.py
+++ b/src/siphon/cdmr/dataset.py
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: BSD-3-Clause
 """Provide a netCDF4-like interface on top of CDMRemote and NCStream."""
-from __future__ import print_function
 
 from collections import OrderedDict
 import enum
@@ -16,7 +15,7 @@
 log = logging.getLogger(__name__)
 
 
-class AttributeContainer(object):
+class AttributeContainer:
     """Unpack and provide access to attributes."""
 
     def __init__(self):
@@ -39,7 +38,7 @@ class Group(AttributeContainer):
 
     def __init__(self, parent=None):
         """Initialize a Group."""
-        super(Group, self).__init__()
+        super().__init__()
         self.groups = OrderedDict()
         self.variables = OrderedDict()
         self.dimensions = OrderedDict()
@@ -98,7 +97,7 @@ def __str__(self):
             print_groups.append('Groups:')
             for group in self.groups.values():
                 print_groups.append(str(group))
-                print_groups.append(str('---end group---'))
+                print_groups.append('---end group---')
 
         if self.dimensions:
             print_groups.append('Dimensions:')
@@ -118,7 +117,7 @@ def __str__(self):
         if self.ncattrs():
             print_groups.append('Attributes:')
             for att in self.ncattrs():
-                print_groups.append('\t{}: {}'.format(att, getattr(self, att)))
+                print_groups.append(f'\t{att}: {getattr(self, att)}')
         return '\n'.join(print_groups)
 
     __repr__ = __str__
@@ -129,7 +128,7 @@ class Dataset(Group):
 
     def __init__(self, url):
         """Initialize the dataset."""
-        super(Dataset, self).__init__()
+        super().__init__()
         self.cdmr = CDMRemote(url)
         self.url = url
         self._read_header()
@@ -143,7 +142,7 @@ def _read_header(self):
 
     def __str__(self):
         """Return a string representation of the Dataset and all contained members."""
-        return self.url + '\n' + super(Dataset, self).__str__()
+        return self.url + '\n' + super().__str__()
 
     __repr__ = __str__
@@ -153,7 +152,7 @@ class Variable(AttributeContainer):
 
     def __init__(self, group, name):
         """Initialize the Variable."""
-        super(Variable, self).__init__()
+        super().__init__()
         self._group = group
         self.name = name
         self.dimensions = ()
@@ -317,7 +316,7 @@ def __str__(self):
         groups.append('{} {}({})'.format(self.datatype, self.name,
                                          ', '.join(self.dimensions)))
         for att in self.ncattrs():
-            groups.append('\t{}: {}'.format(att, getattr(self, att)))
+            groups.append(f'\t{att}: {getattr(self, att)}')
         if self.ndim:
             if self.ndim > 1:
                 shape_str = '(' + ', '.join(str(s) for s in self.shape) + ')'
@@ -327,7 +326,7 @@ def __str__(self):
         return '\n'.join(groups)
 
 
-class Dimension(object):
+class Dimension:
     """Hold information about dimensions shared between variables."""
 
     def __init__(self, group, name, size=None):
@@ -362,7 +361,7 @@ def __len__(self):
 
     def __str__(self):
         """Return a string representation of the Dimension information."""
-        grps = ['{} '.format(type(self))]
+        grps = [f'{type(self)} ']
         if self.unlimited:
             grps.append('(unlimited): ')
@@ -374,7 +373,7 @@ def __str__(self):
         if self.vlen:
             grps.append(', (vlen)')
         else:
-            grps.append(', size = {0}'.format(self.size))
+            grps.append(f', size = {self.size}')
 
         return ''.join(grps)
diff --git a/src/siphon/cdmr/ncstream.py b/src/siphon/cdmr/ncstream.py
index 468c624b7..d353ca321 100644
--- a/src/siphon/cdmr/ncstream.py
+++ b/src/siphon/cdmr/ncstream.py
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: BSD-3-Clause
 """Handle binary stream returns in NCStream format."""
-from __future__ import print_function
 
 from collections import OrderedDict
 import itertools
@@ -62,8 +61,7 @@ def read_ncstream_data(fobj):
             bin_data = zlib.decompress(bin_data)
             assert len(bin_data) == data.uncompressedSize
         elif data.compress != stream.NONE:
-            raise NotImplementedError('Compression type {0} not implemented!'.format(
-                data.compress))
+            raise NotImplementedError(f'Compression type {data.compress} not implemented!')
 
         # Turn bytes into an array
         return reshape_array(data, np.frombuffer(bin_data, dtype=dt))
@@ -87,8 +85,7 @@ def read_ncstream_data(fobj):
             magic = read_magic(fobj)
         return data, blocks
     else:
-        raise NotImplementedError("Don't know how to handle data type: {0}".format(
-            data.dataType))
+        raise NotImplementedError(f"Don't know how to handle data type: {data.dataType}")
 
 
 def read_ncstream_data2(fobj):
@@ -144,7 +141,7 @@ def read_messages(fobj, magic_table):
         if func is not None:
             messages.append(func(fobj))
         else:
-            log.error('Unknown magic: ' + str(' '.join('{0:02x}'.format(b)
+            log.error('Unknown magic: ' + str(' '.join(f'{b:02x}'
                                                        for b in bytearray(magic))))
 
     return messages
diff --git a/src/siphon/http_util.py b/src/siphon/http_util.py
index 81be55604..22f5fa782 100644
--- a/src/siphon/http_util.py
+++ b/src/siphon/http_util.py
@@ -49,12 +49,12 @@ def dst(self, dt):  # pylint:disable=unused-argument
 utc = UTC()
 
 
-class HTTPSessionManager(object):
+class HTTPSessionManager:
     """Manage the creation of sessions for HTTP access."""
 
     def __init__(self):
         """Initialize ``HTTPSessionManager``."""
-        self.user_agent = 'Siphon ({})'.format(__version__)
+        self.user_agent = f'Siphon ({__version__})'
         self.options = {}
 
     def set_session_options(self, **kwargs):
@@ -139,7 +139,7 @@ def parse_iso_date(s):
     return datetime.strptime(s, '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=utc)
 
 
-class DataQuery(object):
+class DataQuery:
     """Represent a query for data from a THREDDS server.
 
     This object provides a clear API to formulate a query for data, including
@@ -366,7 +366,7 @@ def __repr__(self):
         return str(self)
 
 
-class HTTPEndPoint(object):
+class HTTPEndPoint:
     """An object representing an endpoint on a server that is accessed using HTTP.
 
     This provides a simple way to point to a URL, formulate appropriate queries and
@@ -489,10 +489,10 @@ def get(self, path, params=None):
                 text = resp.reason
             else:
                 text = resp.text
-            raise requests.HTTPError('Error accessing {0}\n'
-                                     'Server Error ({1:d}: {2})'.format(resp.request.url,
-                                                                        resp.status_code,
-                                                                        text))
+            raise requests.HTTPError('Error accessing {}\n'
+                                     'Server Error ({:d}: {})'.format(resp.request.url,
+                                                                      resp.status_code,
+                                                                      text))
         return resp
 
     def _get_metadata(self):
diff --git a/src/siphon/metadata.py b/src/siphon/metadata.py
index d5692aae8..e5e2ccfcf 100644
--- a/src/siphon/metadata.py
+++ b/src/siphon/metadata.py
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: BSD-3-Clause
 """Helps support reading and parsing metadata elements from a TDS client catalog."""
-from __future__ import print_function
 
 import logging
@@ -14,7 +13,7 @@
 xlink_title_attr = '{http://www.w3.org/1999/xlink}title'
 
 
-class _SimpleTypes(object):
+class _SimpleTypes:
     def __init__(self):
         self._valid = {'dataFormat': self._load_valid_data_format_types(),
                        'upOrDown': self._load_valid_up_or_down(),
@@ -165,7 +164,7 @@ def handle_dataType(self, element):  # noqa
         return {type_name: val}
 
 
-class _ComplexTypes(object):
+class _ComplexTypes:
     @staticmethod
     def _get_tag_name(element):
         if '}' in element.tag:
@@ -441,7 +440,7 @@ def handle_dataSize(self, element):  # noqa
         return data_size
 
 
-class TDSCatalogMetadata(object):
+class TDSCatalogMetadata:
     """Hold information contained in the catalog Metadata tag.
 
     Attributes
@@ -513,7 +512,7 @@ def _get_handler(self, handler_name):
         elif handler_name in self._sts:
             return getattr(self._st, handler_name)
         else:
-            msg = 'cannot find handler for element {}'.format(handler_name)
+            msg = f'cannot find handler for element {handler_name}'
             log.warning(msg)
 
     def _parse_element(self, element):
diff --git a/src/siphon/ncss.py b/src/siphon/ncss.py
index 451a423a3..71dbb2889 100644
--- a/src/siphon/ncss.py
+++ b/src/siphon/ncss.py
@@ -269,7 +269,7 @@ def vertical_level(self, level):
 # Use at your own risk!
 #
 
-class ResponseRegistry(object):
+class ResponseRegistry:
     """Register functions to be called based on the mimetype in the response headers."""
 
     def __init__(self):
diff --git a/src/siphon/ncss_dataset.py b/src/siphon/ncss_dataset.py
index cb08a2367..2d39bc859 100644
--- a/src/siphon/ncss_dataset.py
+++ b/src/siphon/ncss_dataset.py
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: BSD-3-Clause
 """Support reading and parsing the dataset.xml documents from the netCDF Subset Service."""
-from __future__ import print_function
 
 import logging
 import re
@@ -21,7 +20,7 @@ def _without_namespace(tagname):
     return tagname
 
 
-class _Types(object):
+class _Types:
     @staticmethod
     def handle_typed_values(val, type_name, value_type):
         """Translate typed values into the appropriate python object.
@@ -201,11 +200,11 @@ def lookup(self, handler_name):
         if handler_name in dir(self):
             return getattr(self, handler_name)
         else:
-            msg = 'cannot find handler for element {}'.format(handler_name)
+            msg = f'cannot find handler for element {handler_name}'
             log.warning(msg)
 
 
-class NCSSDataset(object):
+class NCSSDataset:
     """Hold information contained in the dataset.xml NCSS document.
 
     In general, if a dataset.xml NCSS document is missing the information
diff --git a/src/siphon/radarserver.py b/src/siphon/radarserver.py
index 2f5c7fcd7..6f3fe666b 100644
--- a/src/siphon/radarserver.py
+++ b/src/siphon/radarserver.py
@@ -75,7 +75,7 @@ def __init__(self, url):
         xmlfile = '/dataset.xml'
         if url.endswith(xmlfile):
             url = url[:-len(xmlfile)]
-        super(RadarServer, self).__init__(url)
+        super().__init__(url)
 
     def _get_metadata(self):
         ds_cat = TDSCatalog(self.url_path('dataset.xml'))
diff --git a/src/siphon/simplewebservice/iastate.py b/src/siphon/simplewebservice/iastate.py
index 3d31dddc8..1b54b52e0 100644
--- a/src/siphon/simplewebservice/iastate.py
+++ b/src/siphon/simplewebservice/iastate.py
@@ -22,7 +22,7 @@ class IAStateUpperAir(HTTPEndPoint):
 
     def __init__(self):
         """Set up endpoint."""
-        super(IAStateUpperAir, self).__init__('http://mesonet.agron.iastate.edu/json')
+        super().__init__('http://mesonet.agron.iastate.edu/json')
 
     @classmethod
     def request_data(cls, time, site_id, interp_nans=False, **kwargs):
@@ -176,11 +176,11 @@ def _get_data_raw(self, time, site_id, pressure=None):
         if not (json_data['profiles'] and json_data['profiles'][0]['profile']):
             message = 'No data available '
             if time is not None:
-                message += 'for {time:%Y-%m-%d %HZ} '.format(time=time)
+                message += f'for {time:%Y-%m-%d %HZ} '
             if site_id is not None:
-                message += 'for station {stid}'.format(stid=site_id)
+                message += f'for station {site_id}'
             if pressure is not None:
-                message += 'for pressure {pres}'.format(pres=pressure)
+                message += f'for pressure {pressure}'
             message = message + '.'
             raise ValueError(message)
         return json_data
diff --git a/src/siphon/simplewebservice/igra2.py b/src/siphon/simplewebservice/igra2.py
index 4a99f8cb0..673ecae82 100644
--- a/src/siphon/simplewebservice/igra2.py
+++ b/src/siphon/simplewebservice/igra2.py
@@ -30,7 +30,7 @@ def __init__(self):
         self.end_date = ''
         self.site_id = ''
         self.folder = ''
-        super(IGRAUpperAir, self).__init__('https://www1.ncdc.noaa.gov/pub/data/igra/')
+        super().__init__('https://www1.ncdc.noaa.gov/pub/data/igra/')
 
     @classmethod
     def request_data(cls, time, site_id, derived=False):
diff --git a/src/siphon/simplewebservice/ndbc.py b/src/siphon/simplewebservice/ndbc.py
index aca3e8c21..9e19476ae 100644
--- a/src/siphon/simplewebservice/ndbc.py
+++ b/src/siphon/simplewebservice/ndbc.py
@@ -20,7 +20,7 @@ class NDBC(HTTPEndPoint):
 
     def __init__(self):
         """Set up endpoint."""
-        super(NDBC, self).__init__('https://www.ndbc.noaa.gov/')
+        super().__init__('https://www.ndbc.noaa.gov/')
 
     @classmethod
     def realtime_observations(cls, buoy, data_type='txt'):
@@ -552,5 +552,5 @@ def raw_buoy_data(cls, buoy, data_type='txt'):
 
         """
         endpoint = cls()
-        resp = endpoint.get_path('data/realtime2/{}.{}'.format(buoy, data_type))
+        resp = endpoint.get_path(f'data/realtime2/{buoy}.{data_type}')
         return resp.text
diff --git a/src/siphon/simplewebservice/wyoming.py b/src/siphon/simplewebservice/wyoming.py
index 4e7fa0d67..25761f952 100644
--- a/src/siphon/simplewebservice/wyoming.py
+++ b/src/siphon/simplewebservice/wyoming.py
@@ -23,7 +23,7 @@ class WyomingUpperAir(HTTPEndPoint):
 
     def __init__(self):
         """Set up endpoint."""
-        super(WyomingUpperAir, self).__init__('http://weather.uwyo.edu/cgi-bin/sounding')
+        super().__init__('http://weather.uwyo.edu/cgi-bin/sounding')
 
     @classmethod
     def request_data(cls, time, site_id, **kwargs):
diff --git a/tests/test_http_util.py b/tests/test_http_util.py
index 579d37de4..188301f03 100644
--- a/tests/test_http_util.py
+++ b/tests/test_http_util.py
@@ -169,7 +169,7 @@ def test_http_error_no_header():
         endpoint.get_query(query)
 
 
-class TestEndPoint(object):
+class TestEndPoint:
     """Test the HTTPEndPoint."""
 
     def setup(self):
diff --git a/tests/test_metadata.py b/tests/test_metadata.py
index 4db57e8c6..db8b64c25 100644
--- a/tests/test_metadata.py
+++ b/tests/test_metadata.py
@@ -57,7 +57,7 @@
 # element_name="timeCoverage"
 
 
-class TestSimpleTypes(object):
+class TestSimpleTypes:
     """Test parsing of simple types from metadata."""
 
     @classmethod
@@ -96,7 +96,7 @@ def test_data_type(self):
         assert expected == val
 
 
-class TestComplexTypes(object):
+class TestComplexTypes:
     """Test parsing of complex types from metadata."""
 
     @classmethod
@@ -276,7 +276,7 @@ def test_data_size(self):
         assert actual['size'] == 123
 
 
-class TestProperty(object):
+class TestProperty:
     """Test parsing of property tags."""
 
     @classmethod
@@ -318,7 +318,7 @@ def test_contributor_for_role_not_empty(self):
         assert entry
 
 
-class TestGeospatialCoverage(object):
+class TestGeospatialCoverage:
     """Test parsing GeospatialCoverage tags."""
 
     @classmethod
@@ -374,7 +374,7 @@ def test_geospatial_coverage_attr2(self):
         assert entry['zpositive']['zpositive'] in {'up', 'down'}
 
 
-class TestMetadata(object):
+class TestMetadata:
     """Test parsing other metadata tags."""
 
     @staticmethod
diff --git a/tests/test_ncss.py b/tests/test_ncss.py
index cf0d7bba4..77e871fcb 100644
--- a/tests/test_ncss.py
+++ b/tests/test_ncss.py
@@ -69,7 +69,7 @@ def tuple_unit_handler(data, units=None):
     return np.array(data).tolist(), units
 
 
-class TestNCSS(object):
+class TestNCSS:
     """Test NCSS queries and response parsing."""
 
     server = 'http://thredds.ucar.edu/thredds/ncss/'
diff --git a/tests/test_ncss_dataset.py b/tests/test_ncss_dataset.py
index 81411289d..458dcfff5 100644
--- a/tests/test_ncss_dataset.py
+++ b/tests/test_ncss_dataset.py
@@ -39,7 +39,7 @@
 # AcceptList
 
 
-class TestSimpleTypes(object):
+class TestSimpleTypes:
     """Test parsing simple types from NCSS dataset.xml."""
 
     @classmethod
diff --git a/tests/test_radarsever.py b/tests/test_radarsever.py
index 8260cfa6b..58d4e5d90 100644
--- a/tests/test_radarsever.py
+++ b/tests/test_radarsever.py
@@ -38,7 +38,7 @@ def test_radar_query_chain():
     assert 'time=2015-06-15T12' in query
 
 
-class TestRadarServerLevel3(object):
+class TestRadarServerLevel3:
     """Test radar server functionality for requesting level 3 data."""
 
     @recorder.use_cassette('thredds_radarserver_level3_metadata')
@@ -60,7 +60,7 @@ def test_invalid_variables(self):
         assert not self.client.validate_query(q)
 
 
-class TestRadarServer(object):
+class TestRadarServer:
     """Test radar server functionality for accessing data."""
 
     @recorder.use_cassette('thredds_radarserver_metadata')
@@ -131,7 +131,7 @@ def test_good_level3_query(self):
         assert len(cat.datasets) == 1
 
 
-class TestRadarServerDatasets(object):
+class TestRadarServerDatasets:
     """Tests for listing datasets from the radar server."""
 
     @recorder.use_cassette('thredds_radarserver_toplevel')