From 057f975d5cb4aefc04bd6c208f046e7890ebf270 Mon Sep 17 00:00:00 2001
From: Jeff Whitaker
Date: Wed, 22 Aug 2018 21:56:43 -0600
Subject: [PATCH 1/9] add 'master_file' kwarg to MFDataset.__init__

---
 netCDF4/_netCDF4.pyx | 20 ++++++++++++++++----
 1 file changed, 16 insertions(+), 4 deletions(-)

diff --git a/netCDF4/_netCDF4.pyx b/netCDF4/_netCDF4.pyx
index 5644e0323..c3b27117e 100644
--- a/netCDF4/_netCDF4.pyx
+++ b/netCDF4/_netCDF4.pyx
@@ -5821,7 +5821,9 @@ Example usage (See `netCDF4.MFDataset.__init__` for more details):
      75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99]
     """
-    def __init__(self, files, check=False, aggdim=None, exclude=[]):
+    def __init__(self, files, check=False, aggdim=None, exclude=[],
+                 master_file=None):
         """
-        **`__init__(self, files, check=False, aggdim=None, exclude=[])`**
+        **`__init__(self, files, check=False, aggdim=None, exclude=[],
+        master_file=None)`**
@@ -5833,7 +5835,8 @@ Example usage (See `netCDF4.MFDataset.__init__` for more details):
         of the variables to be aggregated.
 
         **`files`**: either a sequence of netCDF files or a string with a
-        wildcard (converted to a sorted list of files using glob) The first file
+        wildcard (converted to a sorted list of files using glob). If
+        the `master_file` kwarg is not specified, the first file
         in the list will become the "master" file, defining all the
         variables with an aggregation dimension which may span subsequent
         files. Attribute access returns attributes only from "master"
@@ -5850,6 +5853,9 @@ Example usage (See `netCDF4.MFDataset.__init__` for more details):
 
         **`exclude`**: A list of variable names to exclude from aggregation.
         Default is an empty list.
+
+        **`master_file`**: file to use as "master file", defining all the
+        variables with an aggregation dimension and all global attributes.
         """
 
         # Open the master file in the base class, so that the CDFMF instance
@@ -5861,7 +5867,13 @@ Example usage (See `netCDF4.MFDataset.__init__` for more details):
         else:
             files = sorted(glob(files))
 
-        master = files[0]
+        if master_file is not None:
+            if master_file not in files:
+                raise ValueError('master_file not in files list')
+            else:
+                master = master_file
+        else:
+            master = files[0]
 
         # Open the master again, this time as a classic CDF instance. This will avoid
         # calling methods of the CDFMF subclass when querying the master file.

From 35ee3d61bbbf9bd623f9debf51302bbddc119b06 Mon Sep 17 00:00:00 2001
From: Jeff Whitaker
Date: Thu, 23 Aug 2018 06:35:11 -0600
Subject: [PATCH 2/9] update

---
 Changelog | 1 +
 1 file changed, 1 insertion(+)

diff --git a/Changelog b/Changelog
index dae4e5a6c..020e1458b 100644
--- a/Changelog
+++ b/Changelog
@@ -4,6 +4,7 @@
  * make sure format keyword not ignored when mode is 'ws' (issue #827)
  * fix numpy FutureWarning (non-tuple sequence for multidimensional
    indexing is deprecated), issue #833.
+ * add 'master_file' kwarg to MFDataset.__init__ (issue #835).
 
 version 1.4.1 (tag v1.4.1rel)
 =============================

From 668a0f4d806f9edfe20298a40eb8127723dfe5cc Mon Sep 17 00:00:00 2001
From: Jeff Whitaker
Date: Thu, 23 Aug 2018 06:45:33 -0600
Subject: [PATCH 3/9] fix test

---
 test/tst_multifile.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/test/tst_multifile.py b/test/tst_multifile.py
index 42061e6cc..be0e3f550 100644
--- a/test/tst_multifile.py
+++ b/test/tst_multifile.py
@@ -63,9 +63,7 @@ def runTest(self):
         assert_array_equal(varin[:],data2)
         assert getattr(varin,'nonexistantatt',None) == None
         f.close()
-
-    def test_get_by_mfdataset(self):
-        """testing multi-file get_variables_by_attributes."""
+        # testing multi-file get_variables_by_attributes
         f = MFDataset(self.files,check=True)
         assert f.get_variables_by_attributes(axis='T') == []
         f.get_variables_by_attributes(units='zlotys')[0] == f['x']

From 8ac0974742cf6af586ff9a2057421fe3d884224e Mon Sep 17 00:00:00 2001
From: Jeff Whitaker
Date: Thu, 23 Aug 2018 08:44:08 -0600
Subject: [PATCH 4/9] fix order of variables when master_file is not first file

---
 netCDF4/_netCDF4.pyx | 27 ++++++++++++++++++---------
 1 file changed, 18 insertions(+), 9 deletions(-)

diff --git a/netCDF4/_netCDF4.pyx b/netCDF4/_netCDF4.pyx
index c3b27117e..8ee18ca8b 100644
--- a/netCDF4/_netCDF4.pyx
+++ b/netCDF4/_netCDF4.pyx
@@ -5918,18 +5918,23 @@ Example usage (See `netCDF4.MFDataset.__init__` for more details):
         # cdfRecVar dictionary indexed by the aggregation var names; each key holds
         # a list of the corresponding Variable instance, one for each
         # cdf file of the file set
-        cdf = [cdfm]
+        cdf = []
         self._cdf = cdf        # Store this now, because dim() method needs it
         cdfVLen = [len(aggDimId)]
         cdfRecVar = {}
-        for v in masterRecVar.keys():
-            cdfRecVar[v] = [cdfm.variables[v]]
 
         # Open each remaining file in read-only mode.
         # Make sure each file defines the same aggregation variables as the master
        # and that the variables are defined in the same way (name, shape and type)
-        for f in files[1:]:
-            part = Dataset(f)
+        for f in files:
+            if f == master:
+                part = cdfm
+            else:
+                part = Dataset(f)
+            if cdfRecVar == {}:
+                empty_cdfRecVar = True
+            else:
+                empty_cdfRecVar = False
             varInfo = part.variables
             for v in masterRecVar.keys():
                 if check:
@@ -5968,12 +5973,16 @@ Example usage (See `netCDF4.MFDataset.__init__` for more details):
                                 (v, master, masterType, f, extType))
 
                     # Everything ok.
-                    vInst = part.variables[v]
-                    cdfRecVar[v].append(vInst)
+                    if empty_cdfRecVar:
+                        cdfRecVar[v] = [part.variables[v]]
+                    else:
+                        cdfRecVar[v].append(part.variables[v])
                 else:
                     # No making sure of anything -- assume this is ok..
-                    vInst = part.variables[v]
-                    cdfRecVar[v].append(vInst)
+                    if empty_cdfRecVar:
+                        cdfRecVar[v] = [part.variables[v]]
+                    else:
+                        cdfRecVar[v].append(part.variables[v])
 
             cdf.append(part)
             cdfVLen.append(len(part.dimensions[aggDimName]))

From 0da3e22f10845888353ea042d9336f507e3afc6e Mon Sep 17 00:00:00 2001
From: Jeff Whitaker
Date: Thu, 23 Aug 2018 13:20:01 -0600
Subject: [PATCH 5/9] update

---
 netCDF4/_netCDF4.pyx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/netCDF4/_netCDF4.pyx b/netCDF4/_netCDF4.pyx
index 8ee18ca8b..e61aa502e 100644
--- a/netCDF4/_netCDF4.pyx
+++ b/netCDF4/_netCDF4.pyx
@@ -5920,7 +5920,7 @@ Example usage (See `netCDF4.MFDataset.__init__` for more details):
         # cdf file of the file set
         cdf = []
         self._cdf = cdf        # Store this now, because dim() method needs it
-        cdfVLen = [len(aggDimId)]
+        cdfVLen = []
         cdfRecVar = {}
 
         # Open each remaining file in read-only mode.
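The behavior implemented by the patches above can be exercised with a short
session (a minimal sketch; it assumes the mftest*.nc series built in the
MFDataset docstring example, and the master file name is illustrative):

    :::python
    >>> from netCDF4 import MFDataset
    >>> # default behavior: the first file in the sorted list is the master
    >>> f = MFDataset("mftest*nc")
    >>> f.close()
    >>> # explicit master: variables spanning the aggregation dimension and
    >>> # global attributes are defined by mftest3.nc; a name that is not in
    >>> # the file list raises ValueError('master_file not in files list')
    >>> f = MFDataset("mftest*nc", master_file="mftest3.nc")
    >>> f.close()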
From 4c15818798596621753cfd9ae33d9775f70b3ae9 Mon Sep 17 00:00:00 2001
From: Jeff Whitaker
Date: Thu, 23 Aug 2018 13:25:43 -0600
Subject: [PATCH 6/9] add test for master_file kwarg

---
 test/tst_multifile.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/test/tst_multifile.py b/test/tst_multifile.py
index be0e3f550..412721e5c 100644
--- a/test/tst_multifile.py
+++ b/test/tst_multifile.py
@@ -63,6 +63,12 @@ def runTest(self):
         assert_array_equal(varin[:],data2)
         assert getattr(varin,'nonexistantatt',None) == None
         f.close()
+        # test master_file kwarg (issue #835).
+        f = MFDataset(self.files,master_file=self.files[-1],check=True)
+        assert_array_equal(np.arange(0,nx),f.variables['x'][:])
+        varin = f.variables['data']
+        assert_array_equal(varin[4:-4:4,3:5,2:8],data2[4:-4:4,3:5,2:8])
+        f.close()
         # testing multi-file get_variables_by_attributes
         f = MFDataset(self.files,check=True)
         assert f.get_variables_by_attributes(axis='T') == []
         f.get_variables_by_attributes(units='zlotys')[0] == f['x']

From b43c1c67ff7ccc31fea4754855042380afe71804 Mon Sep 17 00:00:00 2001
From: Jeff Whitaker
Date: Thu, 23 Aug 2018 13:43:27 -0600
Subject: [PATCH 7/9] update html docs

---
 docs/netCDF4/index.html | 30 +++++++++++++++++++++++++-----
 1 file changed, 25 insertions(+), 5 deletions(-)

diff --git a/docs/netCDF4/index.html b/docs/netCDF4/index.html
index b8029100c..5c9cd0051 100644
--- a/docs/netCDF4/index.html
+++ b/docs/netCDF4/index.html
@@ -4,7 +4,7 @@
netCDF4 API documentation
  • endian
  • filters
  • getValue
  • +
  • get_dims
  • get_var_chunk_cache
  • getncattr
  • group
  • @@ -1268,7 +1269,7 @@

    Index

    netCDF4 module

    -

    Version 1.4.1

    +

    Version 1.4.2


    Introduction

    netcdf4-python is a Python interface to the netCDF C library.

    @@ -4678,7 +4679,7 @@

    Static methods

    -

    def __init__(

    self, files, check=False, aggdim=None, exclude=[])

    +

    def __init__(

    self, files, check=False, aggdim=None, exclude=[], master_file=None)

    @@ -4696,7 +4697,8 @@

    Static methods

    aggdim must be the leftmost (slowest varying) dimension of each of the variables to be aggregated.

files: either a sequence of netCDF files or a string with a
-wildcard (converted to a sorted list of files using glob) The first file
+wildcard (converted to a sorted list of files using glob). If
+the master_file kwarg is not specified, the first file
in the list will become the "master" file, defining all the
variables with an aggregation dimension which may span subsequent
files. Attribute access returns attributes only from "master"
@@ -4709,7 +4711,9 @@

    Static methods

    be the leftmost dimension of each of the variables to be aggregated). If None (default), aggregate over the unlimited dimension.

    exclude: A list of variable names to exclude from aggregation. -Default is an empty list.

    +Default is an empty list.

    +

master_file: file to use as "master file", defining all the
+variables with an aggregation dimension and all global attributes.
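A short sketch of the effect described above (the file names and attribute
are illustrative): attribute access on the aggregate returns attributes from
the designated master file rather than from the first file in the list:

    :::python
    >>> f = MFDataset(["a.nc", "b.nc", "c.nc"], master_file="b.nc")
    >>> print f.history   # global attribute as defined in b.nc, the master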

    @@ -5940,6 +5944,22 @@

    Static methods

    +
    +
    +

    def get_dims(

    self)

    +
    + + + + +

return a tuple of Dimension instances associated with this
+`netCDF4.Variable`.

    +
    +
    + +
    + +
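A usage sketch for the method documented above (it assumes a file containing
a temp(time, level, lat, lon) variable, as in the tutorial):

    :::python
    >>> nc = Dataset("test.nc")
    >>> temp = nc.variables["temp"]
    >>> # one Dimension instance per dimension of the variable
    >>> print [d.name for d in temp.get_dims()]
    ['time', 'level', 'lat', 'lon']
    >>> nc.close()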

    def get_var_chunk_cache(

    self)

    From 70f8faf064a113cdb7dd2b2dc5da29706b7aef99 Mon Sep 17 00:00:00 2001 From: Jeff Whitaker Date: Fri, 24 Aug 2018 06:40:52 -0600 Subject: [PATCH 8/9] update --- netCDF4/_netCDF4.pyx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/netCDF4/_netCDF4.pyx b/netCDF4/_netCDF4.pyx index e61aa502e..1f363605d 100644 --- a/netCDF4/_netCDF4.pyx +++ b/netCDF4/_netCDF4.pyx @@ -5803,14 +5803,14 @@ Adapted from [pycdf](http://pysclint.sourceforge.net/pycdf) by Andre Gosselin. Example usage (See `netCDF4.MFDataset.__init__` for more details): :::python - >>> import numpy + >>> import numpy as np >>> # create a series of netCDF files with a variable sharing >>> # the same unlimited dimension. >>> for nf in range(10): - >>> f = Dataset("mftest%s.nc" % nf,"w") + >>> f = Dataset("mftest%s.nc" % nf,"w",format='NETCDF4_CLASSIC') >>> f.createDimension("x",None) >>> x = f.createVariable("x","i",("x",)) - >>> x[0:10] = numpy.arange(nf*10,10*(nf+1)) + >>> x[0:10] = np.arange(nf*10,10*(nf+1)) >>> f.close() >>> # now read all those files in at once, in one Dataset. >>> f = MFDataset("mftest*nc") From e24b21abdc155d47b9cd58b9458799812b2f128f Mon Sep 17 00:00:00 2001 From: Jeff Whitaker Date: Fri, 24 Aug 2018 06:45:26 -0600 Subject: [PATCH 9/9] update docs --- docs/netCDF4/index.html | 1223 ++++++++++++++++++++++++++------------- 1 file changed, 809 insertions(+), 414 deletions(-) diff --git a/docs/netCDF4/index.html b/docs/netCDF4/index.html index 5c9cd0051..6ff71385f 100644 --- a/docs/netCDF4/index.html +++ b/docs/netCDF4/index.html @@ -865,8 +865,10 @@ .codehilite .err { border: 1px solid #FF0000 } /* Error */ .codehilite .k { color: #008000; font-weight: bold } /* Keyword */ .codehilite .o { color: #666666 } /* Operator */ +.codehilite .ch { color: #408080; font-style: italic } /* Comment.Hashbang */ .codehilite .cm { color: #408080; font-style: italic } /* Comment.Multiline */ .codehilite .cp { color: #BC7A00 } /* Comment.Preproc */ +.codehilite .cpf { color: #408080; font-style: italic } /* Comment.PreprocFile */ .codehilite .c1 { color: #408080; font-style: italic } /* Comment.Single */ .codehilite .cs { color: #408080; font-style: italic } /* Comment.Special */ .codehilite .gd { color: #A00000 } /* Generic.Deleted */ @@ -906,8 +908,10 @@ .codehilite .mh { color: #666666 } /* Literal.Number.Hex */ .codehilite .mi { color: #666666 } /* Literal.Number.Integer */ .codehilite .mo { color: #666666 } /* Literal.Number.Oct */ +.codehilite .sa { color: #BA2121 } /* Literal.String.Affix */ .codehilite .sb { color: #BA2121 } /* Literal.String.Backtick */ .codehilite .sc { color: #BA2121 } /* Literal.String.Char */ +.codehilite .dl { color: #BA2121 } /* Literal.String.Delimiter */ .codehilite .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */ .codehilite .s2 { color: #BA2121 } /* Literal.String.Double */ .codehilite .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */ @@ -918,9 +922,11 @@ .codehilite .s1 { color: #BA2121 } /* Literal.String.Single */ .codehilite .ss { color: #19177C } /* Literal.String.Symbol */ .codehilite .bp { color: #008000 } /* Name.Builtin.Pseudo */ +.codehilite .fm { color: #0000FF } /* Name.Function.Magic */ .codehilite .vc { color: #19177C } /* Name.Variable.Class */ .codehilite .vg { color: #19177C } /* Name.Variable.Global */ .codehilite .vi { color: #19177C } /* Name.Variable.Instance */ +.codehilite .vm { color: #19177C } /* Name.Variable.Magic */ .codehilite .il { color: #666666 } /* Literal.Number.Integer.Long */ @@ -1166,8 +1172,6 
@@

    Index

    @@ -1400,8 +1406,8 @@

    1) Creating/Opening/Closing a netCDF file.

    accomplished via the close method of the Dataset instance.

    Here's an example:

    -
    >>> from netCDF4 import Dataset
    ->>> rootgrp = Dataset("test.nc", "w", format="NETCDF4")
    +
    >>> from netCDF4 import Dataset
    +>>> rootgrp = Dataset("test.nc", "w", format="NETCDF4")
     >>> print rootgrp.data_model
     NETCDF4
     >>> rootgrp.close()
    @@ -1427,13 +1433,13 @@ 

    2) Groups in a netCDF file.

the groups dictionary attribute of the Dataset instance. Only NETCDF4 formatted files support Groups; if you try to create a Group in a netCDF 3 file you will get an error message.

    -
    >>> rootgrp = Dataset("test.nc", "a")
    ->>> fcstgrp = rootgrp.createGroup("forecasts")
    ->>> analgrp = rootgrp.createGroup("analyses")
    +
    >>> rootgrp = Dataset("test.nc", "a")
    +>>> fcstgrp = rootgrp.createGroup("forecasts")
    +>>> analgrp = rootgrp.createGroup("analyses")
     >>> print rootgrp.groups
    -OrderedDict([("forecasts", 
    +OrderedDict([("forecasts", 
                   <netCDF4._netCDF4.Group object at 0x1b4b7b0>),
    -             ("analyses", 
    +             ("analyses", 
                   <netCDF4._netCDF4.Group object at 0x1b4b970>)])
     
    @@ -1445,8 +1451,8 @@

    2) Groups in a netCDF file.

    path attribute that contains a simulated unix directory path to that group. To simplify the creation of nested groups, you can use a unix-like path as an argument to createGroup.

    -
    >>> fcstgrp1 = rootgrp.createGroup("/forecasts/model1")
    ->>> fcstgrp2 = rootgrp.createGroup("/forecasts/model2")
    +
    >>> fcstgrp1 = rootgrp.createGroup("/forecasts/model1")
    +>>> fcstgrp2 = rootgrp.createGroup("/forecasts/model2")
     
    @@ -1458,7 +1464,7 @@

    2) Groups in a netCDF file.

Dataset. The function walktree is a Python generator that is used to walk the directory tree. Note that printing the Dataset or Group object yields summary information about its contents.

    -
    >>> def walktree(top):
    +
    >>> def walktree(top):
     >>>     values = top.groups.values()
     >>>     yield values
     >>>     for value in top.groups.values():
    @@ -1468,27 +1474,27 @@ 

    2) Groups in a netCDF file.

    >>> for children in walktree(rootgrp): >>> for child in children: >>> print child -<type "netCDF4._netCDF4.Dataset"> +<type "netCDF4._netCDF4.Dataset"> root group (NETCDF4 file format): dimensions: variables: groups: forecasts, analyses -<type "netCDF4._netCDF4.Group"> +<type "netCDF4._netCDF4.Group"> group /forecasts: dimensions: variables: groups: model1, model2 -<type "netCDF4._netCDF4.Group"> +<type "netCDF4._netCDF4.Group"> group /analyses: dimensions: variables: groups: -<type "netCDF4._netCDF4.Group"> +<type "netCDF4._netCDF4.Group"> group /forecasts/model1: dimensions: variables: groups: -<type "netCDF4._netCDF4.Group"> +<type "netCDF4._netCDF4.Group"> group /forecasts/model2: dimensions: variables: @@ -1509,19 +1515,19 @@

    3) Dimensions in a netCDF file.

    level dimensions are unlimited. Having more than one unlimited dimension is a new netCDF 4 feature, in netCDF 3 files there may be only one, and it must be the first (leftmost) dimension of the variable.

    -
    >>> level = rootgrp.createDimension("level", None)
    ->>> time = rootgrp.createDimension("time", None)
    ->>> lat = rootgrp.createDimension("lat", 73)
    ->>> lon = rootgrp.createDimension("lon", 144)
    +
    >>> level = rootgrp.createDimension("level", None)
    +>>> time = rootgrp.createDimension("time", None)
    +>>> lat = rootgrp.createDimension("lat", 73)
    +>>> lon = rootgrp.createDimension("lon", 144)
     

    All of the Dimension instances are stored in a python dictionary.

    -
    >>> print rootgrp.dimensions
    -OrderedDict([("level", <netCDF4._netCDF4.Dimension object at 0x1b48030>),
    -             ("time", <netCDF4._netCDF4.Dimension object at 0x1b481c0>),
    -             ("lat", <netCDF4._netCDF4.Dimension object at 0x1b480f8>),
    -             ("lon", <netCDF4._netCDF4.Dimension object at 0x1b48a08>)])
    +
    >>> print rootgrp.dimensions
    +OrderedDict([("level", <netCDF4._netCDF4.Dimension object at 0x1b48030>),
    +             ("time", <netCDF4._netCDF4.Dimension object at 0x1b481c0>),
    +             ("lat", <netCDF4._netCDF4.Dimension object at 0x1b480f8>),
    +             ("lon", <netCDF4._netCDF4.Dimension object at 0x1b48a08>)])
     
    @@ -1529,7 +1535,7 @@

    3) Dimensions in a netCDF file.

the current size of that dimension. The isunlimited method of a Dimension instance can be used to determine if the dimension is unlimited, or appendable.

    -
    >>> print len(lon)
    +
    >>> print len(lon)
     144
     >>> print lon.isunlimited()
     False
    @@ -1541,13 +1547,13 @@ 

    3) Dimensions in a netCDF file.

    Printing the Dimension object provides useful summary info, including the name and length of the dimension, and whether it is unlimited.

    -
    >>> for dimobj in rootgrp.dimensions.values():
    +
    >>> for dimobj in rootgrp.dimensions.values():
     >>>    print dimobj
    -<type "netCDF4._netCDF4.Dimension"> (unlimited): name = "level", size = 0
    -<type "netCDF4._netCDF4.Dimension"> (unlimited): name = "time", size = 0
    -<type "netCDF4._netCDF4.Dimension">: name = "lat", size = 73
    -<type "netCDF4._netCDF4.Dimension">: name = "lon", size = 144
    -<type "netCDF4._netCDF4.Dimension"> (unlimited): name = "time", size = 0
    +<type "netCDF4._netCDF4.Dimension"> (unlimited): name = "level", size = 0
    +<type "netCDF4._netCDF4.Dimension"> (unlimited): name = "time", size = 0
    +<type "netCDF4._netCDF4.Dimension">: name = "lat", size = 73
    +<type "netCDF4._netCDF4.Dimension">: name = "lon", size = 144
    +<type "netCDF4._netCDF4.Dimension"> (unlimited): name = "time", size = 0
     
    @@ -1584,18 +1590,18 @@

    4) Variables in a netCDF file.

    coordinate variables. The createVariable method returns an instance of the Variable class whose methods can be used later to access and set variable data and attributes.

    -
    >>> times = rootgrp.createVariable("time","f8",("time",))
    ->>> levels = rootgrp.createVariable("level","i4",("level",))
    ->>> latitudes = rootgrp.createVariable("lat","f4",("lat",))
    ->>> longitudes = rootgrp.createVariable("lon","f4",("lon",))
    ->>> # two dimensions unlimited
    ->>> temp = rootgrp.createVariable("temp","f4",("time","level","lat","lon",))
    +
    >>> times = rootgrp.createVariable("time","f8",("time",))
    +>>> levels = rootgrp.createVariable("level","i4",("level",))
    +>>> latitudes = rootgrp.createVariable("lat","f4",("lat",))
    +>>> longitudes = rootgrp.createVariable("lon","f4",("lon",))
    +>>> # two dimensions unlimited
    +>>> temp = rootgrp.createVariable("temp","f4",("time","level","lat","lon",))
     

    To get summary info on a Variable instance in an interactive session, just print it.

    -
    >>> print temp
    -<type "netCDF4._netCDF4.Variable">
    +
    >>> print temp
    +<type "netCDF4._netCDF4.Variable">
     float32 temp(time, level, lat, lon)
         least_significant_digit: 3
         units: K
    @@ -1605,21 +1611,21 @@ 

    4) Variables in a netCDF file.

    You can use a path to create a Variable inside a hierarchy of groups.

    -
    >>> ftemp = rootgrp.createVariable("/forecasts/model1/temp","f4",("time","level","lat","lon",))
    +
    >>> ftemp = rootgrp.createVariable("/forecasts/model1/temp","f4",("time","level","lat","lon",))
     

    If the intermediate groups do not yet exist, they will be created.

    You can also query a Dataset or Group instance directly to obtain Group or Variable instances using paths.

    -
    >>> print rootgrp["/forecasts/model1"] # a Group instance
    -<type "netCDF4._netCDF4.Group">
    +
    >>> print rootgrp["/forecasts/model1"] # a Group instance
    +<type "netCDF4._netCDF4.Group">
     group /forecasts/model1:
         dimensions(sizes):
         variables(dimensions): float32 temp(time,level,lat,lon)
         groups:
    ->>> print rootgrp["/forecasts/model1/temp"] # a Variable instance
    -<type "netCDF4._netCDF4.Variable">
    +>>> print rootgrp["/forecasts/model1/temp"] # a Variable instance
    +<type "netCDF4._netCDF4.Variable">
     float32 temp(time, level, lat, lon)
     path = /forecasts/model1
     unlimited dimensions: time, level
    @@ -1630,12 +1636,12 @@ 

    4) Variables in a netCDF file.

    All of the variables in the Dataset or Group are stored in a Python dictionary, in the same way as the dimensions:

    -
    >>> print rootgrp.variables
    -OrderedDict([("time", <netCDF4.Variable object at 0x1b4ba70>),
    -             ("level", <netCDF4.Variable object at 0x1b4bab0>),
    -             ("lat", <netCDF4.Variable object at 0x1b4baf0>),
    -             ("lon", <netCDF4.Variable object at 0x1b4bb30>),
    -             ("temp", <netCDF4.Variable object at 0x1b4bb70>)])
    +
    >>> print rootgrp.variables
    +OrderedDict([("time", <netCDF4.Variable object at 0x1b4ba70>),
    +             ("level", <netCDF4.Variable object at 0x1b4bab0>),
    +             ("lat", <netCDF4.Variable object at 0x1b4baf0>),
    +             ("lon", <netCDF4.Variable object at 0x1b4bb30>),
    +             ("temp", <netCDF4.Variable object at 0x1b4bb70>)])
     
    @@ -1651,16 +1657,16 @@

    5) Attributes in a netCDF file.

    attributes are set by assigning values to Variable instances variables. Attributes can be strings, numbers or sequences. Returning to our example,

    -
    >>> import time
    ->>> rootgrp.description = "bogus example script"
    ->>> rootgrp.history = "Created " + time.ctime(time.time())
    ->>> rootgrp.source = "netCDF4 python module tutorial"
    ->>> latitudes.units = "degrees north"
    ->>> longitudes.units = "degrees east"
    ->>> levels.units = "hPa"
    ->>> temp.units = "K"
    ->>> times.units = "hours since 0001-01-01 00:00:00.0"
    ->>> times.calendar = "gregorian"
    +
    >>> import time
    +>>> rootgrp.description = "bogus example script"
    +>>> rootgrp.history = "Created " + time.ctime(time.time())
    +>>> rootgrp.source = "netCDF4 python module tutorial"
    +>>> latitudes.units = "degrees north"
    +>>> longitudes.units = "degrees east"
    +>>> levels.units = "hPa"
    +>>> temp.units = "K"
    +>>> times.units = "hours since 0001-01-01 00:00:00.0"
    +>>> times.calendar = "gregorian"
     
    @@ -1669,8 +1675,8 @@

    5) Attributes in a netCDF file.

    attributes. This method is provided as a convenience, since using the built-in dir Python function will return a bunch of private methods and attributes that cannot (or should not) be modified by the user.

    -
    >>> for name in rootgrp.ncattrs():
    ->>>     print "Global attr", name, "=", getattr(rootgrp,name)
    +
    >>> for name in rootgrp.ncattrs():
    +>>>     print "Global attr", name, "=", getattr(rootgrp,name)
     Global attr description = bogus example script
     Global attr history = Created Mon Nov  7 10.30:56 2005
     Global attr source = netCDF4 python module tutorial
    @@ -1680,10 +1686,10 @@ 

    5) Attributes in a netCDF file.

    The __dict__ attribute of a Dataset, Group or Variable instance provides all the netCDF attribute name/value pairs in a python dictionary:

    -
    >>> print rootgrp.__dict__
    -OrderedDict([(u"description", u"bogus example script"),
    -             (u"history", u"Created Thu Mar  3 19:30:33 2011"),
    -             (u"source", u"netCDF4 python module tutorial")])
    +
    >>> print rootgrp.__dict__
    +OrderedDict([(u"description", u"bogus example script"),
    +             (u"history", u"Created Thu Mar  3 19:30:33 2011"),
    +             (u"source", u"netCDF4 python module tutorial")])
     
    @@ -1693,12 +1699,12 @@

    5) Attributes in a netCDF file.

    6) Writing data to and retrieving data from a netCDF variable.

    Now that you have a netCDF Variable instance, how do you put data into it? You can just treat it like an array and assign data to a slice.

    -
    >>> import numpy
    +
    >>> import numpy
     >>> lats =  numpy.arange(-90,91,2.5)
     >>> lons =  numpy.arange(-180,180,2.5)
     >>> latitudes[:] = lats
     >>> longitudes[:] = lons
    ->>> print "latitudes =\n",latitudes[:]
    +>>> print "latitudes =\n",latitudes[:]
     latitudes =
     [-90.  -87.5 -85.  -82.5 -80.  -77.5 -75.  -72.5 -70.  -67.5 -65.  -62.5
      -60.  -57.5 -55.  -52.5 -50.  -47.5 -45.  -42.5 -40.  -37.5 -35.  -32.5
    @@ -1713,19 +1719,19 @@ 

    6) Writing data to and retrieving data from a netCDF vari

    Unlike NumPy's array objects, netCDF Variable objects with unlimited dimensions will grow along those dimensions if you assign data outside the currently defined range of indices.

    -
    >>> # append along two unlimited dimensions by assigning to slice.
    ->>> nlats = len(rootgrp.dimensions["lat"])
    ->>> nlons = len(rootgrp.dimensions["lon"])
    ->>> print "temp shape before adding data = ",temp.shape
    +
    >>> # append along two unlimited dimensions by assigning to slice.
    +>>> nlats = len(rootgrp.dimensions["lat"])
    +>>> nlons = len(rootgrp.dimensions["lon"])
    +>>> print "temp shape before adding data = ",temp.shape
     temp shape before adding data =  (0, 0, 73, 144)
     >>>
     >>> from numpy.random import uniform
     >>> temp[0:5,0:10,:,:] = uniform(size=(5,10,nlats,nlons))
    ->>> print "temp shape after adding data = ",temp.shape
    +>>> print "temp shape after adding data = ",temp.shape
     temp shape after adding data =  (6, 10, 73, 144)
     >>>
    ->>> # levels have grown, but no values yet assigned.
    ->>> print "levels shape after adding pressure data = ",levels.shape
    +>>> # levels have grown, but no values yet assigned.
    +>>> print "levels shape after adding pressure data = ",levels.shape
     levels shape after adding pressure data =  (10,)
     
    @@ -1733,7 +1739,7 @@

    6) Writing data to and retrieving data from a netCDF vari

    Note that the size of the levels variable grows when data is appended along the level dimension of the variable temp, even though no data has yet been assigned to levels.

    -
    >>> # now, assign data to levels dimension variable.
    +
    >>> # now, assign data to levels dimension variable.
     >>> levels[:] =  [1000.,850.,700.,500.,300.,250.,200.,150.,100.,50.]
     
    @@ -1746,7 +1752,7 @@

    6) Writing data to and retrieving data from a netCDF vari than for numpy arrays. Only 1-d boolean arrays and integer sequences are allowed, and these indices work independently along each dimension (similar to the way vector subscripts work in fortran). This means that

    -
    >>> temp[0, 0, [0,1,2,3], [0,1,2,3]]
    +
    >>> temp[0, 0, [0,1,2,3], [0,1,2,3]]
     
    @@ -1762,14 +1768,14 @@

    6) Writing data to and retrieving data from a netCDF vari it provides a very powerful way to extract data from multidimensional netCDF variables by using logical operations on the dimension arrays to create slices.

    For example,

    -
    >>> tempdat = temp[::2, [1,3,6], lats>0, lons>0]
    +
    >>> tempdat = temp[::2, [1,3,6], lats>0, lons>0]
     

    will extract time indices 0,2 and 4, pressure levels 850, 500 and 200 hPa, all Northern Hemisphere latitudes and Eastern Hemisphere longitudes, resulting in a numpy array of shape (3, 3, 36, 71).

    -
    >>> print "shape of fancy temp slice = ",tempdat.shape
    +
    >>> print "shape of fancy temp slice = ",tempdat.shape
     shape of fancy temp slice =  (3, 3, 36, 71)
     
    @@ -1799,16 +1805,16 @@

    7) Dealing with time coordinates.

    cftime package must be installed separately). Here's an example of how they can be used:

    -
    >>> # fill in times.
    +
    >>> # fill in times.
     >>> from datetime import datetime, timedelta
     >>> from netCDF4 import num2date, date2num
     >>> dates = [datetime(2001,3,1)+n*timedelta(hours=12) for n in range(temp.shape[0])]
     >>> times[:] = date2num(dates,units=times.units,calendar=times.calendar)
    ->>> print "time values (in units %s): " % times.units+"\n",times[:]
    +>>> print "time values (in units %s): " % times.units+"\n",times[:]
     time values (in units hours since January 1, 0001):
     [ 17533056.  17533068.  17533080.  17533092.  17533104.]
     >>> dates = num2date(times[:],units=times.units,calendar=times.calendar)
    ->>> print "dates corresponding to time values:\n",dates
    +>>> print "dates corresponding to time values:\n",dates
     dates corresponding to time values:
     [2001-03-01 00:00:00 2001-03-01 12:00:00 2001-03-02 00:00:00
      2001-03-02 12:00:00 2001-03-03 00:00:00]
    @@ -1835,19 +1841,19 @@ 

    8) Reading data from a multi-file netCDF dataset.

    must in be in NETCDF3_64BIT_OFFSET, NETCDF3_64BIT_DATA, NETCDF3_CLASSIC or NETCDF4_CLASSIC format (NETCDF4 formatted multi-file datasets are not supported).

    -
    >>> for nf in range(10):
    ->>>     f = Dataset("mftest%s.nc" % nf,"w")
    ->>>     f.createDimension("x",None)
    ->>>     x = f.createVariable("x","i",("x",))
    +
    >>> for nf in range(10):
    +>>>     f = Dataset("mftest%s.nc" % nf,"w")
    +>>>     f.createDimension("x",None)
    +>>>     x = f.createVariable("x","i",("x",))
     >>>     x[0:10] = numpy.arange(nf*10,10*(nf+1))
     >>>     f.close()
     

    Now read all the files back in at once with MFDataset

    -
    >>> from netCDF4 import MFDataset
    ->>> f = MFDataset("mftest*nc")
    ->>> print f.variables["x"][:]
    +
    >>> from netCDF4 import MFDataset
    +>>> f = MFDataset("mftest*nc")
    +>>> print f.variables["x"][:]
     [ 0  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24
      25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
      50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74
    @@ -1893,17 +1899,17 @@ 

    9) Efficient compression of netCDF variables.

    'lossy' instead of 'lossless', that is some precision in the data is sacrificed for the sake of disk space.

    In our example, try replacing the line

    -
    >>> temp = rootgrp.createVariable("temp","f4",("time","level","lat","lon",))
    +
    >>> temp = rootgrp.createVariable("temp","f4",("time","level","lat","lon",))
     

    with

    -
    >>> temp = dataset.createVariable("temp","f4",("time","level","lat","lon",),zlib=True)
    +
    >>> temp = dataset.createVariable("temp","f4",("time","level","lat","lon",),zlib=True)
     

    and then

    -
    >>> temp = dataset.createVariable("temp","f4",("time","level","lat","lon",),zlib=True,least_significant_digit=3)
    +
    >>> temp = dataset.createVariable("temp","f4",("time","level","lat","lon",),zlib=True,least_significant_digit=3)
     
    @@ -1922,31 +1928,31 @@

    10) Beyond homogeneous arrays of a fixed type - compound createCompoundType method of a Dataset or Group instance. Since there is no native complex data type in netcdf, compound types are handy for storing numpy complex arrays. Here's an example:

    -
    >>> f = Dataset("complex.nc","w")
    ->>> size = 3 # length of 1-d complex array
    ->>> # create sample complex data.
    +
    >>> f = Dataset("complex.nc","w")
    +>>> size = 3 # length of 1-d complex array
    +>>> # create sample complex data.
     >>> datac = numpy.exp(1j*(1.+numpy.linspace(0, numpy.pi, size)))
    ->>> # create complex128 compound data type.
    ->>> complex128 = numpy.dtype([("real",numpy.float64),("imag",numpy.float64)])
    ->>> complex128_t = f.createCompoundType(complex128,"complex128")
    ->>> # create a variable with this data type, write some data to it.
    ->>> f.createDimension("x_dim",None)
    ->>> v = f.createVariable("cmplx_var",complex128_t,"x_dim")
    ->>> data = numpy.empty(size,complex128) # numpy structured array
    ->>> data["real"] = datac.real; data["imag"] = datac.imag
    ->>> v[:] = data # write numpy structured array to netcdf compound var
    ->>> # close and reopen the file, check the contents.
    ->>> f.close(); f = Dataset("complex.nc")
    ->>> v = f.variables["cmplx_var"]
    ->>> datain = v[:] # read in all the data into a numpy structured array
    ->>> # create an empty numpy complex array
    +>>> # create complex128 compound data type.
    +>>> complex128 = numpy.dtype([("real",numpy.float64),("imag",numpy.float64)])
    +>>> complex128_t = f.createCompoundType(complex128,"complex128")
    +>>> # create a variable with this data type, write some data to it.
    +>>> f.createDimension("x_dim",None)
    +>>> v = f.createVariable("cmplx_var",complex128_t,"x_dim")
    +>>> data = numpy.empty(size,complex128) # numpy structured array
    +>>> data["real"] = datac.real; data["imag"] = datac.imag
    +>>> v[:] = data # write numpy structured array to netcdf compound var
    +>>> # close and reopen the file, check the contents.
    +>>> f.close(); f = Dataset("complex.nc")
    +>>> v = f.variables["cmplx_var"]
    +>>> datain = v[:] # read in all the data into a numpy structured array
    +>>> # create an empty numpy complex array
     >>> datac2 = numpy.empty(datain.shape,numpy.complex128)
    ->>> # .. fill it with contents of structured array.
    ->>> datac2.real = datain["real"]; datac2.imag = datain["imag"]
    ->>> print datac.dtype,datac # original data
    +>>> # .. fill it with contents of structured array.
    +>>> datac2.real = datain["real"]; datac2.imag = datain["imag"]
    +>>> print datac.dtype,datac # original data
     complex128 [ 0.54030231+0.84147098j -0.84147098+0.54030231j  -0.54030231-0.84147098j]
     >>>
    ->>> print datac2.dtype,datac2 # data from file
    +>>> print datac2.dtype,datac2 # data from file
     complex128 [ 0.54030231+0.84147098j -0.84147098+0.54030231j  -0.54030231-0.84147098j]
     
    @@ -1958,22 +1964,22 @@

    10) Beyond homogeneous arrays of a fixed type - compound All of the compound types defined for a Dataset or Group are stored in a Python dictionary, just like variables and dimensions. As always, printing objects gives useful summary information in an interactive session:

    -
    >>> print f
    -<type "netCDF4._netCDF4.Dataset">
    +
    >>> print f
    +<type "netCDF4._netCDF4.Dataset">
     root group (NETCDF4 file format):
         dimensions: x_dim
         variables: cmplx_var
         groups:
    -<type "netCDF4._netCDF4.Variable">
    ->>> print f.variables["cmplx_var"]
    +<type "netCDF4._netCDF4.Variable">
    +>>> print f.variables["cmplx_var"]
     compound cmplx_var(x_dim)
    -compound data type: [("real", "<f8"), ("imag", "<f8")]
    +compound data type: [("real", "<f8"), ("imag", "<f8")]
     unlimited dimensions: x_dim
     current shape = (3,)
     >>> print f.cmptypes
    -OrderedDict([("complex128", <netCDF4.CompoundType object at 0x1029eb7e8>)])
    ->>> print f.cmptypes["complex128"]
    -<type "netCDF4._netCDF4.CompoundType">: name = "complex128", numpy dtype = [(u"real","<f8"), (u"imag", "<f8")]
    +OrderedDict([("complex128", <netCDF4.CompoundType object at 0x1029eb7e8>)])
    +>>> print f.cmptypes["complex128"]
    +<type "netCDF4._netCDF4.CompoundType">: name = "complex128", numpy dtype = [(u"real","<f8"), (u"imag", "<f8")]
     
    @@ -1982,8 +1988,8 @@

    11) Variable-length (vlen) data types.

    of variable length sequences having the same type. To create a variable-length data type, use the createVLType method method of a Dataset or Group instance.

    -
    >>> f = Dataset("tst_vlen.nc","w")
    ->>> vlen_t = f.createVLType(numpy.int32, "phony_vlen")
    +
    >>> f = Dataset("tst_vlen.nc","w")
    +>>> vlen_t = f.createVLType(numpy.int32, "phony_vlen")
     
    @@ -1992,9 +1998,9 @@

    11) Variable-length (vlen) data types.

    used (signed and unsigned integers, 32 and 64 bit floats, and characters), but compound data types cannot. A new variable can then be created using this datatype.

    -
    >>> x = f.createDimension("x",3)
    ->>> y = f.createDimension("y",4)
    ->>> vlvar = f.createVariable("phony_vlen_var", vlen_t, ("y","x"))
    +
    >>> x = f.createDimension("x",3)
    +>>> y = f.createDimension("y",4)
    +>>> vlvar = f.createVariable("phony_vlen_var", vlen_t, ("y","x"))
     
    @@ -2005,13 +2011,13 @@

    11) Variable-length (vlen) data types.

    but of varying length. In this case, they contain 1-D numpy int32 arrays of random length between 1 and 10.

    -
    >>> import random
    +
    >>> import random
     >>> data = numpy.empty(len(y)*len(x),object)
     >>> for n in range(len(y)*len(x)):
    ->>>    data[n] = numpy.arange(random.randint(1,10),dtype="int32")+1
    +>>>    data[n] = numpy.arange(random.randint(1,10),dtype="int32")+1
     >>> data = numpy.reshape(data,(len(y),len(x)))
     >>> vlvar[:] = data
    ->>> print "vlen variable =\n",vlvar[:]
    +>>> print "vlen variable =\n",vlvar[:]
     vlen variable =
     [[[ 1  2  3  4  5  6  7  8  9 10] [1 2 3 4 5] [1 2 3 4 5 6 7 8]]
      [[1 2 3 4 5 6 7] [1 2 3 4 5 6] [1 2 3 4 5]]
    @@ -2019,19 +2025,19 @@ 

    11) Variable-length (vlen) data types.

    [[ 1 2 3 4 5 6 7 8 9 10] [ 1 2 3 4 5 6 7 8 9 10] [1 2 3 4 5 6 7 8]]] >>> print f -<type "netCDF4._netCDF4.Dataset"> +<type "netCDF4._netCDF4.Dataset"> root group (NETCDF4 file format): dimensions: x, y variables: phony_vlen_var groups: ->>> print f.variables["phony_vlen_var"] -<type "netCDF4._netCDF4.Variable"> +>>> print f.variables["phony_vlen_var"] +<type "netCDF4._netCDF4.Variable"> vlen phony_vlen_var(y, x) vlen data type: int32 unlimited dimensions: current shape = (4, 3) ->>> print f.VLtypes["phony_vlen"] -<type "netCDF4._netCDF4.VLType">: name = "phony_vlen", numpy dtype = int32 +>>> print f.VLtypes["phony_vlen"] +<type "netCDF4._netCDF4.VLType">: name = "phony_vlen", numpy dtype = int32
    @@ -2040,33 +2046,33 @@

    11) Variable-length (vlen) data types.

    Instead, simply use the python str builtin (or a numpy string datatype with fixed length greater than 1) when calling the createVariable method.

    -
    >>> z = f.createDimension("z",10)
    ->>> strvar = rootgrp.createVariable("strvar", str, "z")
    +
    >>> z = f.createDimension("z",10)
    +>>> strvar = rootgrp.createVariable("strvar", str, "z")
     

    In this example, an object array is filled with random python strings with random lengths between 2 and 12 characters, and the data in the object array is assigned to the vlen string variable.

    -
    >>> chars = "1234567890aabcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
    ->>> data = numpy.empty(10,"O")
    +
    >>> chars = "1234567890aabcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
    +>>> data = numpy.empty(10,"O")
     >>> for n in range(10):
     >>>     stringlen = random.randint(2,12)
    ->>>     data[n] = "".join([random.choice(chars) for i in range(stringlen)])
    +>>>     data[n] = "".join([random.choice(chars) for i in range(stringlen)])
     >>> strvar[:] = data
    ->>> print "variable-length string variable:\n",strvar[:]
    +>>> print "variable-length string variable:\n",strvar[:]
     variable-length string variable:
     [aDy29jPt 5DS9X8 jd7aplD b8t4RM jHh8hq KtaPWF9cQj Q1hHN5WoXSiT MMxsVeq tdLUzvVTzj]
     >>> print f
    -<type "netCDF4._netCDF4.Dataset">
    +<type "netCDF4._netCDF4.Dataset">
     root group (NETCDF4 file format):
         dimensions: x, y, z
         variables: phony_vlen_var, strvar
         groups:
    ->>> print f.variables["strvar"]
    -<type "netCDF4._netCDF4.Variable">
    +>>> print f.variables["strvar"]
    +<type "netCDF4._netCDF4.Variable">
     vlen strvar(z)
    -vlen data type: <type "str">
    +vlen data type: <type "str">
     unlimited dimensions:
     current size = (10,)
     
    @@ -2083,21 +2089,21 @@

    12) Enum data type.

    The base integer data type and a python dictionary describing the allowed values and their names are used to define an Enum data type using createEnumType.

    -
    >>> nc = Dataset('clouds.nc','w')
    ->>> # python dict with allowed values and their names.
    ->>> enum_dict = {u'Altocumulus': 7, u'Missing': 255, 
    ->>> u'Stratus': 2, u'Clear': 0,
    ->>> u'Nimbostratus': 6, u'Cumulus': 4, u'Altostratus': 5,
    ->>> u'Cumulonimbus': 1, u'Stratocumulus': 3}
    ->>> # create the Enum type called 'cloud_t'.
    ->>> cloud_type = nc.createEnumType(numpy.uint8,'cloud_t',enum_dict)
    +
    >>> nc = Dataset('clouds.nc','w')
    +>>> # python dict with allowed values and their names.
    +>>> enum_dict = {u'Altocumulus': 7, u'Missing': 255, 
    +>>> u'Stratus': 2, u'Clear': 0,
    +>>> u'Nimbostratus': 6, u'Cumulus': 4, u'Altostratus': 5,
    +>>> u'Cumulonimbus': 1, u'Stratocumulus': 3}
    +>>> # create the Enum type called 'cloud_t'.
    +>>> cloud_type = nc.createEnumType(numpy.uint8,'cloud_t',enum_dict)
     >>> print cloud_type
    -<type 'netCDF4._netCDF4.EnumType'>: name = 'cloud_t',
    -numpy dtype = uint8, fields/values ={u'Cumulus': 4,
    -u'Altocumulus': 7, u'Missing': 255,
    -u'Stratus': 2, u'Clear': 0,
    -u'Cumulonimbus': 1, u'Stratocumulus': 3,
    -u'Nimbostratus': 6, u'Altostratus': 5}
    +<type 'netCDF4._netCDF4.EnumType'>: name = 'cloud_t',
    +numpy dtype = uint8, fields/values ={u'Cumulus': 4,
    +u'Altocumulus': 7, u'Missing': 255,
    +u'Stratus': 2, u'Clear': 0,
    +u'Cumulonimbus': 1, u'Stratocumulus': 3,
    +u'Nimbostratus': 6, u'Altostratus': 5}
     
    @@ -2106,32 +2112,32 @@

    12) Enum data type.

    cloud types in enum_dict. A ValueError will be raised if an attempt is made to write an integer value not associated with one of the specified names.

    -
    >>> time = nc.createDimension('time',None)
    ->>> # create a 1d variable of type 'cloud_type'.
    ->>> # The fill_value is set to the 'Missing' named value.
    +
    >>> time = nc.createDimension('time',None)
    +>>> # create a 1d variable of type 'cloud_type'.
    +>>> # The fill_value is set to the 'Missing' named value.
     >>> cloud_var =
    ->>> nc.createVariable('primary_cloud',cloud_type,'time',
    ->>> fill_value=enum_dict['Missing'])
    ->>> # write some data to the variable.
    ->>> cloud_var[:] = [enum_dict['Clear'],enum_dict['Stratus'],
    ->>> enum_dict['Cumulus'],enum_dict['Missing'],
    ->>> enum_dict['Cumulonimbus']]
    +>>> nc.createVariable('primary_cloud',cloud_type,'time',
    +>>> fill_value=enum_dict['Missing'])
    +>>> # write some data to the variable.
    +>>> cloud_var[:] = [enum_dict['Clear'],enum_dict['Stratus'],
    +>>> enum_dict['Cumulus'],enum_dict['Missing'],
    +>>> enum_dict['Cumulonimbus']]
     >>> nc.close()
    ->>> # reopen the file, read the data.
    ->>> nc = Dataset('clouds.nc')
    ->>> cloud_var = nc.variables['primary_cloud']
    +>>> # reopen the file, read the data.
    +>>> nc = Dataset('clouds.nc')
    +>>> cloud_var = nc.variables['primary_cloud']
     >>> print cloud_var
    -<type 'netCDF4._netCDF4.Variable'>
    +<type 'netCDF4._netCDF4.Variable'>
     enum primary_cloud(time)
         _FillValue: 255
     enum data type: uint8
     unlimited dimensions: time
     current shape = (5,)
     >>> print cloud_var.datatype.enum_dict
    -{u'Altocumulus': 7, u'Missing': 255, u'Stratus': 2,
    -u'Clear': 0, u'Nimbostratus': 6, u'Cumulus': 4,
    -u'Altostratus': 5, u'Cumulonimbus': 1,
    -u'Stratocumulus': 3}
    +{u'Altocumulus': 7, u'Missing': 255, u'Stratus': 2,
    +u'Clear': 0, u'Nimbostratus': 6, u'Cumulus': 4,
    +u'Altostratus': 5, u'Cumulonimbus': 1,
    +u'Stratocumulus': 3}
     >>> print cloud_var[:]
     [0 2 4 -- 1]
     >>> nc.close()
    @@ -2144,10 +2150,10 @@ 

    13) Parallel IO.

    be built with parallel IO capabilities enabled. To use parallel IO, your program must be running in an MPI environment using mpi4py.

    -
    >>> from mpi4py import MPI
    +
    >>> from mpi4py import MPI
     >>> import numpy as np
     >>> from netCDF4 import Dataset
    ->>> rank = MPI.COMM_WORLD.rank  # The process ID (integer 0-3 for 4-process run)
    +>>> rank = MPI.COMM_WORLD.rank  # The process ID (integer 0-3 for 4-process run)
     
    @@ -2156,7 +2162,7 @@

    13) Parallel IO.

    The parallel features of netcdf4-python are mostly transparent - when a new dataset is created or an existing dataset is opened, use the parallel keyword to enable parallel access.

    -
    >>> nc = Dataset('parallel_tst.nc','w',parallel=True)
    +
    >>> nc = Dataset('parallel_tst.nc','w',parallel=True)
     
    @@ -2164,8 +2170,8 @@

    13) Parallel IO.

MPI communicator (MPI_COMM_WORLD is used by default). Each process (or rank) can now write to the file independently. In this example the process rank is written to a different variable index on each task.

    -
    >>> d = nc.createDimension('dim',4)
    ->>> v = nc.createVariable('var', numpy.int, 'dim')
    +
    >>> d = nc.createDimension('dim',4)
    +>>> v = nc.createVariable('var', numpy.int, 'dim')
     >>> v[rank] = rank
     >>> nc.close()
     
    @@ -2220,19 +2226,19 @@ 

    14) Dealing with strings.

    U#) array is created. When writing the data, stringtochar is used to convert the numpy string array to an array of characters with one more dimension. For example,

    -
    >>> nc = Dataset('stringtest.nc','w',format='NETCDF4_CLASSIC')
    ->>> nc.createDimension('nchars',3)
    ->>> nc.createDimension('nstrings',None)
    ->>> v = nc.createVariable('strings','S1',('nstrings','nchars'))
    ->>> datain = numpy.array(['foo','bar'],dtype='S3')
    ->>> v[:] = stringtochar(datain) # manual conversion to char array
    ->>> v[:] # data returned as char array
    -[[b'f' b'o' b'o']
    -[b'b' b'a' b'r']]
    ->>> v._Encoding = 'ascii' # this enables automatic conversion
    ->>> v[:] = datain # conversion to char array done internally
    ->>> v[:] # data returned in numpy string array
    -['foo' 'bar']
    +
    >>> nc = Dataset('stringtest.nc','w',format='NETCDF4_CLASSIC')
    +>>> nc.createDimension('nchars',3)
    +>>> nc.createDimension('nstrings',None)
    +>>> v = nc.createVariable('strings','S1',('nstrings','nchars'))
    +>>> datain = numpy.array(['foo','bar'],dtype='S3')
    +>>> v[:] = stringtochar(datain) # manual conversion to char array
    +>>> v[:] # data returned as char array
    +[[b'f' b'o' b'o']
    +[b'b' b'a' b'r']]
    +>>> v._Encoding = 'ascii' # this enables automatic conversion
    +>>> v[:] = datain # conversion to char array done internally
    +>>> v[:] # data returned in numpy string array
    +['foo' 'bar']
     >>> nc.close()
     
    @@ -2251,27 +2257,27 @@

    14) Dealing with strings.

    define the compound data type - the string dtype will be converted to character array dtype under the hood when creating the netcdf compound type. Here's an example:

    -
    >>> nc = Dataset('compoundstring_example.nc','w')
    ->>> dtype = numpy.dtype([('observation', 'f4'),
    -                  ('station_name','S80')])
    ->>> station_data_t = nc.createCompoundType(dtype,'station_data')
    ->>> nc.createDimension('station',None)
    ->>> statdat = nc.createVariable('station_obs', station_data_t, ('station',))
    +
    >>> nc = Dataset('compoundstring_example.nc','w')
    +>>> dtype = numpy.dtype([('observation', 'f4'),
    +                  ('station_name','S80')])
    +>>> station_data_t = nc.createCompoundType(dtype,'station_data')
    +>>> nc.createDimension('station',None)
    +>>> statdat = nc.createVariable('station_obs', station_data_t, ('station',))
     >>> data = numpy.empty(2,dtype)
    ->>> data['observation'][:] = (123.,3.14)
    ->>> data['station_name'][:] = ('Boulder','New York')
    ->>> statdat.dtype # strings actually stored as character arrays
    -{'names':['observation','station_name'], 'formats':['<f4',('S1', (80,))], 'offsets':[0,4], 'itemsize':84, 'aligned':True}
    ->>> statdat[:] = data # strings converted to character arrays internally
    ->>> statdat[:] # character arrays converted back to strings
    -[(123.  , 'Boulder') (  3.14, 'New York')]
    +>>> data['observation'][:] = (123.,3.14)
    +>>> data['station_name'][:] = ('Boulder','New York')
    +>>> statdat.dtype # strings actually stored as character arrays
    +{'names':['observation','station_name'], 'formats':['<f4',('S1', (80,))], 'offsets':[0,4], 'itemsize':84, 'aligned':True}
    +>>> statdat[:] = data # strings converted to character arrays internally
    +>>> statdat[:] # character arrays converted back to strings
    +[(123.  , 'Boulder') (  3.14, 'New York')]
     >>> statdat[:].dtype
    -{'names':['observation','station_name'], 'formats':['<f4','S80'], 'offsets':[0,4], 'itemsize':84, 'aligned':True}
    ->>> statdat.set_auto_chartostring(False) # turn off auto-conversion
    ->>> statdat[:] = data.view(dtype=[('observation', 'f4'),('station_name','S1',10)])
    ->>> statdat[:] # now structured array with char array subtype is returned
    -[(123.  , ['B', 'o', 'u', 'l', 'd', 'e', 'r', '', '', ''])
    -(  3.14, ['N', 'e', 'w', ' ', 'Y', 'o', 'r', 'k', '', ''])]
    +{'names':['observation','station_name'], 'formats':['<f4','S80'], 'offsets':[0,4], 'itemsize':84, 'aligned':True}
    +>>> statdat.set_auto_chartostring(False) # turn off auto-conversion
    +>>> statdat[:] = data.view(dtype=[('observation', 'f4'),('station_name','S1',10)])
    +>>> statdat[:] # now structured array with char array subtype is returned
    +[(123.  , ['B', 'o', 'u', 'l', 'd', 'e', 'r', '', '', ''])
    +(  3.14, ['N', 'e', 'w', ' ', 'Y', 'o', 'r', 'k', '', ''])]
     >>> nc.close()
     
    @@ -2527,7 +2533,7 @@

    Classes

    Ancestors (in MRO)

    Class variables

    @@ -2656,7 +2662,7 @@

    Static methods

    Ancestors (in MRO)

    • Dataset
    • -
    • builtins.object
    • +
    • __builtin__.object

    Class variables

    @@ -3173,10 +3179,10 @@

    Static methods

    Returns a list of variables that match specific conditions.

    Can pass in key=value parameters and variables are returned that contain all of the matches. For example,

    -
    >>> # Get variables with x-axis attribute.
    ->>> vs = nc.get_variables_by_attributes(axis='X')
    ->>> # Get variables with matching "standard_name" attribute
    ->>> vs = nc.get_variables_by_attributes(standard_name='northward_sea_water_velocity')
    +
    >>> # Get variables with x-axis attribute.
    +>>> vs = nc.get_variables_by_attributes(axis='X')
    +>>> # Get variables with matching "standard_name" attribute
    +>>> vs = nc.get_variables_by_attributes(standard_name='northward_sea_water_velocity')
     
    @@ -3184,11 +3190,11 @@

    Static methods

    callable returns True. The callable should accept a single parameter, the attribute value. None is given as the attribute value when the attribute does not exist on the variable. For example,

    -
    >>> # Get Axis variables
    ->>> vs = nc.get_variables_by_attributes(axis=lambda v: v in ['X', 'Y', 'Z', 'T'])
    ->>> # Get variables that don't have an "axis" attribute
    +
    >>> # Get Axis variables
    +>>> vs = nc.get_variables_by_attributes(axis=lambda v: v in ['X', 'Y', 'Z', 'T'])
    +>>> # Get variables that don't have an "axis" attribute
     >>> vs = nc.get_variables_by_attributes(axis=lambda v: v is None)
    ->>> # Get variables that have a "grid_mapping" attribute
    +>>> # Get variables that have a "grid_mapping" attribute
     >>> vs = nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None)
     
    @@ -3543,7 +3549,7 @@

    Static methods

    Ancestors (in MRO)

    • Dimension
    • -
    • builtins.object
    • +
    • __builtin__.object

    Class variables

    @@ -3641,7 +3647,7 @@

    Static methods

    Ancestors (in MRO)

    • EnumType
    • -
    • builtins.object
    • +
    • __builtin__.object

    Class variables

    @@ -3729,15 +3735,22 @@

    Ancestors (in MRO)

    Class variables

    var cmptypes

    +

    + Inheritance: + Dataset.cmptypes +

    +

    The cmptypes dictionary maps the names of +compound types defined for the Group or Dataset to instances of the +CompoundType class.

    @@ -3745,9 +3758,16 @@

    Class variables

    var data_model

    +

    + Inheritance: + Dataset.data_model +

    +

    data_model describes the netCDF +data model version, one of NETCDF3_CLASSIC, NETCDF4, +NETCDF4_CLASSIC, NETCDF3_64BIT_OFFSET or NETCDF3_64BIT_DATA.

    @@ -3755,9 +3775,16 @@

    Class variables

    var dimensions

    +

    + Inheritance: + Dataset.dimensions +

    +

    The dimensions dictionary maps the names of +dimensions defined for the Group or Dataset to instances of the +Dimension class.

    @@ -3765,9 +3792,18 @@

    Class variables

    var disk_format

    +

    + Inheritance: + Dataset.disk_format +

    +

    disk_format describes the underlying +file format, one of NETCDF3, HDF5, HDF4, +PNETCDF, DAP2, DAP4 or UNDEFINED. Only available if using +netcdf C library version >= 4.3.1, otherwise will always return +UNDEFINED.

    @@ -3775,9 +3811,16 @@

    Class variables

    var enumtypes

    +

    + Inheritance: + Dataset.enumtypes +

    +

    The enumtypes dictionary maps the names of +Enum types defined for the Group or Dataset to instances of the +EnumType class.

    @@ -3785,9 +3828,14 @@

    Class variables

    var file_format

    +

    + Inheritance: + Dataset.file_format +

    +

    same as data_model, retained for backwards compatibility.

    @@ -3795,9 +3843,17 @@

    Class variables

    var groups

    +

    + Inheritance: + Dataset.groups +

    +

    The groups dictionary maps the names of groups created for +this Dataset or Group to instances of the Group class (the +Dataset class is simply a special case of the Group class which +describes the root group in the netCDF4 file).

    @@ -3805,9 +3861,15 @@

    Class variables

    var keepweakref

    +

    + Inheritance: + Dataset.keepweakref +

    +

If True, child Dimension and Variable objects only keep weak references to
+the parent Dataset or Group.

    @@ -3826,9 +3888,15 @@

    Class variables

    var parent

    +

    + Inheritance: + Dataset.parent +

    +

    parent is a reference to the parent +Group instance. None for the root group or Dataset instance

    @@ -3836,9 +3904,17 @@

    Class variables

    var path

    +

    + Inheritance: + Dataset.path +

    +

path shows the location of the Group in
+the Dataset in a unix directory format (the names of groups in the
+hierarchy separated by forward slashes). A Dataset instance is the root
+group, so the path is simply '/'.

    @@ -3846,9 +3922,16 @@

    Class variables

    var variables

    +

    + Inheritance: + Dataset.variables +

    +

    The variables dictionary maps the names of variables +defined for this Dataset or Group to instances of the Variable +class.

    @@ -3856,9 +3939,16 @@

    Class variables

    var vltypes

    +

    + Inheritance: + Dataset.vltypes +

    +

    The vltypes dictionary maps the names of +variable-length types defined for the Group or Dataset to instances of the +VLType class.
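These per-instance attributes can all be inspected the same way (a quick
sketch, reusing the test.nc file from the tutorial):

    :::python
    >>> nc = Dataset("test.nc")
    >>> print nc.data_model        # e.g. NETCDF4
    >>> print nc.dimensions.keys() # dimension names
    >>> print nc.variables.keys()  # variable names
    >>> print nc.path              # '/' for a root group or Dataset
    >>> nc.close()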

    @@ -3895,6 +3985,10 @@

    Static methods

    def close(

    self)

    +

    + Inheritance: + Dataset.close +

    @@ -3911,10 +4005,14 @@

    Static methods

    def createCompoundType(

    self, datatype, datatype_name)

    +

    + Inheritance: + Dataset.createCompoundType +

    -

    Creates a new compound data type named datatype_name from the numpy +

    Creates a new compound data type named datatype_name from the numpy dtype object datatype.

    Note: If the new compound data type contains other compound data types (i.e. it is a 'nested' compound type, where not all of the elements @@ -3933,10 +4031,14 @@

    Static methods

    def createDimension(

    self, dimname, size=None)

+ Inheritance:
+ Dataset.createDimension

-Creates a new dimension with the given dimname and size.
+Creates a new dimension with the given dimname and size.

size must be a positive integer or None, which stands for
"unlimited" (default is None). Specifying a size of 0 also results in an
unlimited dimension. The return value is the Dimension

@@ -3955,10 +4057,14 @@
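A short sketch of both forms (file name illustrative):

>>> from netCDF4 import Dataset
>>> f = Dataset("dimtest.nc", "w")
>>> lat = f.createDimension("lat", 73)     # fixed size
>>> time = f.createDimension("time", None) # unlimited
>>> time.isunlimited()
True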

    Static methods

    def createEnumType(

    self, datatype, datatype_name, enum_dict)

+ Inheritance:
+ Dataset.createEnumType

-Creates a new Enum data type named datatype_name from a numpy
+Creates a new Enum data type named datatype_name from a numpy
 integer dtype object datatype, and a python dictionary defining the enum
 fields and values.

The return value is the EnumType class instance describing the new

@@ -3974,10 +4080,14 @@
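A sketch with an illustrative enum (the names and values are chosen for the example):

>>> import numpy as np
>>> from netCDF4 import Dataset
>>> f = Dataset("enumtest.nc", "w")
>>> enum_dict = {"clear": 0, "cumulus": 1, "stratus": 2}
>>> cloud_t = f.createEnumType(np.uint8, "cloud_t", enum_dict)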

    Static methods

    def createGroup(

    self, groupname)

+ Inheritance:
+ Dataset.createGroup

-Creates a new Group with the given groupname.
+Creates a new Group with the given groupname.

If groupname is specified as a path, using forward slashes as in unix to
separate components, then intermediate groups will be created as
necessary (analogous to mkdir -p in unix). For example,

@@ -3997,10 +4107,14 @@
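A sketch of the path form described above (file and group names illustrative):

>>> from netCDF4 import Dataset
>>> f = Dataset("grptest.nc", "w")
>>> fcst = f.createGroup("/forecasts/model1")  # intermediate group created as needed, like mkdir -p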

    Static methods

    def createVLType(

    self, datatype, datatype_name)

+ Inheritance:
+ Dataset.createVLType

-Creates a new VLEN data type named datatype_name from a numpy
+Creates a new VLEN data type named datatype_name from a numpy
 dtype object datatype.

The return value is the VLType class instance describing the new
datatype.
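A minimal sketch (file and type names illustrative):

>>> import numpy as np
>>> from netCDF4 import Dataset
>>> f = Dataset("vltest.nc", "w")
>>> vlen_t = f.createVLType(np.int32, "phony_vlen")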

    @@ -4015,10 +4129,14 @@

    Static methods

    def createVariable(

    self, varname, datatype, dimensions=(), zlib=False, complevel=4, shuffle=True, fletcher32=False, contiguous=False, chunksizes=None, endian='native', least_significant_digit=None, fill_value=None)

+ Inheritance:
+ Dataset.createVariable

-Creates a new variable with the given varname, datatype, and
+Creates a new variable with the given varname, datatype, and
 dimensions. If dimensions are not given, the variable is assumed to be
 a scalar.

If varname is specified as a path, using forward slashes as in unix to

@@ -4128,10 +4246,14 @@
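A sketch combining a few of the keyword arguments shown in the signature (file, dimension, and variable names illustrative):

>>> from netCDF4 import Dataset
>>> f = Dataset("vartest.nc", "w")
>>> f.createDimension("time", None)
>>> f.createDimension("lat", 73)
>>> temp = f.createVariable("temp", "f4", ("time", "lat"), zlib=True, least_significant_digit=3)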

    Static methods

    def delncattr(

    self,name,value)

+ Inheritance:
+ Dataset.delncattr

-delete a netCDF dataset or group attribute. Use if you need to delete a
+delete a netCDF dataset or group attribute. Use if you need to delete a
 netCDF attribute with the same name as one of the reserved python
 attributes.

    @@ -4145,10 +4267,14 @@

    Static methods

    def filepath(

    self,encoding=None)

+ Inheritance:
+ Dataset.filepath

-Get the file system path (or the opendap URL) which was used to
+Get the file system path (or the opendap URL) which was used to
 open/create the Dataset. Requires netcdf >= 4.1.2. The path is decoded
 into a string using sys.getfilesystemencoding() by default; this can be
 changed using the encoding kwarg.
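A sketch (file name illustrative; the method simply echoes what was passed when the file was opened):

>>> from netCDF4 import Dataset
>>> f = Dataset("vartest.nc", "r")
>>> path = f.filepath()  # e.g. 'vartest.nc'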

    @@ -4163,16 +4289,20 @@

    Static methods

    def get_variables_by_attributes(

    ...)

+ Inheritance:
+ Dataset.get_variables_by_attributes

-Returns a list of variables that match specific conditions.
+Returns a list of variables that match specific conditions.

Can pass in key=value parameters and variables are returned that
contain all of the matches. For example,

->>> # Get variables with x-axis attribute.
->>> vs = nc.get_variables_by_attributes(axis='X')
->>> # Get variables with matching "standard_name" attribute
->>> vs = nc.get_variables_by_attributes(standard_name='northward_sea_water_velocity')
+>>> # Get variables with x-axis attribute.
+>>> vs = nc.get_variables_by_attributes(axis='X')
+>>> # Get variables with matching "standard_name" attribute
+>>> vs = nc.get_variables_by_attributes(standard_name='northward_sea_water_velocity')
     
    @@ -4180,11 +4310,11 @@

    Static methods

    callable returns True. The callable should accept a single parameter, the attribute value. None is given as the attribute value when the attribute does not exist on the variable. For example,

->>> # Get Axis variables
->>> vs = nc.get_variables_by_attributes(axis=lambda v: v in ['X', 'Y', 'Z', 'T'])
->>> # Get variables that don't have an "axis" attribute
+>>> # Get Axis variables
+>>> vs = nc.get_variables_by_attributes(axis=lambda v: v in ['X', 'Y', 'Z', 'T'])
+>>> # Get variables that don't have an "axis" attribute
     >>> vs = nc.get_variables_by_attributes(axis=lambda v: v is None)
    ->>> # Get variables that have a "grid_mapping" attribute
    +>>> # Get variables that have a "grid_mapping" attribute
     >>> vs = nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None)
     
    @@ -4198,10 +4328,14 @@

    Static methods

    def getncattr(

    self,name)

+ Inheritance:
+ Dataset.getncattr

-retrieve a netCDF dataset or group attribute.
+retrieve a netCDF dataset or group attribute.
 Use if you need to get a netCDF attribute with the same name as one of
 the reserved python attributes.

option kwarg encoding can be used to specify the

@@ -4217,10 +4351,14 @@

    Static methods

    def isopen(

    ...)

+ Inheritance:
+ Dataset.isopen

-is the Dataset open or closed?
+is the Dataset open or closed?

    @@ -4232,10 +4370,14 @@

    Static methods

    def ncattrs(

    self)

+ Inheritance:
+ Dataset.ncattrs

-return netCDF global attribute names for this Dataset or Group in a list.
+return netCDF global attribute names for this Dataset or Group in a list.

    @@ -4247,10 +4389,14 @@

    Static methods

    def renameAttribute(

    self, oldname, newname)

+ Inheritance:
+ Dataset.renameAttribute

-rename a Dataset or Group attribute named oldname to newname.
+rename a Dataset or Group attribute named oldname to newname.

    @@ -4262,10 +4408,14 @@

    Static methods

    def renameDimension(

    self, oldname, newname)

+ Inheritance:
+ Dataset.renameDimension

-rename a Dimension named oldname to newname.
+rename a Dimension named oldname to newname.

    @@ -4277,10 +4427,14 @@

    Static methods

    def renameGroup(

    self, oldname, newname)

+ Inheritance:
+ Dataset.renameGroup

-rename a Group named oldname to newname (requires netcdf >= 4.3.1).
+rename a Group named oldname to newname (requires netcdf >= 4.3.1).

    @@ -4292,10 +4446,14 @@

    Static methods

    def renameVariable(

    self, oldname, newname)

+ Inheritance:
+ Dataset.renameVariable

-rename a Variable named oldname to newname
+rename a Variable named oldname to newname

    @@ -4307,10 +4465,14 @@

    Static methods

    def set_always_mask(

    self, True_or_False)

+ Inheritance:
+ Dataset.set_always_mask

-Call set_always_mask for all variables contained in
+Call set_always_mask for all variables contained in
 this Dataset or Group, as well as for all variables in all its subgroups.

True_or_False: Boolean determining if automatic conversion of

@@ -4330,10 +4492,14 @@

    Static methods

    def set_auto_chartostring(

    self, True_or_False)

+ Inheritance:
+ Dataset.set_auto_chartostring

-Call set_auto_chartostring for all variables contained in this Dataset or
+Call set_auto_chartostring for all variables contained in this Dataset or
 Group, as well as for all variables in all its subgroups.

True_or_False: Boolean determining if automatic conversion of
all character arrays <--> string arrays should be performed for

@@ -4352,10 +4518,14 @@

    Static methods

    def set_auto_mask(

    self, True_or_False)

+ Inheritance:
+ Dataset.set_auto_mask

-Call set_auto_mask for all variables contained in this Dataset or
+Call set_auto_mask for all variables contained in this Dataset or
 Group, as well as for all variables in all its subgroups.

True_or_False: Boolean determining if automatic conversion to masked
arrays shall be applied for all variables.

@@ -4372,10 +4542,14 @@

    @@ -4372,10 +4542,14 @@

    Static methods

    def set_auto_maskandscale(

    self, True_or_False)

+ Inheritance:
+ Dataset.set_auto_maskandscale

-Call set_auto_maskandscale for all variables contained in this Dataset or
+Call set_auto_maskandscale for all variables contained in this Dataset or
 Group, as well as for all variables in all its subgroups.

True_or_False: Boolean determining if automatic conversion to masked
arrays and variable scaling shall be applied for all variables.

@@ -4392,10 +4566,14 @@
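For example, one call on the root group toggles the behaviour for every variable (sketch; file name illustrative):

>>> from netCDF4 import Dataset
>>> f = Dataset("vartest.nc", "r")
>>> f.set_auto_maskandscale(False)  # raw packed values, no masking, for all variables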

    @@ -4392,10 +4566,14 @@

    Static methods

    def set_auto_scale(

    self, True_or_False)

+ Inheritance:
+ Dataset.set_auto_scale

-Call set_auto_scale for all variables contained in this Dataset or
+Call set_auto_scale for all variables contained in this Dataset or
 Group, as well as for all variables in all its subgroups.

True_or_False: Boolean determining if automatic variable scaling
shall be applied for all variables.

@@ -4412,10 +4590,14 @@

    @@ -4412,10 +4590,14 @@

    Static methods

    def set_fill_off(

    self)

+ Inheritance:
+ Dataset.set_fill_off

-Sets the fill mode for a Dataset open for writing to off.
+Sets the fill mode for a Dataset open for writing to off.

This will prevent the data from being pre-filled with fill values, which
may result in some performance improvements. However, you must then make
sure the data is actually written before being read.

@@ -4430,10 +4612,14 @@

    @@ -4430,10 +4612,14 @@

    Static methods

    def set_fill_on(

    self)

+ Inheritance:
+ Dataset.set_fill_on

-Sets the fill mode for a Dataset open for writing to on.
+Sets the fill mode for a Dataset open for writing to on.

This causes data to be pre-filled with fill values. The fill values can be
controlled by the variable's _Fill_Value attribute, but it is usually
sufficient to use the netCDF default _Fill_Value (defined

@@ -4452,10 +4638,14 @@

    Static methods

    def setncattr(

    self,name,value)

+ Inheritance:
+ Dataset.setncattr

-set a netCDF dataset or group attribute using name,value pair.
+set a netCDF dataset or group attribute using name,value pair.
 Use if you need to set a netCDF attribute with the same name as one of
 the reserved python attributes.

    @@ -4469,10 +4659,14 @@

    Static methods

    def setncattr_string(

    self,name,value)

+ Inheritance:
+ Dataset.setncattr_string

-set a netCDF dataset or group string attribute using name,value pair.
+set a netCDF dataset or group string attribute using name,value pair.
 Use if you need to ensure that a netCDF attribute is created with type
 NC_STRING if the file format is NETCDF4.

    @@ -4486,10 +4680,14 @@

    Static methods

    def setncatts(

    self,attdict)

+ Inheritance:
+ Dataset.setncatts

-set a bunch of netCDF dataset or group attributes at once using a python dictionary.
+set a bunch of netCDF dataset or group attributes at once using a python dictionary.
 This may be faster when setting a lot of attributes for a NETCDF3
 formatted file, since nc_redef/nc_enddef is not called in between setting
 each attribute.
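A sketch of the dictionary form (attribute names and values illustrative):

>>> from netCDF4 import Dataset
>>> f = Dataset("atttest.nc", "w", format="NETCDF3_CLASSIC")
>>> f.setncatts({"institution": "XYZ", "history": "created for illustration"})  # one define-mode switch for all attributes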

    @@ -4504,10 +4702,14 @@

    Static methods

    def sync(

    self)

+ Inheritance:
+ Dataset.sync

-Writes all buffered data in the Dataset to the disk file.
+Writes all buffered data in the Dataset to the disk file.

    @@ -4526,18 +4728,18 @@

    Static methods

    or NETCDF3_64BIT_DATA format (NETCDF4 Datasets won't work).

    Adapted from pycdf by Andre Gosselin.

    Example usage (See __init__ for more details):

->>> import numpy
->>> # create a series of netCDF files with a variable sharing
->>> # the same unlimited dimension.
+>>> import numpy as np
    +>>> # create a series of netCDF files with a variable sharing
    +>>> # the same unlimited dimension.
     >>> for nf in range(10):
    ->>>     f = Dataset("mftest%s.nc" % nf,"w")
    ->>>     f.createDimension("x",None)
    ->>>     x = f.createVariable("x","i",("x",))
    ->>>     x[0:10] = numpy.arange(nf*10,10*(nf+1))
    +>>>     f = Dataset("mftest%s.nc" % nf,"w",format='NETCDF4_CLASSIC')
    +>>>     f.createDimension("x",None)
    +>>>     x = f.createVariable("x","i",("x",))
    +>>>     x[0:10] = np.arange(nf*10,10*(nf+1))
     >>>     f.close()
    ->>> # now read all those files in at once, in one Dataset.
    ->>> f = MFDataset("mftest*nc")
    ->>> print f.variables["x"][:]
    +>>> # now read all those files in at once, in one Dataset.
    +>>> f = MFDataset("mftest*nc")
    +>>> print f.variables["x"][:]
     [ 0  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24
      25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
      50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74
    @@ -4552,15 +4754,22 @@ 

    Ancestors (in MRO)

    Class variables

    var cmptypes

+ Inheritance:
+ Dataset.cmptypes

The cmptypes dictionary maps the names of
+compound types defined for the Group or Dataset to instances of the
+CompoundType class.

@@ -4568,9 +4777,16 @@

Class variables

var data_model

+ Inheritance:
+ Dataset.data_model

data_model describes the netCDF
+data model version, one of NETCDF3_CLASSIC, NETCDF4,
+NETCDF4_CLASSIC, NETCDF3_64BIT_OFFSET or NETCDF3_64BIT_DATA.

@@ -4578,9 +4794,16 @@

Class variables

var dimensions

+ Inheritance:
+ Dataset.dimensions

The dimensions dictionary maps the names of
+dimensions defined for the Group or Dataset to instances of the
+Dimension class.

@@ -4588,9 +4811,18 @@

Class variables

var disk_format

+ Inheritance:
+ Dataset.disk_format

disk_format describes the underlying
+file format, one of NETCDF3, HDF5, HDF4,
+PNETCDF, DAP2, DAP4 or UNDEFINED. Only available if using
+netcdf C library version >= 4.3.1, otherwise will always return
+UNDEFINED.

@@ -4598,9 +4830,16 @@

Class variables

var enumtypes

+ Inheritance:
+ Dataset.enumtypes

The enumtypes dictionary maps the names of
+Enum types defined for the Group or Dataset to instances of the
+EnumType class.

@@ -4608,9 +4847,14 @@

Class variables

var file_format

+ Inheritance:
+ Dataset.file_format

same as data_model, retained for backwards compatibility.

@@ -4618,9 +4862,17 @@

Class variables

var groups

+ Inheritance:
+ Dataset.groups

The groups dictionary maps the names of groups created for
+this Dataset or Group to instances of the Group class (the
+Dataset class is simply a special case of the Group class which
+describes the root group in the netCDF4 file).

@@ -4628,9 +4880,15 @@

Class variables

var keepweakref

+ Inheritance:
+ Dataset.keepweakref

If True, child Dimension and Variables objects only keep weak references to
+the parent Dataset or Group.

@@ -4638,9 +4896,15 @@

Class variables

var parent

+ Inheritance:
+ Dataset.parent

parent is a reference to the parent
+Group instance. None for the root group or Dataset instance.

@@ -4648,9 +4912,17 @@

Class variables

var path

+ Inheritance:
+ Dataset.path

path shows the location of the Group in
+the Dataset in a unix directory format (the names of groups in the
+hierarchy separated by forward slashes). A Dataset instance is the root
+group, so the path is simply '/'.

@@ -4658,9 +4930,16 @@

Class variables

var variables

+ Inheritance:
+ Dataset.variables

The variables dictionary maps the names of variables
+defined for this Dataset or Group to instances of the Variable
+class.

    @@ -4668,83 +4947,35 @@

    Class variables

    var vltypes

+ Inheritance:
+ Dataset.vltypes

The vltypes dictionary maps the names of
+variable-length types defined for the Group or Dataset to instances of the
+VLType class.

Static methods

-def __init__(
-self, files, check=False, aggdim=None, exclude=[], master_file=None)
-
-__init__(self, files, check=False, aggdim=None, exclude=[])
-
-Open a Dataset spanning multiple files, making it look as if it was a
-single file. Variables in the list of files that share the same
-dimension (specified with the keyword aggdim) are aggregated. If
-aggdim is not specified, the unlimited is aggregated. Currently,
-aggdim must be the leftmost (slowest varying) dimension of each
-of the variables to be aggregated.
-
-files: either a sequence of netCDF files or a string with a
-wildcard (converted to a sorted list of files using glob) If
-the master_file kwarg is not specified, the first file
-in the list will become the "master" file, defining all the
-variables with an aggregation dimension which may span
-subsequent files. Attribute access returns attributes only from "master"
-file. The files are always opened in read-only mode.
-
-check: True if you want to do consistency checking to ensure the
-correct variables structure for all of the netcdf files. Checking makes
-the initialization of the MFDataset instance much slower. Default is
-False.
-
-aggdim: The name of the dimension to aggregate over (must
-be the leftmost dimension of each of the variables to be aggregated).
-If None (default), aggregate over the unlimited dimension.
-
-exclude: A list of variable names to exclude from aggregation.
-Default is an empty list.
-
-master_file: file to use as "master file", defining all the
-variables with an aggregation dimension and all global attributes.
-
-def close(
-self)
-
-close(self)
-
-close all the open files.

    Static methods

    def createCompoundType(

    self, datatype, datatype_name)

+ Inheritance:
+ Dataset.createCompoundType

-Creates a new compound data type named datatype_name from the numpy
+Creates a new compound data type named datatype_name from the numpy
 dtype object datatype.

Note: If the new compound data type contains other compound data types
(i.e. it is a 'nested' compound type, where not all of the elements

@@ -4763,10 +4994,14 @@

    Static methods

    def createDimension(

    self, dimname, size=None)

+ Inheritance:
+ Dataset.createDimension

-Creates a new dimension with the given dimname and size.
+Creates a new dimension with the given dimname and size.

size must be a positive integer or None, which stands for
"unlimited" (default is None). Specifying a size of 0 also results in an
unlimited dimension. The return value is the Dimension

@@ -4785,10 +5020,14 @@

    Static methods

    def createEnumType(

    self, datatype, datatype_name, enum_dict)

+ Inheritance:
+ Dataset.createEnumType

-Creates a new Enum data type named datatype_name from a numpy
+Creates a new Enum data type named datatype_name from a numpy
 integer dtype object datatype, and a python dictionary defining the enum
 fields and values.

The return value is the EnumType class instance describing the new

@@ -4804,10 +5043,14 @@

    Static methods

    def createGroup(

    self, groupname)

+ Inheritance:
+ Dataset.createGroup

-Creates a new Group with the given groupname.
+Creates a new Group with the given groupname.

If groupname is specified as a path, using forward slashes as in unix to
separate components, then intermediate groups will be created as
necessary (analogous to mkdir -p in unix). For example,

@@ -4827,10 +5070,14 @@

    Static methods

    def createVLType(

    self, datatype, datatype_name)

+ Inheritance:
+ Dataset.createVLType

-Creates a new VLEN data type named datatype_name from a numpy
+Creates a new VLEN data type named datatype_name from a numpy
 dtype object datatype.

The return value is the VLType class instance describing the new
datatype.

    @@ -4845,10 +5092,14 @@

    Static methods

    def createVariable(

    self, varname, datatype, dimensions=(), zlib=False, complevel=4, shuffle=True, fletcher32=False, contiguous=False, chunksizes=None, endian='native', least_significant_digit=None, fill_value=None)

+ Inheritance:
+ Dataset.createVariable

-Creates a new variable with the given varname, datatype, and
+Creates a new variable with the given varname, datatype, and
 dimensions. If dimensions are not given, the variable is assumed to be
 a scalar.

If varname is specified as a path, using forward slashes as in unix to

@@ -4958,10 +5209,14 @@

    Static methods

    def delncattr(

    self,name,value)

+ Inheritance:
+ Dataset.delncattr

-delete a netCDF dataset or group attribute. Use if you need to delete a
+delete a netCDF dataset or group attribute. Use if you need to delete a
 netCDF attribute with the same name as one of the reserved python
 attributes.

    @@ -4975,10 +5230,14 @@

    Static methods

    def filepath(

    self,encoding=None)

+ Inheritance:
+ Dataset.filepath

-Get the file system path (or the opendap URL) which was used to
+Get the file system path (or the opendap URL) which was used to
 open/create the Dataset. Requires netcdf >= 4.1.2. The path is decoded
 into a string using sys.getfilesystemencoding() by default; this can be
 changed using the encoding kwarg.

    @@ -4993,16 +5252,20 @@

    Static methods

    def get_variables_by_attributes(

    ...)

+ Inheritance:
+ Dataset.get_variables_by_attributes

-Returns a list of variables that match specific conditions.
+Returns a list of variables that match specific conditions.

Can pass in key=value parameters and variables are returned that
contain all of the matches. For example,

->>> # Get variables with x-axis attribute.
->>> vs = nc.get_variables_by_attributes(axis='X')
->>> # Get variables with matching "standard_name" attribute
->>> vs = nc.get_variables_by_attributes(standard_name='northward_sea_water_velocity')
+>>> # Get variables with x-axis attribute.
+>>> vs = nc.get_variables_by_attributes(axis='X')
+>>> # Get variables with matching "standard_name" attribute
+>>> vs = nc.get_variables_by_attributes(standard_name='northward_sea_water_velocity')
     
    @@ -5010,11 +5273,11 @@

    Static methods

    callable returns True. The callable should accept a single parameter, the attribute value. None is given as the attribute value when the attribute does not exist on the variable. For example,

->>> # Get Axis variables
->>> vs = nc.get_variables_by_attributes(axis=lambda v: v in ['X', 'Y', 'Z', 'T'])
->>> # Get variables that don't have an "axis" attribute
+>>> # Get Axis variables
+>>> vs = nc.get_variables_by_attributes(axis=lambda v: v in ['X', 'Y', 'Z', 'T'])
+>>> # Get variables that don't have an "axis" attribute
     >>> vs = nc.get_variables_by_attributes(axis=lambda v: v is None)
    ->>> # Get variables that have a "grid_mapping" attribute
    +>>> # Get variables that have a "grid_mapping" attribute
     >>> vs = nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None)
     
    @@ -5028,10 +5291,14 @@

    Static methods

    def getncattr(

    self,name)

+ Inheritance:
+ Dataset.getncattr

-retrieve a netCDF dataset or group attribute.
+retrieve a netCDF dataset or group attribute.
 Use if you need to get a netCDF attribute with the same name as one of
 the reserved python attributes.

option kwarg encoding can be used to specify the

@@ -5047,26 +5314,14 @@

    Static methods

    def isopen(

    ...)

+ Inheritance:
+ Dataset.isopen

-is the Dataset open or closed?

-def ncattrs(
-self)
-
-ncattrs(self)
-
-return the netcdf attribute names from the master file.

+is the Dataset open or closed?

    @@ -5078,10 +5333,14 @@

    Static methods

    def renameAttribute(

    self, oldname, newname)

+ Inheritance:
+ Dataset.renameAttribute

-rename a Dataset or Group attribute named oldname to newname.
+rename a Dataset or Group attribute named oldname to newname.

    @@ -5093,10 +5352,14 @@

    Static methods

    def renameDimension(

    self, oldname, newname)

+ Inheritance:
+ Dataset.renameDimension

-rename a Dimension named oldname to newname.
+rename a Dimension named oldname to newname.

    @@ -5108,10 +5371,14 @@

    Static methods

    def renameGroup(

    self, oldname, newname)

+ Inheritance:
+ Dataset.renameGroup

-rename a Group named oldname to newname (requires netcdf >= 4.3.1).
+rename a Group named oldname to newname (requires netcdf >= 4.3.1).

    @@ -5123,10 +5390,14 @@

    Static methods

    def renameVariable(

    self, oldname, newname)

+ Inheritance:
+ Dataset.renameVariable

-rename a Variable named oldname to newname
+rename a Variable named oldname to newname

    @@ -5138,10 +5409,14 @@

    Static methods

    def set_always_mask(

    self, True_or_False)

+ Inheritance:
+ Dataset.set_always_mask

-Call set_always_mask for all variables contained in
+Call set_always_mask for all variables contained in
 this Dataset or Group, as well as for all variables in all its subgroups.

True_or_False: Boolean determining if automatic conversion of

@@ -5161,10 +5436,14 @@

    Static methods

    def set_auto_chartostring(

    self, True_or_False)

+ Inheritance:
+ Dataset.set_auto_chartostring

-Call set_auto_chartostring for all variables contained in this Dataset or
+Call set_auto_chartostring for all variables contained in this Dataset or
 Group, as well as for all variables in all its subgroups.

True_or_False: Boolean determining if automatic conversion of
all character arrays <--> string arrays should be performed for

@@ -5183,10 +5462,14 @@

    Static methods

    def set_auto_mask(

    self, True_or_False)

+ Inheritance:
+ Dataset.set_auto_mask

-Call set_auto_mask for all variables contained in this Dataset or
+Call set_auto_mask for all variables contained in this Dataset or
 Group, as well as for all variables in all its subgroups.

    True_or_False: Boolean determining if automatic conversion to masked arrays shall be applied for all variables.

    @@ -5203,10 +5486,14 @@

    Static methods

    def set_auto_maskandscale(

    self, True_or_False)

    +

    + Inheritance: + Dataset.set_auto_maskandscale +

    -

    Call set_auto_maskandscale for all variables contained in this Dataset or +

    Call set_auto_maskandscale for all variables contained in this Dataset or Group, as well as for all variables in all its subgroups.

    True_or_False: Boolean determining if automatic conversion to masked arrays and variable scaling shall be applied for all variables.

    @@ -5223,10 +5510,14 @@

    Static methods

    def set_auto_scale(

    self, True_or_False)

+ Inheritance:
+ Dataset.set_auto_scale

-Call set_auto_scale for all variables contained in this Dataset or
+Call set_auto_scale for all variables contained in this Dataset or
 Group, as well as for all variables in all its subgroups.

    True_or_False: Boolean determining if automatic variable scaling shall be applied for all variables.

    @@ -5243,10 +5534,14 @@

    Static methods

    def set_fill_off(

    self)

+ Inheritance:
+ Dataset.set_fill_off

-Sets the fill mode for a Dataset open for writing to off.
+Sets the fill mode for a Dataset open for writing to off.

This will prevent the data from being pre-filled with fill values, which
may result in some performance improvements. However, you must then make
sure the data is actually written before being read.

@@ -5261,10 +5556,14 @@

    @@ -5261,10 +5556,14 @@

    Static methods

    def set_fill_on(

    self)

+ Inheritance:
+ Dataset.set_fill_on

-Sets the fill mode for a Dataset open for writing to on.
+Sets the fill mode for a Dataset open for writing to on.

This causes data to be pre-filled with fill values. The fill values can be
controlled by the variable's _Fill_Value attribute, but it is usually
sufficient to use the netCDF default _Fill_Value (defined

@@ -5283,10 +5582,14 @@

    Static methods

    def setncattr(

    self,name,value)

+ Inheritance:
+ Dataset.setncattr

-set a netCDF dataset or group attribute using name,value pair.
+set a netCDF dataset or group attribute using name,value pair.
 Use if you need to set a netCDF attribute with the same name as one of
 the reserved python attributes.

    @@ -5300,10 +5603,14 @@

    Static methods

    def setncattr_string(

    self,name,value)

+ Inheritance:
+ Dataset.setncattr_string

-set a netCDF dataset or group string attribute using name,value pair.
+set a netCDF dataset or group string attribute using name,value pair.
 Use if you need to ensure that a netCDF attribute is created with type
 NC_STRING if the file format is NETCDF4.

    @@ -5317,10 +5624,14 @@

    Static methods

    def setncatts(

    self,attdict)

+ Inheritance:
+ Dataset.setncatts

-set a bunch of netCDF dataset or group attributes at once using a python dictionary.
+set a bunch of netCDF dataset or group attributes at once using a python dictionary.
 This may be faster when setting a lot of attributes for a NETCDF3
 formatted file, since nc_redef/nc_enddef is not called in between setting
 each attribute.

    @@ -5335,10 +5646,95 @@

    Static methods

    def sync(

    self)

+ Inheritance:
+ Dataset.sync

-Writes all buffered data in the Dataset to the disk file.
+Writes all buffered data in the Dataset to the disk file.

Methods

def __init__(
self, files, check=False, aggdim=None, exclude=[])

+ Inheritance:
+ Dataset.__init__

Open a Dataset spanning multiple files, making it look as if it was a
+single file. Variables in the list of files that share the same
+dimension (specified with the keyword aggdim) are aggregated. If
+aggdim is not specified, the unlimited dimension is aggregated. Currently,
+aggdim must be the leftmost (slowest varying) dimension of each
+of the variables to be aggregated.

files: either a sequence of netCDF files or a string with a
+wildcard (converted to a sorted list of files using glob). If
+the master_file kwarg is not specified, the first file
+in the list will become the "master" file, defining all the
+variables with an aggregation dimension which may span
+subsequent files. Attribute access returns attributes only from the "master"
+file. The files are always opened in read-only mode.

check: True if you want to do consistency checking to ensure the
+correct variables structure for all of the netcdf files. Checking makes
+the initialization of the MFDataset instance much slower. Default is
+False.

aggdim: The name of the dimension to aggregate over (must
+be the leftmost dimension of each of the variables to be aggregated).
+If None (default), aggregate over the unlimited dimension.

exclude: A list of variable names to exclude from aggregation.
+Default is an empty list.

master_file: file to use as the "master file", defining all the
+variables with an aggregation dimension and all global attributes.
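A sketch of the new kwarg in use, assuming the mftest files created in the example above (any member of the list is accepted):

>>> f = MFDataset("mftest*nc", master_file="mftest3.nc")  # attributes and aggregation variables come from mftest3.nc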

def close(
self)

+ Inheritance:
+ Dataset.close

close all the open files.

def ncattrs(
self)

+ Inheritance:
+ Dataset.ncattrs

return the netcdf attribute names from the master file.

    @@ -5354,27 +5750,27 @@

    Static methods

    Class providing an interface to a MFDataset time Variable by imposing a unique common time unit and/or calendar to all files.

    Example usage (See __init__ for more details):

->>> import numpy
    ->>> f1 = Dataset("mftest_1.nc","w", format="NETCDF4_CLASSIC")
    ->>> f2 = Dataset("mftest_2.nc","w", format="NETCDF4_CLASSIC")
    ->>> f1.createDimension("time",None)
    ->>> f2.createDimension("time",None)
    ->>> t1 = f1.createVariable("time","i",("time",))
    ->>> t2 = f2.createVariable("time","i",("time",))
    ->>> t1.units = "days since 2000-01-01"
    ->>> t2.units = "days since 2000-02-01"
    ->>> t1.calendar = "standard"
    ->>> t2.calendar = "standard"
+>>> import numpy
    +>>> f1 = Dataset("mftest_1.nc","w", format="NETCDF4_CLASSIC")
    +>>> f2 = Dataset("mftest_2.nc","w", format="NETCDF4_CLASSIC")
    +>>> f1.createDimension("time",None)
    +>>> f2.createDimension("time",None)
    +>>> t1 = f1.createVariable("time","i",("time",))
    +>>> t2 = f2.createVariable("time","i",("time",))
    +>>> t1.units = "days since 2000-01-01"
    +>>> t2.units = "days since 2000-02-01"
    +>>> t1.calendar = "standard"
    +>>> t2.calendar = "standard"
     >>> t1[:] = numpy.arange(31)
     >>> t2[:] = numpy.arange(30)
     >>> f1.close()
     >>> f2.close()
    ->>> # Read the two files in at once, in one Dataset.
    ->>> f = MFDataset("mftest*nc")
    ->>> t = f.variables["time"]
    +>>> # Read the two files in at once, in one Dataset.
    +>>> f = MFDataset("mftest*nc")
    +>>> t = f.variables["time"]
     >>> print t.units
     days since 2000-01-01
    ->>> print t[32] # The value written in the file, inconsistent with the MF time units.
    +>>> print t[32] # The value written in the file, inconsistent with the MF time units.
     1
     >>> T = MFTime(t)
     >>> print T[32]
    @@ -5389,9 +5785,9 @@ 

    Ancestors (in MRO)

    • MFTime
    • netCDF4._netCDF4._Variable
-• builtins.object
+• __builtin__.object

    Static methods

+Methods

    @@ -5401,8 +5797,7 @@

    Static methods

-__init__(self, time, units=None, calendar=None)
-
-Create a time Variable with units consistent across a multifile
+Create a time Variable with units consistent across a multifile
 dataset.

    time: Time variable from a MFDataset.

units: Time units, for example, 'days since 1979-01-01'. If None,

@@ -5419,7 +5814,7 @@
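A compact sketch of the constructor, reusing the aggregated time variable t from the example above (the units string is illustrative):

>>> T = MFTime(t, units="days since 2000-01-01")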

    Static methods

-def ncattrs(
-self)
+def ncattrs(
+...)

    @@ -5433,7 +5828,7 @@

    Static methods

-def set_auto_chartostring(
-self, val)
+def set_auto_chartostring(
+...)

    @@ -5447,7 +5842,7 @@

    Static methods

-def set_auto_mask(
-self, val)
+def set_auto_mask(
+...)

    @@ -5461,7 +5856,7 @@

    Static methods

-def set_auto_maskandscale(
-self, val)
+def set_auto_maskandscale(
+...)

    @@ -5475,7 +5870,7 @@

    Static methods

-def set_auto_scale(
-self, val)
+def set_auto_scale(
+...)

    @@ -5489,7 +5884,7 @@

    Static methods

-def typecode(
-self)
+def typecode(
+...)

    @@ -5521,7 +5916,7 @@

    Static methods

    Ancestors (in MRO)

    • VLType
-• builtins.object
+• __builtin__.object

    Class variables

    @@ -5626,7 +6021,7 @@

    Static methods

    Ancestors (in MRO)

    • Variable
-• builtins.object
+• __builtin__.object

    Class variables

    @@ -6148,12 +6543,12 @@

    Static methods

    If maskandscale is set to True, and the variable has a scale_factor or an add_offset attribute, then data read from that variable is unpacked using::

-data = self.scale_factor*data + self.add_offset
+data = self.scale_factor*data + self.add_offset
     

    When data is written to a variable it is packed using::

-data = (data - self.add_offset)/self.scale_factor
+data = (data - self.add_offset)/self.scale_factor
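As a worked example of the unpack/pack formulas above (numbers chosen for illustration): with scale_factor = 0.5 and add_offset = 10.0, a stored value of 4 reads back as 0.5*4 + 10.0 = 12.0, and writing 12.0 stores (12.0 - 10.0)/0.5 = 4.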
     
    @@ -6193,12 +6588,12 @@

    Static methods

    If scale is set to True, and the variable has a scale_factor or an add_offset attribute, then data read from that variable is unpacked using::

-data = self.scale_factor*data + self.add_offset
+data = self.scale_factor*data + self.add_offset
     

    When data is written to a variable it is packed using::

-data = (data - self.add_offset)/self.scale_factor
+data = (data - self.add_offset)/self.scale_factor
     
    @@ -6336,7 +6731,7 @@

    Static methods