From 057f975d5cb4aefc04bd6c208f046e7890ebf270 Mon Sep 17 00:00:00 2001
From: Jeff Whitaker

netcdf4-python is a Python interface to the netCDF C library.

This change adds a master_file keyword to the MFDataset __init__ signature:
def __init__( self, files, check=False, aggdim=None, exclude=[], master_file=None)
@@ -4696,7 +4697,8 @@ Index
netCDF4 module
Version 1.4.2 (updated from 1.4.1)
Introduction
Static methods
aggdim
must be the leftmost (slowest varying) dimension of each
of the variables to be aggregated.
files
: either a sequence of netCDF files or a string with a
wildcard (converted to a sorted list of files using glob). If
the master_file
kwarg is not specified, the first file
in the list will become the "master" file, defining all the
variables with an aggregation dimension which may span
subsequent files. Attribute access returns attributes only from "master"
@@ -4709,7 +4711,9 @@ Static methods
be the leftmost dimension of each of the variables to be aggregated).
If None (default), aggregate over the unlimited dimension.
exclude
: A list of variable names to exclude from aggregation.
Default is an empty list.
master_file
: file to use as "master file", defining all the
variables with an aggregation dimension and all global attributes.
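A short sketch of how the new kwarg might be used (the file names here are hypothetical, following the MFDataset example later in this document):
>>> from netCDF4 import MFDataset
>>> # use mftest2.nc (not the first file) to define variables and attributes
>>> f = MFDataset("mftest*nc", master_file="mftest2.nc")
>>> print f.variables["x"][:]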
def get_dims(
self)
return a tuple of Dimension
instances associated with this
netCDF4.Variable.
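A minimal sketch of the new method, assuming a variable like the tutorial's temp with dimensions (time, level, lat, lon):
>>> dims = temp.get_dims()
>>> print [d.name for d in dims]
['time', 'level', 'lat', 'lon']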
def get_var_chunk_cache(
self)
From 70f8faf064a113cdb7dd2b2dc5da29706b7aef99 Mon Sep 17 00:00:00 2001
From: Jeff Whitaker

1) Creating/Opening/Closing a netCDF file.

To close a file, use the close method of the Dataset instance.
Here's an example:
>>> from netCDF4 import Dataset
>>> rootgrp = Dataset("test.nc", "w", format="NETCDF4")
>>> print rootgrp.data_model
NETCDF4
>>> rootgrp.close()
2) Groups in a netCDF file.

Groups created in a file are stored in the groups
dictionary attribute of the Dataset
instance. Only NETCDF4
formatted files support Groups; if you try to create a Group in a netCDF 3 file you will get an error message.
>>> rootgrp = Dataset("test.nc", "a")
>>> fcstgrp = rootgrp.createGroup("forecasts")
>>> analgrp = rootgrp.createGroup("analyses")
>>> print rootgrp.groups
OrderedDict([("forecasts",
<netCDF4._netCDF4.Group object at 0x1b4b7b0>),
("analyses",
<netCDF4._netCDF4.Group object at 0x1b4b970>)])
Each Group has a path
attribute that contains a simulated unix directory path to that group. To simplify the creation of nested groups, you can use a unix-like path as an argument to createGroup.
>>> fcstgrp1 = rootgrp.createGroup("/forecasts/model1")
>>> fcstgrp2 = rootgrp.createGroup("/forecasts/model2")
To visit all the groups in a Dataset
, use the function walktree
, a Python generator that is used to walk the directory tree. Note that printing the Dataset
or Group
object yields summary information about its contents.
>>> def walktree(top):
>>> values = top.groups.values()
>>> yield values
>>> for value in top.groups.values():
>>> for children in walktree(value):
>>> yield children
>>> for children in walktree(rootgrp):
>>> for child in children:
>>> print child
<type "netCDF4._netCDF4.Dataset">
root group (NETCDF4 file format):
dimensions:
variables:
groups: forecasts, analyses
<type "netCDF4._netCDF4.Group">
group /forecasts:
dimensions:
variables:
groups: model1, model2
<type "netCDF4._netCDF4.Group">
group /analyses:
dimensions:
variables:
groups:
<type "netCDF4._netCDF4.Group">
group /forecasts/model1:
dimensions:
variables:
groups:
<type "netCDF4._netCDF4.Group">
group /forecasts/model2:
dimensions:
variables:
groups:
3) Dimensions in a netCDF file.

In this example, both the time and level
dimensions are unlimited. Having more than one unlimited dimension is a new netCDF 4 feature; in netCDF 3 files there may be only one, and it must be the first (leftmost) dimension of the variable.
>>> level = rootgrp.createDimension("level", None)
>>> time = rootgrp.createDimension("time", None)
>>> lat = rootgrp.createDimension("lat", 73)
>>> lon = rootgrp.createDimension("lon", 144)
All of the Dimension
instances are stored in a python dictionary.
>>> print rootgrp.dimensions
OrderedDict([("level", <netCDF4._netCDF4.Dimension object at 0x1b48030>),
("time", <netCDF4._netCDF4.Dimension object at 0x1b481c0>),
("lat", <netCDF4._netCDF4.Dimension object at 0x1b480f8>),
("lon", <netCDF4._netCDF4.Dimension object at 0x1b48a08>)])
Calling the python len function with a Dimension
instance returns the current size of that dimension. The isunlimited
method of a Dimension
instance can be used to determine if the dimension is unlimited, or appendable.
>>> print len(lon)
144
>>> print lon.isunlimited()
False
Printing the Dimension
object provides useful summary info, including the name and length of the dimension, and whether it is unlimited.
>>> for dimobj in rootgrp.dimensions.values():
>>> print dimobj
<type "netCDF4._netCDF4.Dimension"> (unlimited): name = "level", size = 0
<type "netCDF4._netCDF4.Dimension"> (unlimited): name = "time", size = 0
<type "netCDF4._netCDF4.Dimension">: name = "lat", size = 73
<type "netCDF4._netCDF4.Dimension">: name = "lon", size = 144
4) Variables in a netCDF file.

The dimensions themselves are usually also defined as variables, called coordinate variables. The createVariable
method returns an instance of the Variable
class whose methods can be used later to access and set variable data and attributes.
>>> times = rootgrp.createVariable("time","f8",("time",))
>>> levels = rootgrp.createVariable("level","i4",("level",))
>>> latitudes = rootgrp.createVariable("lat","f4",("lat",))
>>> longitudes = rootgrp.createVariable("lon","f4",("lon",))
>>> # two dimensions unlimited
>>> temp = rootgrp.createVariable("temp","f4",("time","level","lat","lon",))
To get summary info on a Variable
instance in an interactive session, just print it.
>>> print temp
<type "netCDF4._netCDF4.Variable">
float32 temp(time, level, lat, lon)
least_significant_digit: 3
units: K
You can use a path to create a Variable inside a hierarchy of groups.
>>> ftemp = rootgrp.createVariable("/forecasts/model1/temp","f4",("time","level","lat","lon",))
If the intermediate groups do not yet exist, they will be created.
You can also query a Dataset
or Group
instance directly to obtain Group
or Variable
instances using paths.
>>> print rootgrp["/forecasts/model1"] # a Group instance
<type "netCDF4._netCDF4.Group">
group /forecasts/model1:
dimensions(sizes):
variables(dimensions): float32 temp(time,level,lat,lon)
groups:
>>> print rootgrp["/forecasts/model1/temp"] # a Variable instance
<type "netCDF4._netCDF4.Variable">
float32 temp(time, level, lat, lon)
path = /forecasts/model1
unlimited dimensions: time, level
All of the variables in the Dataset
or Group
are stored in a Python dictionary, in the same way as the dimensions:
>>> print rootgrp.variables
OrderedDict([("time", <netCDF4.Variable object at 0x1b4ba70>),
("level", <netCDF4.Variable object at 0x1b4bab0>),
("lat", <netCDF4.Variable object at 0x1b4baf0>),
("lon", <netCDF4.Variable object at 0x1b4bb30>),
("temp", <netCDF4.Variable object at 0x1b4bb70>)])
5) Attributes in a netCDF file.

Global attributes are set by assigning values to Dataset or Group instance variables; variable attributes are set by assigning values to Variable
instance variables. Attributes can be strings, numbers or sequences. Returning to our example,
>>> import time
>>> rootgrp.description = "bogus example script"
>>> rootgrp.history = "Created " + time.ctime(time.time())
>>> rootgrp.source = "netCDF4 python module tutorial"
>>> latitudes.units = "degrees north"
>>> longitudes.units = "degrees east"
>>> levels.units = "hPa"
>>> temp.units = "K"
>>> times.units = "hours since 0001-01-01 00:00:00.0"
>>> times.calendar = "gregorian"
The ncattrs method of a Dataset, Group or Variable instance can be used to retrieve the names of all the netCDF attributes. This method is provided as a convenience, since using the built-in dir
Python function will return a bunch of private methods and attributes that cannot (or should not) be modified by the user.
>>> for name in rootgrp.ncattrs():
>>> print "Global attr", name, "=", getattr(rootgrp,name)
Global attr description = bogus example script
Global attr history = Created Mon Nov 7 10:30:56 2005
Global attr source = netCDF4 python module tutorial
The __dict__
attribute of a Dataset
, Group
or Variable
instance provides all the netCDF attribute name/value pairs in a python dictionary:
>>> print rootgrp.__dict__
OrderedDict([(u"description", u"bogus example script"),
(u"history", u"Created Thu Mar 3 19:30:33 2011"),
(u"source", u"netCDF4 python module tutorial")])
6) Writing data to and retrieving data from a netCDF variable.

Now that you have a netCDF Variable
instance, how do you put data into it? You can just treat it like an array and assign data to a slice.
>>> import numpy
>>> lats = numpy.arange(-90,91,2.5)
>>> lons = numpy.arange(-180,180,2.5)
>>> latitudes[:] = lats
>>> longitudes[:] = lons
>>> print "latitudes =\n",latitudes[:]
latitudes =
[-90. -87.5 -85. -82.5 -80. -77.5 -75. -72.5 -70. -67.5 -65. -62.5
-60. -57.5 -55. -52.5 -50. -47.5 -45. -42.5 -40. -37.5 -35. -32.5
Unlike NumPy's array objects, netCDF Variable
objects with unlimited dimensions will grow along those dimensions if you assign data outside the currently defined range of indices.
>>> # append along two unlimited dimensions by assigning to slice.
>>> nlats = len(rootgrp.dimensions["lat"])
>>> nlons = len(rootgrp.dimensions["lon"])
>>> print "temp shape before adding data = ",temp.shape
temp shape before adding data = (0, 0, 73, 144)
>>>
>>> from numpy.random import uniform
>>> temp[0:5,0:10,:,:] = uniform(size=(5,10,nlats,nlons))
>>> print "temp shape after adding data = ",temp.shape
temp shape after adding data = (6, 10, 73, 144)
>>>
>>> # levels have grown, but no values yet assigned.
>>> print "levels shape after adding pressure data = ",levels.shape
levels shape after adding pressure data = (10,)
Note that the size of the levels variable grows when data is appended along the level
dimension of the variable temp
, even though no data has yet been assigned to levels.
>>> # now, assign data to levels dimension variable.
>>> levels[:] = [1000.,850.,700.,500.,300.,250.,200.,150.,100.,50.]
Boolean array and integer sequence indexing behaves differently for netCDF variables than for numpy arrays. Only 1-d boolean arrays and integer sequences are allowed, and these indices work independently along each dimension (similar to the way vector subscripts work in fortran). This means that
>>> temp[0, 0, [0,1,2,3], [0,1,2,3]]
returns an array of shape (4,4) when slicing a netCDF variable, but for a numpy array it returns an array of shape (4,).
While this behaviour may cause some confusion, it provides a very powerful way to extract data from multidimensional netCDF variables by using logical operations on the dimension arrays to create slices.
For example,
>>> tempdat = temp[::2, [1,3,6], lats>0, lons>0]
will extract time indices 0, 2 and 4, pressure levels 850, 500 and 200 hPa, all Northern Hemisphere latitudes and Eastern Hemisphere longitudes, resulting in a numpy array of shape (3, 3, 36, 71).
>>> print "shape of fancy temp slice = ",tempdat.shape
shape of fancy temp slice = (3, 3, 36, 71)
7) Dealing with time coordinates.

Time values can be converted to and from calendar dates with the num2date and date2num functions (the cftime package must be installed separately). Here's an example of how they can be used:
>>> # fill in times.
>>> from datetime import datetime, timedelta
>>> from netCDF4 import num2date, date2num
>>> dates = [datetime(2001,3,1)+n*timedelta(hours=12) for n in range(temp.shape[0])]
>>> times[:] = date2num(dates,units=times.units,calendar=times.calendar)
>>> print "time values (in units %s): " % times.units+"\n",times[:]
time values (in units hours since January 1, 0001):
[ 17533056. 17533068. 17533080. 17533092. 17533104.]
>>> dates = num2date(times[:],units=times.units,calendar=times.calendar)
>>> print "dates corresponding to time values:\n",dates
dates corresponding to time values:
[2001-03-01 00:00:00 2001-03-01 12:00:00 2001-03-02 00:00:00
2001-03-02 12:00:00 2001-03-03 00:00:00]
8) Reading data from a multi-file netCDF dataset.

If you want to read data from a variable that spans multiple netCDF files, you can use the MFDataset class to read the data as if it were contained in a single file. The files must be in NETCDF3_64BIT_OFFSET
, NETCDF3_64BIT_DATA
, NETCDF3_CLASSIC
or NETCDF4_CLASSIC
format (NETCDF4
formatted multi-file datasets are not supported).
>>> for nf in range(10):
>>> f = Dataset("mftest%s.nc" % nf,"w")
>>> f.createDimension("x",None)
>>> x = f.createVariable("x","i",("x",))
>>> x[0:10] = numpy.arange(nf*10,10*(nf+1))
>>> f.close()
Now read all the files back in at once with MFDataset
>>> from netCDF4 import MFDataset
>>> f = MFDataset("mftest*nc")
>>> print f.variables["x"][:]
[ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24
25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74
9) Efficient compression of netCDF variables.

This makes the compression 'lossy' instead of 'lossless'; that is, some precision in the data is sacrificed for the sake of disk space.
In our example, try replacing the line
>>> temp = rootgrp.createVariable("temp","f4",("time","level","lat","lon",))
with
>>> temp = rootgrp.createVariable("temp","f4",("time","level","lat","lon",),zlib=True)
and then
>>> temp = rootgrp.createVariable("temp","f4",("time","level","lat","lon",),zlib=True,least_significant_digit=3)
10) Beyond homogeneous arrays of a fixed type - compound data types.

Compound data types are created using the createCompoundType
method of a Dataset
or Group
instance. Since there is no native complex data type in netcdf, compound types are handy for storing numpy complex arrays. Here's an example:
>>> f = Dataset("complex.nc","w")
>>> size = 3 # length of 1-d complex array
>>> # create sample complex data.
>>> datac = numpy.exp(1j*(1.+numpy.linspace(0, numpy.pi, size)))
>>> # create complex128 compound data type.
>>> complex128 = numpy.dtype([("real",numpy.float64),("imag",numpy.float64)])
>>> complex128_t = f.createCompoundType(complex128,"complex128")
>>> # create a variable with this data type, write some data to it.
>>> f.createDimension("x_dim",None)
>>> v = f.createVariable("cmplx_var",complex128_t,"x_dim")
>>> data = numpy.empty(size,complex128) # numpy structured array
>>> data["real"] = datac.real; data["imag"] = datac.imag
>>> v[:] = data # write numpy structured array to netcdf compound var
>>> # close and reopen the file, check the contents.
>>> f.close(); f = Dataset("complex.nc")
>>> v = f.variables["cmplx_var"]
>>> datain = v[:] # read in all the data into a numpy structured array
>>> # create an empty numpy complex array
>>> datac2 = numpy.empty(datain.shape,numpy.complex128)
>>> # .. fill it with contents of structured array.
>>> datac2.real = datain["real"]; datac2.imag = datain["imag"]
>>> print datac.dtype,datac # original data
complex128 [ 0.54030231+0.84147098j -0.84147098+0.54030231j -0.54030231-0.84147098j]
>>>
>>> print datac2.dtype,datac2 # data from file
complex128 [ 0.54030231+0.84147098j -0.84147098+0.54030231j -0.54030231-0.84147098j]
All of the compound types defined for a Dataset
or Group
are stored in a Python dictionary, just like variables and dimensions. As always, printing objects gives useful summary information in an interactive session:
>>> print f
<type "netCDF4._netCDF4.Dataset">
root group (NETCDF4 file format):
dimensions: x_dim
variables: cmplx_var
groups:
>>> print f.variables["cmplx_var"]
<type "netCDF4._netCDF4.Variable">
compound cmplx_var(x_dim)
compound data type: [("real", "<f8"), ("imag", "<f8")]
unlimited dimensions: x_dim
current shape = (3,)
>>> print f.cmptypes
OrderedDict([("complex128", <netCDF4.CompoundType object at 0x1029eb7e8>)])
>>> print f.cmptypes["complex128"]
<type "netCDF4._netCDF4.CompoundType">: name = "complex128", numpy dtype = [(u"real","<f8"), (u"imag", "<f8")]
11) Variable-length (vlen) data types.

NetCDF 4 has support for variable-length, or "ragged", arrays: arrays of variable length sequences having the same type. To create a variable-length data type, use the createVLType
method of a Dataset
or Group
instance.
>>> f = Dataset("tst_vlen.nc","w")
>>> vlen_t = f.createVLType(numpy.int32, "phony_vlen")
Any of the primitive datatypes can be used (signed and unsigned integers, 32 and 64 bit floats, and characters), but compound data types cannot. A new variable can then be created using this datatype.
>>> x = f.createDimension("x",3)
>>> y = f.createDimension("y",4)
>>> vlvar = f.createVariable("phony_vlen_var", vlen_t, ("y","x"))
Since there is no native vlen datatype in numpy, vlen arrays are represented in python as object arrays, which are arrays of pointers to other python objects, all of the same type but of varying length. In this case, they contain 1-D numpy int32
arrays of random length between 1 and 10.
>>> import random
>>> data = numpy.empty(len(y)*len(x),object)
>>> for n in range(len(y)*len(x)):
>>> data[n] = numpy.arange(random.randint(1,10),dtype="int32")+1
>>> data = numpy.reshape(data,(len(y),len(x)))
>>> vlvar[:] = data
>>> print "vlen variable =\n",vlvar[:]
vlen variable =
[[[ 1 2 3 4 5 6 7 8 9 10] [1 2 3 4 5] [1 2 3 4 5 6 7 8]]
[[1 2 3 4 5 6 7] [1 2 3 4 5 6] [1 2 3 4 5]]
[[ 1 2 3 4 5 6 7 8 9 10] [ 1 2 3 4 5 6 7 8 9 10] [1 2 3 4 5 6 7 8]]]
>>> print f
<type "netCDF4._netCDF4.Dataset">
root group (NETCDF4 file format):
dimensions: x, y
variables: phony_vlen_var
groups:
>>> print f.variables["phony_vlen_var"]
<type "netCDF4._netCDF4.Variable">
vlen phony_vlen_var(y, x)
vlen data type: int32
unlimited dimensions:
current shape = (4, 3)
>>> print f.VLtypes["phony_vlen"]
<type "netCDF4._netCDF4.VLType">: name = "phony_vlen", numpy dtype = int32
For vlen strings, you don't need to create a vlen data type. Instead, simply use the python str
builtin (or a numpy string datatype with fixed length greater than 1) when calling the createVariable
method.
>>> z = f.createDimension("z",10)
>>> strvar = f.createVariable("strvar", str, "z")
In this example, an object array is filled with random python strings with random lengths between 2 and 12 characters, and the data in the object array is assigned to the vlen string variable.
>>> chars = "1234567890aabcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
>>> data = numpy.empty(10,"O")
>>> for n in range(10):
>>> stringlen = random.randint(2,12)
>>> data[n] = "".join([random.choice(chars) for i in range(stringlen)])
>>> strvar[:] = data
>>> print "variable-length string variable:\n",strvar[:]
variable-length string variable:
[aDy29jPt 5DS9X8 jd7aplD b8t4RM jHh8hq KtaPWF9cQj Q1hHN5WoXSiT MMxsVeq
tdLUzvVTzj]
>>> print f
<type "netCDF4._netCDF4.Dataset">
root group (NETCDF4 file format):
dimensions: x, y, z
variables: phony_vlen_var, strvar
groups:
>>> print f.variables["strvar"]
<type "netCDF4._netCDF4.Variable">
vlen strvar(z)
vlen data type: <type "str">
unlimited dimensions:
current size = (10,)
12) Enum data type.

The base integer data type and a python dictionary describing the allowed values and their names are used to define an Enum data type using createEnumType
.
>>> nc = Dataset('clouds.nc','w')
>>> # python dict with allowed values and their names.
>>> enum_dict = {u'Altocumulus': 7, u'Missing': 255,
>>> u'Stratus': 2, u'Clear': 0,
>>> u'Nimbostratus': 6, u'Cumulus': 4, u'Altostratus': 5,
>>> u'Cumulonimbus': 1, u'Stratocumulus': 3}
>>> # create the Enum type called 'cloud_t'.
>>> cloud_type = nc.createEnumType(numpy.uint8,'cloud_t',enum_dict)
>>> print cloud_type
<type 'netCDF4._netCDF4.EnumType'>: name = 'cloud_t',
numpy dtype = uint8, fields/values ={u'Cumulus': 4,
u'Altocumulus': 7, u'Missing': 255,
u'Stratus': 2, u'Clear': 0,
u'Cumulonimbus': 1, u'Stratocumulus': 3,
u'Nimbostratus': 6, u'Altostratus': 5}
The new variable can be assigned any of the integer values associated with the cloud types in enum_dict. A ValueError
will be raised if an attempt is made to write an integer value not associated with one of the specified names.
>>> time = nc.createDimension('time',None)
>>> # create a 1d variable of type 'cloud_type'.
>>> # The fill_value is set to the 'Missing' named value.
>>> cloud_var = nc.createVariable('primary_cloud',cloud_type,'time',
>>> fill_value=enum_dict['Missing'])
>>> # write some data to the variable.
>>> cloud_var[:] = [enum_dict['Clear'],enum_dict['Stratus'],
>>> enum_dict['Cumulus'],enum_dict['Missing'],
>>> enum_dict['Cumulonimbus']]
>>> nc.close()
>>> # reopen the file, read the data.
>>> nc = Dataset('clouds.nc')
>>> cloud_var = nc.variables['primary_cloud']
>>> print cloud_var
<type 'netCDF4._netCDF4.Variable'>
enum primary_cloud(time)
_FillValue: 255
enum data type: uint8
unlimited dimensions: time
current shape = (5,)
>>> print cloud_var.datatype.enum_dict
{u'Altocumulus': 7, u'Missing': 255, u'Stratus': 2,
u'Clear': 0, u'Nimbostratus': 6, u'Cumulus': 4,
u'Altostratus': 5, u'Cumulonimbus': 1,
u'Stratocumulus': 3}
>>> print cloud_var[:]
[0 2 4 -- 1]
>>> nc.close()
13) Parallel IO.

If MPI parallel enabled versions of netcdf and hdf5 are detected, and mpi4py is installed, netcdf4-python will be built with parallel IO capabilities enabled. To use parallel IO, your program must be running in an MPI environment using mpi4py.
>>> from mpi4py import MPI
>>> import numpy as np
>>> from netCDF4 import Dataset
>>> rank = MPI.COMM_WORLD.rank # The process ID (integer 0-3 for 4-process run)
The parallel features of netcdf4-python are mostly transparent - when a new dataset is created or an existing dataset is opened, use the parallel
keyword to enable parallel access.
>>> nc = Dataset('parallel_tst.nc','w',parallel=True)
The optional comm keyword may be used to specify a particular MPI communicator (MPI_COMM_WORLD
is used by default). Each process (or rank) can now write to the file independently. In this example the process rank is written to a different variable index on each task.
>>> d = nc.createDimension('dim',4)
>>> v = nc.createVariable('var', numpy.int, 'dim')
>>> v[rank] = rank
>>> nc.close()
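Assuming the example above were saved as a script named mpi_example.py (a hypothetical name), it could be launched on 4 processes with mpirun -np 4 python mpi_example.py; the variable 'var' would then contain [0, 1, 2, 3], one value written by each rank.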
14) Dealing with strings.

When reading, a fixed-length string (dtype U#
) array is created. When writing the data, stringtochar
is used to convert the numpy string array to an array of characters with one more dimension. For example,
>>> nc = Dataset('stringtest.nc','w',format='NETCDF4_CLASSIC')
>>> nc.createDimension('nchars',3)
>>> nc.createDimension('nstrings',None)
>>> v = nc.createVariable('strings','S1',('nstrings','nchars'))
>>> datain = numpy.array(['foo','bar'],dtype='S3')
>>> v[:] = stringtochar(datain) # manual conversion to char array
>>> v[:] # data returned as char array
[[b'f' b'o' b'o']
[b'b' b'a' b'r']]
>>> v._Encoding = 'ascii' # this enables automatic conversion
>>> v[:] = datain # conversion to char array done internally
>>> v[:] # data returned in numpy string array
['foo' 'bar']
>>> nc.close()
For compound types containing strings, a numpy string dtype can be used to define the compound data type - the string dtype will be converted to a character array dtype under the hood when creating the netcdf compound type. Here's an example:
>>> nc = Dataset('compoundstring_example.nc','w')
>>> dtype = numpy.dtype([('observation', 'f4'),
('station_name','S80')])
>>> station_data_t = nc.createCompoundType(dtype,'station_data')
>>> nc.createDimension('station',None)
>>> statdat = nc.createVariable('station_obs', station_data_t, ('station',))
>>> data = numpy.empty(2,dtype)
>>> data['observation'][:] = (123.,3.14)
>>> data['station_name'][:] = ('Boulder','New York')
>>> statdat.dtype # strings actually stored as character arrays
{'names':['observation','station_name'], 'formats':['<f4',('S1', (80,))], 'offsets':[0,4], 'itemsize':84, 'aligned':True}
>>> statdat[:] = data # strings converted to character arrays internally
>>> statdat[:] # character arrays converted back to strings
[(123. , 'Boulder') ( 3.14, 'New York')]
>>> statdat[:].dtype
{'names':['observation','station_name'], 'formats':['<f4','S80'], 'offsets':[0,4], 'itemsize':84, 'aligned':True}
>>> statdat.set_auto_chartostring(False) # turn off auto-conversion
>>> statdat[:] = data.view(dtype=[('observation', 'f4'),('station_name','S1',10)])
>>> statdat[:] # now structured array with char array subtype is returned
[(123. , ['B', 'o', 'u', 'l', 'd', 'e', 'r', '', '', ''])
( 3.14, ['N', 'e', 'w', ' ', 'Y', 'o', 'r', 'k', '', ''])]
>>> nc.close()
Classes
Ancestors (in MRO)
- CompoundType
-- builtins.object
+- __builtin__.object
Class variables
Static methods
Ancestors (in MRO)
- Dataset
-- builtins.object
+- __builtin__.object
Class variables
Static methods
Returns a list of variables that match specific conditions.
Can pass in key=value parameters and variables are returned that contain all of the matches. For example,
>>> # Get variables with x-axis attribute.
>>> vs = nc.get_variables_by_attributes(axis='X')
>>> # Get variables with matching "standard_name" attribute
>>> vs = nc.get_variables_by_attributes(standard_name='northward_sea_water_velocity')
Can pass in key=callable parameter and variables are returned if the callable returns True. The callable should accept a single parameter, the attribute value. None is given as the attribute value when the attribute does not exist on the variable. For example,
>>> # Get Axis variables
>>> vs = nc.get_variables_by_attributes(axis=lambda v: v in ['X', 'Y', 'Z', 'T'])
>>> # Get variables that don't have an "axis" attribute
>>> vs = nc.get_variables_by_attributes(axis=lambda v: v is None)
>>> # Get variables that have a "grid_mapping" attribute
>>> vs = nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None)
Static methods
Ancestors (in MRO)
- Dimension
-- builtins.object
+- __builtin__.object
Class variables
Static methods
Ancestors (in MRO)
- EnumType
-- builtins.object
+- __builtin__.object
Class variables
Ancestors (in MRO)
Class variables
var cmptypes
Inheritance: Dataset.cmptypes

The cmptypes dictionary maps the names of compound types defined for the Group or Dataset to instances of the CompoundType class.
var data_model
Inheritance: Dataset.data_model

data_model describes the netCDF data model version, one of NETCDF3_CLASSIC, NETCDF4, NETCDF4_CLASSIC, NETCDF3_64BIT_OFFSET or NETCDF3_64BIT_DATA.
var dimensions
Inheritance: Dataset.dimensions

The dimensions dictionary maps the names of dimensions defined for the Group or Dataset to instances of the Dimension class.
var disk_format
Inheritance: Dataset.disk_format

disk_format describes the underlying file format, one of NETCDF3, HDF5, HDF4, PNETCDF, DAP2, DAP4 or UNDEFINED. Only available if using netcdf C library version >= 4.3.1, otherwise will always return UNDEFINED.
var enumtypes
Inheritance: Dataset.enumtypes

The enumtypes dictionary maps the names of Enum types defined for the Group or Dataset to instances of the EnumType class.
var file_format
Inheritance: Dataset.file_format

Same as data_model, retained for backwards compatibility.
var groups
Inheritance: Dataset.groups

The groups dictionary maps the names of groups created for this Dataset or Group to instances of the Group class (the Dataset class is simply a special case of the Group class which describes the root group in the netCDF4 file).
var keepweakref
Inheritance: Dataset.keepweakref

If True, child Dimension and Variable objects only keep weak references to the parent Dataset or Group.
var parent
Inheritance: Dataset.parent
var path
Inheritance: Dataset.path

path shows the location of the Group in the Dataset in a unix directory format (the names of groups in the hierarchy separated by forward slashes). A Dataset instance is the root group, so the path is simply '/'.
var variables

Inheritance: Dataset.variables

The variables dictionary maps the names of variables defined for this Dataset or Group to instances of the Variable class.
var vltypes

Inheritance: Dataset.vltypes

The vltypes dictionary maps the names of variable-length types defined for the Group or Dataset to instances of the VLType class.

Static methods
def close(
self)
Inheritance: Dataset.close

Close the Dataset.
def createCompoundType(
self, datatype, datatype_name)
Inheritance: Dataset.createCompoundType

Creates a new compound data type named datatype_name from the numpy dtype object datatype.

Note: If the new compound data type contains other compound data types (i.e. it is a 'nested' compound type, where not all of the elements are homogeneous numeric data types), then the 'inner' compound types must be created first.
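A minimal sketch of that ordering, with hypothetical type names (the inner type is created before the outer type that embeds it):
>>> inner = numpy.dtype([("real", numpy.float64), ("imag", numpy.float64)])
>>> inner_t = f.createCompoundType(inner, "inner_t")
>>> outer = numpy.dtype([("sample", inner), ("quality", numpy.int32)])
>>> outer_t = f.createCompoundType(outer, "outer_t")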
Static methods
def createDimension(
self, dimname, size=None)
Inheritance: Dataset.createDimension

Creates a new dimension with the given dimname and size.

size must be a positive integer or None, which stands for "unlimited" (default is None). Specifying a size of 0 also results in an unlimited dimension. The return value is the Dimension class instance describing the new dimension.
def createEnumType(
self, datatype, datatype_name, enum_dict)
Inheritance: Dataset.createEnumType

Creates a new Enum data type named datatype_name from a numpy integer dtype object datatype, and a python dictionary defining the enum fields and values.

The return value is the EnumType class instance describing the new datatype.
self, groupname)
Inheritance: Dataset.createGroup

Creates a new Group with the given groupname.

If groupname is specified as a path, using forward slashes as in unix to separate components, then intermediate groups will be created as necessary (analogous to mkdir -p in unix). For example,
self, datatype, datatype_name)
Inheritance: Dataset.createVLType

Creates a new VLEN data type named datatype_name from a numpy dtype object datatype.

The return value is the VLType class instance describing the new datatype.
def createVariable(
self, varname, datatype, dimensions=(), zlib=False, complevel=4, shuffle=True, fletcher32=False, contiguous=False, chunksizes=None, endian='native', least_significant_digit=None, fill_value=None)
Inheritance: Dataset.createVariable

Creates a new variable with the given varname, datatype, and dimensions. If dimensions are not given, the variable is assumed to be a scalar.

If varname is specified as a path, using forward slashes as in unix to separate components, then intermediate groups will be created as necessary.
def delncattr(
self,name,value)
Inheritance: Dataset.delncattr

delete a netCDF dataset or group attribute. Use if you need to delete a netCDF attribute with the same name as one of the reserved python attributes.
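A brief sketch, reusing the tutorial's rootgrp:
>>> rootgrp.delncattr("history") # removes the global 'history' attribute set earlier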
def filepath(
self,encoding=None)
+ Inheritance: +
-Dataset
.filepath
++Get the file system path (or the opendap URL) which was used to +
@@ -4163,16 +4289,20 @@Get the file system path (or the opendap URL) which was used to open/create the Dataset. Requires netcdf >= 4.1.2. The path is decoded into a string using
sys.getfilesystemencoding()
by default, this can be changed using theencoding
kwarg.Static methods
def get_variables_by_attributes(
...)
+ Inheritance: +
-Dataset
.get_variables_by_attributes
++Returns a list of variables that match specific conditions.
++Returns a list of variables that match specific conditions.
Can pass in key=value parameters and variables are returned that contain all of the matches. For example,
-+>>> # Get variables with x-axis attribute. ->>> vs = nc.get_variables_by_attributes(axis='X') ->>> # Get variables with matching "standard_name" attribute ->>> vs = nc.get_variables_by_attributes(standard_name='northward_sea_water_velocity') +@@ -4180,11 +4310,11 @@>>> # Get variables with x-axis attribute. +>>> vs = nc.get_variables_by_attributes(axis='X') +>>> # Get variables with matching "standard_name" attribute +>>> vs = nc.get_variables_by_attributes(standard_name='northward_sea_water_velocity')Static methods
callable returns True. The callable should accept a single parameter, the attribute value. None is given as the attribute value when the attribute does not exist on the variable. For example, ->>> # Get Axis variables ->>> vs = nc.get_variables_by_attributes(axis=lambda v: v in ['X', 'Y', 'Z', 'T']) ->>> # Get variables that don't have an "axis" attribute +>>> # Get Axis variables +>>> vs = nc.get_variables_by_attributes(axis=lambda v: v in ['X', 'Y', 'Z', 'T']) +>>> # Get variables that don't have an "axis" attribute >>> vs = nc.get_variables_by_attributes(axis=lambda v: v is None) ->>> # Get variables that have a "grid_mapping" attribute +>>> # Get variables that have a "grid_mapping" attribute >>> vs = nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None)@@ -4198,10 +4328,14 @@+Static methods
def getncattr(
self,name)
Inheritance: Dataset.getncattr

retrieve a netCDF dataset or group attribute. Use if you need to get a netCDF attribute with the same name as one of the reserved python attributes.

option kwarg encoding can be used to specify the character encoding of a string attribute (default is utf-8).
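A brief sketch, reusing the tutorial's rootgrp:
>>> rootgrp.setncattr("source", "netCDF4 python module tutorial")
>>> print rootgrp.getncattr("source")
netCDF4 python module tutorial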
def isopen(
...)
Inheritance: Dataset.isopen

is the Dataset open or closed?
def ncattrs(
self)
Inheritance: Dataset.ncattrs

return netCDF global attribute names for this Dataset or Group in a list.
def renameAttribute(
self, oldname, newname)
Inheritance: Dataset.renameAttribute

rename a netCDF attribute named oldname to newname.
def renameDimension(
self, oldname, newname)
Inheritance: Dataset.renameDimension

rename a Dimension named oldname to newname.
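A brief sketch, reusing the tutorial's rootgrp:
>>> rootgrp.renameDimension("lon", "longitude")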
def renameGroup(
self, oldname, newname)
Inheritance: Dataset.renameGroup

rename a Group named oldname to newname (requires netcdf >= 4.3.1).
def renameVariable(
self, oldname, newname)
Inheritance: Dataset.renameVariable

rename a Variable named oldname to newname.
def set_always_mask(
self, True_or_False)
Inheritance: Dataset.set_always_mask

Call set_always_mask for all variables contained in this Dataset or Group, as well as for all variables in all its subgroups.

True_or_False: Boolean determining if automatic conversion of masked arrays with no missing values to regular numpy arrays shall be applied for all variables.
def set_auto_chartostring(
self, True_or_False)
Inheritance: Dataset.set_auto_chartostring

Call set_auto_chartostring for all variables contained in this Dataset or Group, as well as for all variables in all its subgroups.

True_or_False: Boolean determining if automatic conversion of all character arrays <--> string arrays should be performed for character variables (variables of type NC_CHAR or S1) with an _Encoding attribute set.
def set_auto_mask(
self, True_or_False)
Inheritance: Dataset.set_auto_mask

Call set_auto_mask for all variables contained in this Dataset or Group, as well as for all variables in all its subgroups.

True_or_False: Boolean determining if automatic conversion to masked arrays shall be applied for all variables.
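A brief sketch, reusing the tutorial's rootgrp:
>>> rootgrp.set_auto_mask(False) # variables now return plain numpy arrays
>>> data = rootgrp.variables["temp"][:] # _FillValue entries are no longer masked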
def set_auto_maskandscale(
self, True_or_False)
Inheritance: Dataset.set_auto_maskandscale

Call set_auto_maskandscale for all variables contained in this Dataset or Group, as well as for all variables in all its subgroups.

True_or_False: Boolean determining if automatic conversion to masked arrays and variable scaling shall be applied for all variables.
def set_auto_scale(
self, True_or_False)
Inheritance: Dataset.set_auto_scale

Call set_auto_scale for all variables contained in this Dataset or Group, as well as for all variables in all its subgroups.

True_or_False: Boolean determining if automatic variable scaling shall be applied for all variables.
def set_fill_off(
self)
Inheritance: Dataset.set_fill_off

Sets the fill mode for a Dataset open for writing to off.

This will prevent the data from being pre-filled with fill values, which may result in some performance improvements. However, you must then make sure the data is actually written before being read.
Static methods
def set_fill_on(
self)
Inheritance: Dataset.set_fill_on

Sets the fill mode for a Dataset open for writing to on.

This causes data to be pre-filled with fill values. The fill values can be controlled by the variable's _Fill_Value attribute, but it is usually sufficient to use the netCDF default _FillValue (defined separately for each variable type).
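A brief sketch of toggling the fill mode on the tutorial's rootgrp:
>>> rootgrp.set_fill_off() # caller must ensure all values get written
>>> rootgrp.set_fill_on() # restore pre-filling with fill values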
def setncattr(
self,name,value)
Inheritance: Dataset.setncattr

set a netCDF dataset or group attribute using name,value pair. Use if you need to set a netCDF attribute with the same name as one of the reserved python attributes.
def setncattr_string(
self,name,value)
Inheritance: Dataset.setncattr_string

set a netCDF dataset or group string attribute using name,value pair. Use if you need to ensure that a netCDF attribute is created with type NC_STRING if the file format is NETCDF4.
def setncatts(
self,attdict)
Inheritance: Dataset.setncatts

set a bunch of netCDF dataset or group attributes at once using a python dictionary. This may be faster when setting a lot of attributes for a NETCDF3 formatted file, since nc_redef/nc_enddef is not called in between setting each attribute.
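A brief sketch, reusing the tutorial's rootgrp:
>>> rootgrp.setncatts({"description": "bogus example script",
>>> "source": "netCDF4 python module tutorial"})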
def sync(
self)
Inheritance: Dataset.sync

Writes all buffered data in the Dataset to the disk file.
or NETCDF3_64BIT_DATA format (NETCDF4 Datasets won't work).

Adapted from pycdf by Andre Gosselin.

Example usage (see __init__ for more details):
>>> import numpy as np
>>> # create a series of netCDF files with a variable sharing
>>> # the same unlimited dimension.
>>> for nf in range(10):
>>> f = Dataset("mftest%s.nc" % nf,"w",format='NETCDF4_CLASSIC')
>>> f.createDimension("x",None)
>>> x = f.createVariable("x","i",("x",))
>>> x[0:10] = np.arange(nf*10,10*(nf+1))
>>> f.close()
>>> # now read all those files in at once, in one Dataset.
>>> f = MFDataset("mftest*nc")
>>> print f.variables["x"][:]
[ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24
25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74
Class variables
var cmptypes
++ Inheritance: +
+Dataset
.cmptypes
+The
cmptypes
dictionary maps the names of +compound types defined for theGroup
orDataset
to instances of the +CompoundType
class.@@ -4568,9 +4777,16 @@Class variables
var data_model
++ Inheritance: +
+Dataset
.data_model
+
data_model
describes the netCDF +data model version, one ofNETCDF3_CLASSIC
,NETCDF4
, +NETCDF4_CLASSIC
,NETCDF3_64BIT_OFFSET
orNETCDF3_64BIT_DATA
.@@ -4578,9 +4794,16 @@Class variables
var dimensions
++ Inheritance: +
+Dataset
.dimensions
+The
dimensions
dictionary maps the names of +dimensions defined for theGroup
orDataset
to instances of the +Dimension
class.@@ -4588,9 +4811,18 @@Class variables
var disk_format
++ Inheritance: +
+Dataset
.disk_format
+
disk_format
describes the underlying +file format, one ofNETCDF3
,HDF5
,HDF4
, +PNETCDF
,DAP2
,DAP4
orUNDEFINED
. Only available if using +netcdf C library version >= 4.3.1, otherwise will always return +UNDEFINED
.@@ -4598,9 +4830,16 @@Class variables
var enumtypes
++ Inheritance: +
+Dataset
.enumtypes
+The
enumtypes
dictionary maps the names of +Enum types defined for theGroup
orDataset
to instances of the +EnumType
class.@@ -4608,9 +4847,14 @@Class variables
var file_format
++ Inheritance: +
+Dataset
.file_format
+same as
data_model
, retained for backwards compatibility.@@ -4618,9 +4862,17 @@Class variables
var groups
++ Inheritance: +
+Dataset
.groups
+The groups dictionary maps the names of groups created for +this
Dataset
orGroup
to instances of theGroup
class (the +Dataset
class is simply a special case of theGroup
class which +describes the root group in the netCDF4 file).@@ -4628,9 +4880,15 @@Class variables
var keepweakref
++ Inheritance: +
+Dataset
.keepweakref
+If
True
, child Dimension and Variables objects only keep weak references to +the parent Dataset or Group.@@ -4638,9 +4896,15 @@Class variables
var parent
++ Inheritance: +
+Dataset
.parent
+@@ -4648,9 +4912,17 @@Class variables
var path
++ Inheritance: +
+Dataset
.path
+
path
shows the location of theGroup
in +theDataset
in a unix directory format (the names of groups in the +hierarchy separated by backslashes). ADataset
instance is the root +group, so the path is simply'/'
.@@ -4658,9 +4930,16 @@Class variables
+var variables
++ Inheritance: +
+Dataset
.variables
+The
variables
dictionary maps the names of variables +defined for thisDataset
orGroup
to instances of theVariable
+class.@@ -4668,83 +4947,35 @@Class variables
-var vltypes
- - - --- -Static methods
- --- - - +--def __init__(
self, files, check=False, aggdim=None, exclude=[], master_file=None)
-Inheritance: -
- - -Dataset
.__init__
+Dataset
.vltypes
--
__init__(self, files, check=False, aggdim=None, exclude=[])
Open a Dataset spanning multiple files, making it look as if it was a -single file. Variables in the list of files that share the same -dimension (specified with the keyword
-aggdim
) are aggregated. If -aggdim
is not specified, the unlimited is aggregated. Currently, -aggdim
must be the leftmost (slowest varying) dimension of each -of the variables to be aggregated.-
files
: either a sequence of netCDF files or a string with a -wildcard (converted to a sorted list of files using glob) If -themaster_file
kwarg is not specified, the first file -in the list will become the "master" file, defining all the -variables with an aggregation dimension which may span -subsequent files. Attribute access returns attributes only from "master" -file. The files are always opened in read-only mode.-
check
: True if you want to do consistency checking to ensure the -correct variables structure for all of the netcdf files. Checking makes -the initialization of the MFDataset instance much slower. Default is -False.-
aggdim
: The name of the dimension to aggregate over (must -be the leftmost dimension of each of the variables to be aggregated). -If None (default), aggregate over the unlimited dimension.-
exclude
: A list of variable names to exclude from aggregation. -Default is an empty list.
master_file
: file to use as "master file", defining all the -variables with an aggregation dimension and all global attributes.-- -Static methods
+def createCompoundType(
self, datatype, datatype_name)
+ Inheritance: +
-Dataset
.createCompoundType
+Creates a new compound data type named
datatype_name
from the numpy ++Creates a new compound data type named
datatype_name
from the numpy dtype objectdatatype
.Note: If the new compound data type contains other compound data types (i.e. it is a 'nested' compound type, where not all of the elements @@ -4763,10 +4994,14 @@
Static methods
def createDimension(self, dimname, size=None)

Inheritance: Dataset.createDimension

Creates a new dimension with the given dimname and size.

size must be a positive integer or None, which stands for "unlimited" (default is None). Specifying a size of 0 also results in an unlimited dimension. The return value is the Dimension class instance describing the new dimension.
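For example (file and dimension names are hypothetical):

>>> from netCDF4 import Dataset
>>> f = Dataset("dims.nc", "w")
>>> time = f.createDimension("time", None)  # unlimited
>>> lat = f.createDimension("lat", 73)
>>> print len(lat), lat.isunlimited()
73 False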
def createEnumType(self, datatype, datatype_name, enum_dict)

Inheritance: Dataset.createEnumType

Creates a new Enum data type named datatype_name from a numpy integer dtype object datatype, and a python dictionary defining the enum fields and values.

The return value is the EnumType class instance describing the new datatype.
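A short sketch (the type name and members are invented for illustration):

>>> import numpy
>>> from netCDF4 import Dataset
>>> f = Dataset("enum.nc", "w")
>>> cloud_values = {"clear": 0, "cumulus": 1, "stratus": 2}
>>> cloud_t = f.createEnumType(numpy.uint8, "cloud_t", cloud_values)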
def createGroup(self, groupname)

Inheritance: Dataset.createGroup

Creates a new Group with the given groupname.

If groupname is specified as a path, using forward slashes as in unix to separate components, then intermediate groups will be created as necessary (analogous to mkdir -p in unix). For example, createGroup('/GroupA/GroupB/GroupC') will create GroupA and GroupA/GroupB if they don't already exist.
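A small example (the group names are hypothetical):

>>> from netCDF4 import Dataset
>>> f = Dataset("groups.nc", "w")
>>> run1 = f.createGroup("/experiments/run1")  # "experiments" is created on the fly
>>> print run1.path
/experiments/run1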
def createVLType(self, datatype, datatype_name)

Inheritance: Dataset.createVLType

Creates a new VLEN data type named datatype_name from a numpy dtype object datatype.

The return value is the VLType class instance describing the new datatype.
self, varname, datatype, dimensions=(), zlib=False, complevel=4, shuffle=True, fletcher32=False, contiguous=False, chunksizes=None, endian='native', least_significant_digit=None, fill_value=None)
+ Inheritance: +
-Dataset
.createVariable
+Creates a new variable with the given
varname
,datatype
, and ++Creates a new variable with the given
varname
,datatype
, anddimensions
. If dimensions are not given, the variable is assumed to be a scalar.If
varname
is specified as a path, using forward slashes as in unix to @@ -4958,10 +5209,14 @@Static methods
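For instance, combining a few of the keyword arguments from the signature above (file and variable names are hypothetical):

>>> from netCDF4 import Dataset
>>> f = Dataset("model.nc", "w")
>>> time = f.createDimension("time", None)
>>> lat = f.createDimension("lat", 73)
>>> temp = f.createVariable("temp", "f4", ("time", "lat"), zlib=True, least_significant_digit=3)
>>> temp.units = "K"  # attributes can be set directly on the new Variable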
def delncattr(self, name, value)

Inheritance: Dataset.delncattr

delete a netCDF dataset or group attribute. Use if you need to delete a netCDF attribute with the same name as one of the reserved python attributes.
def filepath(self, encoding=None)

Inheritance: Dataset.filepath

Get the file system path (or the opendap URL) which was used to open/create the Dataset. Requires netcdf >= 4.1.2. The path is decoded into a string using sys.getfilesystemencoding() by default; this can be changed using the encoding kwarg.
def get_variables_by_attributes(...)

Inheritance: Dataset.get_variables_by_attributes

Returns a list of variables that match specific conditions.

Can pass in key=value parameters and variables are returned that contain all of the matches. For example,

>>> # Get variables with x-axis attribute.
>>> vs = nc.get_variables_by_attributes(axis='X')
>>> # Get variables with matching "standard_name" attribute
>>> vs = nc.get_variables_by_attributes(standard_name='northward_sea_water_velocity')

Can pass in key=callable parameter and variables are returned if the callable returns True. The callable should accept a single parameter, the attribute value. None is given as the attribute value when the attribute does not exist on the variable. For example,

>>> # Get Axis variables
>>> vs = nc.get_variables_by_attributes(axis=lambda v: v in ['X', 'Y', 'Z', 'T'])
>>> # Get variables that don't have an "axis" attribute
>>> vs = nc.get_variables_by_attributes(axis=lambda v: v is None)
>>> # Get variables that have a "grid_mapping" attribute
>>> vs = nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None)
def getncattr(self, name)

Inheritance: Dataset.getncattr

retrieve a netCDF dataset or group attribute. Use if you need to get a netCDF attribute with the same name as one of the reserved python attributes.

option kwarg encoding can be used to specify the character encoding of a string attribute (default is utf-8).
def isopen(...)

Inheritance: Dataset.isopen

is the Dataset open or closed?
self, oldname, newname)
+ Inheritance: +
- +Dataset
.renameAttribute
+@@ -5093,10 +5352,14 @@Static methods
def renameDimension(
self, oldname, newname)
+ Inheritance: +
-Dataset
.renameDimension
++rename a
Dimension
namedoldname
tonewname
.rename a
Dimension
namedoldname
tonewname
.@@ -5108,10 +5371,14 @@Static methods
def renameGroup(
self, oldname, newname)
+ Inheritance: +
-Dataset
.renameGroup
++rename a
Group
namedoldname
tonewname
(requires netcdf >= 4.3.1).rename a
Group
namedoldname
tonewname
(requires netcdf >= 4.3.1).@@ -5123,10 +5390,14 @@Static methods
def renameVariable(
self, oldname, newname)
+ Inheritance: +
-Dataset
.renameVariable
++rename a
Variable
namedoldname
tonewname
rename a
Variable
namedoldname
tonewname
@@ -5138,10 +5409,14 @@Static methods
def set_always_mask(
self, True_or_False)
+ Inheritance: +
-Dataset
.set_always_mask
+Call
set_always_mask
for all variables contained in ++Call
set_always_mask
for all variables contained in thisDataset
orGroup
, as well as for all variables in all its subgroups.
True_or_False
: Boolean determining if automatic conversion of @@ -5161,10 +5436,14 @@Static methods
def set_auto_chartostring(
self, True_or_False)
+ Inheritance: +
-Dataset
.set_auto_chartostring
+Call
set_auto_chartostring
for all variables contained in thisDataset
or ++Call
set_auto_chartostring
for all variables contained in thisDataset
orGroup
, as well as for all variables in all its subgroups.
True_or_False
: Boolean determining if automatic conversion of all character arrays <--> string arrays should be performed for @@ -5183,10 +5462,14 @@Static methods
def set_auto_mask(
self, True_or_False)
+ Inheritance: +
-Dataset
.set_auto_mask
+Call
set_auto_mask
for all variables contained in thisDataset
or ++Call
set_auto_mask
for all variables contained in thisDataset
orGroup
, as well as for all variables in all its subgroups.@@ -5203,10 +5486,14 @@
True_or_False
: Boolean determining if automatic conversion to masked arrays shall be applied for all variables.Static methods
def set_auto_maskandscale(
self, True_or_False)
+ Inheritance: +
-Dataset
.set_auto_maskandscale
+Call
set_auto_maskandscale
for all variables contained in thisDataset
or ++Call
set_auto_maskandscale
for all variables contained in thisDataset
orGroup
, as well as for all variables in all its subgroups.@@ -5223,10 +5510,14 @@
True_or_False
: Boolean determining if automatic conversion to masked arrays and variable scaling shall be applied for all variables.Static methods
def set_auto_scale(
self, True_or_False)
+ Inheritance: +
-Dataset
.set_auto_scale
+Call
set_auto_scale
for all variables contained in thisDataset
or ++Call
set_auto_scale
for all variables contained in thisDataset
orGroup
, as well as for all variables in all its subgroups.@@ -5243,10 +5534,14 @@
True_or_False
: Boolean determining if automatic variable scaling shall be applied for all variables.Static methods
def set_fill_off(
self)
+ Inheritance: +
-Dataset
.set_fill_off
++Sets the fill mode for a
+Dataset
open for writing tooff
.@@ -5261,10 +5556,14 @@Sets the fill mode for a
Dataset
open for writing tooff
.This will prevent the data from being pre-filled with fill values, which may result in some performance improvements. However, you must then make sure the data is actually written before being read.
Static methods
def set_fill_on(
self)
+ Inheritance: +
-Dataset
.set_fill_on
+Sets the fill mode for a
+Dataset
open for writing toon
.+Sets the fill mode for a
Dataset
open for writing toon
.This causes data to be pre-filled with fill values. The fill values can be controlled by the variable's
_Fill_Value
attribute, but is usually sufficient to the use the netCDF default_Fill_Value
(defined @@ -5283,10 +5582,14 @@Static methods
def setncattr(
self,name,value)
+ Inheritance: +
-Dataset
.setncattr
+set a netCDF dataset or group attribute using name,value pair. +
set a netCDF dataset or group attribute using name,value pair. Use if you need to set a netCDF attribute with the with the same name as one of the reserved python attributes.
@@ -5300,10 +5603,14 @@+Static methods
def setncattr_string(
self,name,value)
+ Inheritance: +
-Dataset
.setncattr_string
++ +set a netCDF dataset or group string attribute using name,value pair. +
set a netCDF dataset or group string attribute using name,value pair. Use if you need to ensure that a netCDF attribute is created with type
NC_STRING
if the file format isNETCDF4
.@@ -5317,10 +5624,14 @@+Static methods
def setncatts(
self,attdict)
+ Inheritance: +
-Dataset
.setncatts
++set a bunch of netCDF dataset or group attributes at once using a python dictionary. +
@@ -5335,10 +5646,95 @@set a bunch of netCDF dataset or group attributes at once using a python dictionary. This may be faster when setting a lot of attributes for a
NETCDF3
formatted file, since nc_redef/nc_enddef is not called in between setting each attributeStatic methods
def sync(
self)
+ Inheritance: +
-Dataset
.sync
++Writes all buffered data in the
Dataset
to the disk file.+Writes all buffered data in the
Dataset
to the disk file.++ +Methods
+ +++ + +++ +def __init__(
self, files, check=False, aggdim=None, exclude=[])
++ Inheritance: +
+ + + +Dataset
.__init__
++Open a Dataset spanning multiple files, making it look as if it was a +single file. Variables in the list of files that share the same +dimension (specified with the keyword
+aggdim
) are aggregated. If +aggdim
is not specified, the unlimited is aggregated. Currently, +aggdim
must be the leftmost (slowest varying) dimension of each +of the variables to be aggregated.+
files
: either a sequence of netCDF files or a string with a +wildcard (converted to a sorted list of files using glob) If +themaster_file
kwarg is not specified, the first file +in the list will become the "master" file, defining all the +variables with an aggregation dimension which may span +subsequent files. Attribute access returns attributes only from "master" +file. The files are always opened in read-only mode.+
check
: True if you want to do consistency checking to ensure the +correct variables structure for all of the netcdf files. Checking makes +the initialization of the MFDataset instance much slower. Default is +False.+
aggdim
: The name of the dimension to aggregate over (must +be the leftmost dimension of each of the variables to be aggregated). +If None (default), aggregate over the unlimited dimension.+
exclude
: A list of variable names to exclude from aggregation. +Default is an empty list.
master_file
: file to use as "master file", defining all the +variables with an aggregation dimension and all global attributes.++ +++ + +++ +def close(
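A sketch of the master_file kwarg, reusing the two files created in the MFTime example below. Since attribute access goes through the "master" file, picking mftest_2.nc should yield that file's time units:

>>> from netCDF4 import MFDataset
>>> f = MFDataset("mftest*nc", master_file="mftest_2.nc")
>>> t = f.variables["time"]
>>> print t.units
days since 2000-02-01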
self)
++ Inheritance: +
+ + + +Dataset
.close
++close all the open files.
++ ++++ +def ncattrs(
self)
++ Inheritance: +
+ + + +Dataset
.ncattrs
+return the netcdf attribute names from the master file.
@@ -5354,27 +5750,27 @@Static methods
Class providing an interface to a MFDataset time Variable by imposing a unique common time unit and/or calendar to all files.
Example usage (See
-__init__
for more details):>>> import numpy ->>> f1 = Dataset("mftest_1.nc","w", format="NETCDF4_CLASSIC") ->>> f2 = Dataset("mftest_2.nc","w", format="NETCDF4_CLASSIC") ->>> f1.createDimension("time",None) ->>> f2.createDimension("time",None) ->>> t1 = f1.createVariable("time","i",("time",)) ->>> t2 = f2.createVariable("time","i",("time",)) ->>> t1.units = "days since 2000-01-01" ->>> t2.units = "days since 2000-02-01" ->>> t1.calendar = "standard" ->>> t2.calendar = "standard" +>>> import numpy +>>> f1 = Dataset("mftest_1.nc","w", format="NETCDF4_CLASSIC") +>>> f2 = Dataset("mftest_2.nc","w", format="NETCDF4_CLASSIC") +>>> f1.createDimension("time",None) +>>> f2.createDimension("time",None) +>>> t1 = f1.createVariable("time","i",("time",)) +>>> t2 = f2.createVariable("time","i",("time",)) +>>> t1.units = "days since 2000-01-01" +>>> t2.units = "days since 2000-02-01" +>>> t1.calendar = "standard" +>>> t2.calendar = "standard" >>> t1[:] = numpy.arange(31) >>> t2[:] = numpy.arange(30) >>> f1.close() >>> f2.close() ->>> # Read the two files in at once, in one Dataset. ->>> f = MFDataset("mftest*nc") ->>> t = f.variables["time"] +>>> # Read the two files in at once, in one Dataset. +>>> f = MFDataset("mftest*nc") +>>> t = f.variables["time"] >>> print t.units days since 2000-01-01 ->>> print t[32] # The value written in the file, inconsistent with the MF time units. +>>> print t[32] # The value written in the file, inconsistent with the MF time units. 1 >>> T = MFTime(t) >>> print T[32] @@ -5389,9 +5785,9 @@Ancestors (in MRO)
-
- MFTime
- netCDF4._netCDF4._Variable
-- builtins.object
+- __builtin__.object
Static methods
+Methods
@@ -5401,8 +5797,7 @@Static methods
--
__init__(self, time, units=None, calendar=None)
Create a time Variable with units consistent across a multifile +
Create a time Variable with units consistent across a multifile dataset.
time
: Time variable from aMFDataset
.
units
: Time units, for example,'days since 1979-01-01'
. IfNone
, @@ -5419,7 +5814,7 @@Static methods
-@@ -5433,7 +5828,7 @@def ncattrs(
self)
+def ncattrs(
...)
Static methods
-@@ -5447,7 +5842,7 @@def set_auto_chartostring(
self, val)
+def set_auto_chartostring(
...)
Static methods
-@@ -5461,7 +5856,7 @@def set_auto_mask(
self, val)
+def set_auto_mask(
...)
Static methods
-def