Skip to content

Commit

Permalink
ENH: provide dotted (attribute) access in stores (e.g. store.df == store['df'])
Browse files Browse the repository at this point in the history
  • Loading branch information
  • Loading branch information
jreback committed Feb 7, 2013
1 parent eb2c048 commit 7065ff0
Show file tree
Hide file tree
Showing 6 changed files with 125 additions and 61 deletions.
6 changes: 5 additions & 1 deletion RELEASE.rst
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,11 @@ Where to get it
* Binary installers on PyPI: http://pypi.python.org/pypi/pandas
* Documentation: http://pandas.pydata.org

- Fix weird PyTables error when using too many selectors in a where
``HDFStore``

- Fix weird PyTables error when using too many selectors in a where
- Provide dotted attribute access to ``get`` from stores (e.g. store.df == store['df'])
- Internally, change all variables to be private-like (now have leading underscore)

pandas 0.10.1
=============
Expand Down
3 changes: 3 additions & 0 deletions doc/source/io.rst
Original file line number Diff line number Diff line change
Expand Up @@ -1021,6 +1021,9 @@ In a current or later Python session, you can retrieve stored objects:
# store.get('df') is an equivalent method
store['df']
# dotted (attribute) access provides get as well
store.df
Deletion of the object specified by the key

.. ipython:: python
Expand Down
18 changes: 18 additions & 0 deletions doc/source/v0.10.2.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
.. _whatsnew_0102:

v0.10.2 (February ??, 2013)
---------------------------

This is a minor release from 0.10.1 and includes many new features and
enhancements along with a large number of bug fixes. There are also a number of
important API changes that long-time pandas users should pay close attention
to.

**Enhancements**

- In ``HDFStore``, provide dotted attribute access to ``get`` from stores (e.g. store.df == store['df'])

See the `full release notes
<https://github.com/pydata/pandas/blob/master/RELEASE.rst>`__ or issue tracker
on GitHub for a complete list.

2 changes: 2 additions & 0 deletions doc/source/whatsnew.rst
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@ What's New

These are new features and improvements of note in each release.

.. include:: v0.10.2.txt

.. include:: v0.10.1.txt

.. include:: v0.10.0.txt
Expand Down
121 changes: 65 additions & 56 deletions pandas/io/pytables.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,19 +197,19 @@ def __init__(self, path, mode='a', complevel=None, complib=None,
except ImportError: # pragma: no cover
raise Exception('HDFStore requires PyTables')

self.path = path
self.mode = mode
self.handle = None
self.complevel = complevel
self.complib = complib
self.fletcher32 = fletcher32
self.filters = None
self._path = path
self._mode = mode
self._handle = None
self._complevel = complevel
self._complib = complib
self._fletcher32 = fletcher32
self._filters = None
self.open(mode=mode, warn=False)

@property
def root(self):
""" return the root node """
return self.handle.root
return self._handle.root

def __getitem__(self, key):
    """ dict-style access: retrieve the stored object for *key* (delegates to get) """
    value = self.get(key)
    return value
Expand All @@ -220,10 +220,19 @@ def __setitem__(self, key, value):
def __delitem__(self, key):
    """ dict-style deletion: remove the stored object for *key* (delegates to remove) """
    result = self.remove(key)
    return result

def __getattr__(self, name):
    """ allow attribute access to get stores (e.g. store.df == store['df'])

    Only invoked when normal attribute lookup fails, so real
    attributes/methods are unaffected. Raises AttributeError when
    *name* is not a stored key either.
    """
    try:
        return self.get(name)
    except Exception:
        # narrowed from a bare ``except:`` so KeyboardInterrupt and
        # SystemExit are not silently swallowed during lookup
        pass
    raise AttributeError("'%s' object has no attribute '%s'" %
                         (type(self).__name__, name))

def __contains__(self, key):
""" check for existance of this key
can match the exact pathname or the pathnm w/o the leading '/'
"""
"""
node = self.get_node(key)
if node is not None:
name = node._v_pathname
Expand All @@ -234,7 +243,7 @@ def __len__(self):
return len(self.groups())

def __repr__(self):
output = '%s\nFile path: %s\n' % (type(self), self.path)
output = '%s\nFile path: %s\n' % (type(self), self._path)

if len(self.keys()):
keys = []
Expand Down Expand Up @@ -277,7 +286,7 @@ def open(self, mode='a', warn=True):
mode : {'a', 'w', 'r', 'r+'}, default 'a'
See HDFStore docstring or tables.openFile for info about modes
"""
self.mode = mode
self._mode = mode
if warn and mode == 'w': # pragma: no cover
while True:
response = raw_input("Re-opening as mode='w' will delete the "
Expand All @@ -286,36 +295,36 @@ def open(self, mode='a', warn=True):
break
elif response == 'n':
return
if self.handle is not None and self.handle.isopen:
self.handle.close()
if self._handle is not None and self._handle.isopen:
self._handle.close()

if self.complib is not None:
if self.complevel is None:
self.complevel = 9
self.filters = _tables().Filters(self.complevel,
self.complib,
fletcher32=self.fletcher32)
if self._complib is not None:
if self._complevel is None:
self._complevel = 9
self._filters = _tables().Filters(self._complevel,
self._complib,
fletcher32=self._fletcher32)

try:
self.handle = h5_open(self.path, self.mode)
self._handle = h5_open(self._path, self._mode)
except IOError, e: # pragma: no cover
if 'can not be written' in str(e):
print 'Opening %s in read-only mode' % self.path
self.handle = h5_open(self.path, 'r')
print 'Opening %s in read-only mode' % self._path
self._handle = h5_open(self._path, 'r')
else:
raise

def close(self):
"""
Close the PyTables file handle
"""
self.handle.close()
self._handle.close()

def flush(self):
"""
Force all buffered modifications to be written to disk
"""
self.handle.flush()
self._handle.flush()

def get(self, key):
"""
Expand Down Expand Up @@ -617,14 +626,14 @@ def create_table_index(self, key, **kwargs):
def groups(self):
""" return a list of all the top-level nodes (that are not themselves a pandas storage object) """
_tables()
return [ g for g in self.handle.walkNodes() if getattr(g._v_attrs,'pandas_type',None) or getattr(g,'table',None) or (isinstance(g,_table_mod.table.Table) and g._v_name != 'table') ]
return [ g for g in self._handle.walkNodes() if getattr(g._v_attrs,'pandas_type',None) or getattr(g,'table',None) or (isinstance(g,_table_mod.table.Table) and g._v_name != 'table') ]

def get_node(self, key):
""" return the node with the key or None if it does not exist """
try:
if not key.startswith('/'):
key = '/' + key
return self.handle.getNode(self.root, key)
return self._handle.getNode(self.root, key)
except:
return None

Expand Down Expand Up @@ -751,7 +760,7 @@ def _write_to_group(self, key, value, index=True, table=False, append=False, com

# remove the node if we are not appending
if group is not None and not append:
self.handle.removeNode(group, recursive=True)
self._handle.removeNode(group, recursive=True)
group = None

if group is None:
Expand All @@ -768,7 +777,7 @@ def _write_to_group(self, key, value, index=True, table=False, append=False, com
new_path += p
group = self.get_node(new_path)
if group is None:
group = self.handle.createGroup(path, p)
group = self._handle.createGroup(path, p)
path = new_path

s = self._create_storer(group, value, table=table, append=append, **kwargs)
Expand Down Expand Up @@ -1304,28 +1313,28 @@ def pathname(self):
return self.group._v_pathname

@property
def handle(self):
return self.parent.handle
def _handle(self):
return self.parent._handle

@property
def _quiet(self):
return self.parent._quiet

@property
def filters(self):
return self.parent.filters
def _filters(self):
return self.parent._filters

@property
def complevel(self):
return self.parent.complevel
def _complevel(self):
return self.parent._complevel

@property
def fletcher32(self):
return self.parent.fletcher32
def _fletcher32(self):
return self.parent._fletcher32

@property
def complib(self):
return self.parent.complib
def _complib(self):
return self.parent._complib

@property
def attrs(self):
Expand Down Expand Up @@ -1380,7 +1389,7 @@ def write(self, **kwargs):
def delete(self, where = None, **kwargs):
""" support fully deleting the node in its entirety (only) - where specification must be None """
if where is None:
self.handle.removeNode(self.group, recursive=True)
self._handle.removeNode(self.group, recursive=True)
return None

raise NotImplementedError("cannot delete on an abstract storer")
Expand Down Expand Up @@ -1583,7 +1592,7 @@ def read_index_node(self, node):

def write_array(self, key, value):
if key in self.group:
self.handle.removeNode(self.group, key)
self._handle.removeNode(self.group, key)

# Transform needed to interface with pytables row/col notation
empty_array = any(x == 0 for x in value.shape)
Expand All @@ -1593,7 +1602,7 @@ def write_array(self, key, value):
value = value.T
transposed = True

if self.filters is not None:
if self._filters is not None:
atom = None
try:
# get the atom for this datatype
Expand All @@ -1603,9 +1612,9 @@ def write_array(self, key, value):

if atom is not None:
# create an empty chunked array and fill it from value
ca = self.handle.createCArray(self.group, key, atom,
ca = self._handle.createCArray(self.group, key, atom,
value.shape,
filters=self.filters)
filters=self._filters)
ca[:] = value
getattr(self.group, key)._v_attrs.transposed = transposed
return
Expand All @@ -1622,21 +1631,21 @@ def write_array(self, key, value):
ws = performance_doc % (inferred_type,key)
warnings.warn(ws, PerformanceWarning)

vlarr = self.handle.createVLArray(self.group, key,
vlarr = self._handle.createVLArray(self.group, key,
_tables().ObjectAtom())
vlarr.append(value)
elif value.dtype.type == np.datetime64:
self.handle.createArray(self.group, key, value.view('i8'))
self._handle.createArray(self.group, key, value.view('i8'))
getattr(self.group, key)._v_attrs.value_type = 'datetime64'
else:
if empty_array:
# ugly hack for length 0 axes
arr = np.empty((1,) * value.ndim)
self.handle.createArray(self.group, key, arr)
self._handle.createArray(self.group, key, arr)
getattr(self.group, key)._v_attrs.value_type = str(value.dtype)
getattr(self.group, key)._v_attrs.shape = value.shape
else:
self.handle.createArray(self.group, key, value)
self._handle.createArray(self.group, key, value)

getattr(self.group, key)._v_attrs.transposed = transposed

Expand Down Expand Up @@ -1729,7 +1738,7 @@ def write(self, obj, **kwargs):
for name, ss in obj.iteritems():
key = 'sparse_series_%s' % name
if key not in self.group._v_children:
node = self.handle.createGroup(self.group, key)
node = self._handle.createGroup(self.group, key)
else:
node = getattr(self.group, key)
s = SparseSeriesStorer(self.parent, node)
Expand Down Expand Up @@ -1763,7 +1772,7 @@ def write(self, obj, **kwargs):
for name, sdf in obj.iteritems():
key = 'sparse_frame_%s' % name
if key not in self.group._v_children:
node = self.handle.createGroup(self.group, key)
node = self._handle.createGroup(self.group, key)
else:
node = getattr(self.group, key)
s = SparseFrameStorer(self.parent, node)
Expand Down Expand Up @@ -2293,13 +2302,13 @@ def create_description(self, complib=None, complevel=None, fletcher32=False, exp

if complib:
if complevel is None:
complevel = self.complevel or 9
complevel = self._complevel or 9
filters = _tables().Filters(complevel=complevel,
complib=complib,
fletcher32=fletcher32 or self.fletcher32)
fletcher32=fletcher32 or self._fletcher32)
d['filters'] = filters
elif self.filters is not None:
d['filters'] = self.filters
elif self._filters is not None:
d['filters'] = self._filters

return d

Expand Down Expand Up @@ -2484,7 +2493,7 @@ def write(self, obj, axes=None, append=False, complib=None,
expectedrows=None, **kwargs):

if not append and self.is_exists:
self.handle.removeNode(self.group, 'table')
self._handle.removeNode(self.group, 'table')

# create the axes
self.create_axes(axes=axes, obj=obj, validate=append,
Expand All @@ -2502,7 +2511,7 @@ def write(self, obj, axes=None, append=False, complib=None,
self.set_attrs()

# create the table
table = self.handle.createTable(self.group, **options)
table = self._handle.createTable(self.group, **options)

else:
table = self.table
Expand Down Expand Up @@ -2579,7 +2588,7 @@ def delete(self, where=None, **kwargs):
# delete all rows (and return the nrows)
if where is None or not len(where):
nrows = self.nrows
self.handle.removeNode(self.group, recursive=True)
self._handle.removeNode(self.group, recursive=True)
return nrows

# infer the data kind
Expand Down
Loading

0 comments on commit 7065ff0

Please sign in to comment.