140 multiple outputs #311

Merged: 17 commits, Jul 21, 2022

Changes from all commits
318 changes: 33 additions & 285 deletions syncopy/datatype/base_data.py

Large diffs are not rendered by default.

57 changes: 16 additions & 41 deletions syncopy/datatype/continuous_data.py
@@ -65,7 +65,7 @@ def data(self, inData):
def __str__(self):
# Get list of print-worthy attributes
ppattrs = [attr for attr in self.__dir__()
- if not (attr.startswith("_") or attr in ["log", "trialdefinition", "hdr"])]
+ if not (attr.startswith("_") or attr in ["log", "trialdefinition"])]
ppattrs = [attr for attr in ppattrs
if not (inspect.ismethod(getattr(self, attr))
or isinstance(getattr(self, attr), Iterator))]
@@ -183,38 +183,25 @@ def time(self):

# # Helper function that reads a single trial into memory
# @staticmethod
- # def _copy_trial(trialno, filename, dimord, sampleinfo, hdr):
+ # def _copy_trial(trialno, filename, dimord, sampleinfo):
# """
# # FIXME: currently unused - check back to see if we need this functionality
# """
# idx = [slice(None)] * len(dimord)
# idx[dimord.index("time")] = slice(int(sampleinfo[trialno, 0]), int(sampleinfo[trialno, 1]))
# idx = tuple(idx)
- # if hdr is None:
- # # Generic case: data is either a HDF5 dataset or memmap
- # try:
- # with h5py.File(filename, mode="r") as h5f:
- # h5keys = list(h5f.keys())
- # cnt = [h5keys.count(dclass) for dclass in spy.datatype.__all__
- # if not inspect.isfunction(getattr(spy.datatype, dclass))]
- # if len(h5keys) == 1:
- # arr = h5f[h5keys[0]][idx]
- # else:
- # arr = h5f[spy.datatype.__all__[cnt.index(1)]][idx]
- # except:
- # try:
- # arr = np.array(open_memmap(filename, mode="c")[idx])
- # except:
- # raise SPYIOError(filename)
- # return arr
- # else:
- # # For VirtualData objects
- # dsets = []
- # for fk, fname in enumerate(filename):
- # dsets.append(np.memmap(fname, offset=int(hdr[fk]["length"]),
- # mode="r", dtype=hdr[fk]["dtype"],
- # shape=(hdr[fk]["M"], hdr[fk]["N"]))[idx])
- # return np.vstack(dsets)
+ # try:
+ # with h5py.File(filename, mode="r") as h5f:
+ # h5keys = list(h5f.keys())
+ # cnt = [h5keys.count(dclass) for dclass in spy.datatype.__all__
+ # if not inspect.isfunction(getattr(spy.datatype, dclass))]
+ # if len(h5keys) == 1:
+ # arr = h5f[h5keys[0]][idx]
+ # else:
+ # arr = h5f[spy.datatype.__all__[cnt.index(1)]][idx]
+ # except:
+ # raise SPYIOError(filename)
+ # return arr

# Helper function that grabs a single trial
def _get_trial(self, trialno):
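
Note: the hunk above strips the `hdr`/memmap branches from the (currently unused, commented-out) `_copy_trial` helper, so a single trial is now always read straight from the backing HDF5 container. For reference, a minimal, self-contained sketch of that access pattern; the function name and arguments here are illustrative assumptions, not part of Syncopy's API:

```python
import h5py

def read_trial_block(filename, dset_name, sampleinfo, trialno, time_axis=0):
    """Read one trial's samples from an HDF5 dataset without loading
    the whole array into memory (illustrative only)."""
    start, stop = int(sampleinfo[trialno, 0]), int(sampleinfo[trialno, 1])
    with h5py.File(filename, mode="r") as h5f:
        dset = h5f[dset_name]
        idx = [slice(None)] * dset.ndim
        idx[time_axis] = slice(start, stop)   # select the trial along the time axis
        return dset[tuple(idx)]               # h5py reads only this hyperslab from disk
```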
@@ -414,22 +401,13 @@ class AnalogData(ContinuousData):
The data is always stored as a two-dimensional array on disk. On disk, Trials are
concatenated along the time axis.
- Data is only read from disk on demand, similar to memory maps and HDF5
- files.
+ Data is only read from disk on demand, similar to HDF5 files.
"""

- _infoFileProperties = ContinuousData._infoFileProperties + ("_hdr",)
+ _infoFileProperties = ContinuousData._infoFileProperties
_defaultDimord = ["time", "channel"]
_stackingDimLabel = "time"

- @property
- def hdr(self):
- """dict with information about raw data
- This property is empty for data created by Syncopy.
- """
- return self._hdr

# "Constructor"
def __init__(self,
data=None,
@@ -468,9 +446,6 @@ def __init__(self,
if data is not None and dimord is None:
dimord = self._defaultDimord

- # Assign default (blank) values
- self._hdr = None

# Call parent initializer
super().__init__(data=data,
filename=filename,
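
The AnalogData docstring above now promises on-demand reads via HDF5 only, with the memmap wording and the `hdr` property removed. A quick usage sketch of what that means in practice, assuming the usual `samplerate` keyword and the `.trials` accessor from Syncopy's documentation; treat this as illustrative, not as part of this PR:

```python
import numpy as np
import syncopy as spy

# 10 s of 16-channel data sampled at 1 kHz (default dimord ["time", "channel"])
arr = np.random.randn(10000, 16)
adata = spy.AnalogData(data=arr, samplerate=1000)

# The array is backed by an HDF5 dataset on disk; indexing `.trials`
# pulls only the requested trial into memory.
print(adata.filename)      # path of the backing HDF5 container
trial0 = adata.trials[0]   # NumPy array holding the first trial
```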
28 changes: 6 additions & 22 deletions syncopy/datatype/discrete_data.py
@@ -13,7 +13,6 @@
# Local imports
from .base_data import BaseData, Indexer, FauxTrial
from .methods.definetrial import definetrial
- from .methods.selectdata import selectdata
from syncopy.shared.parsers import scalar_parser, array_parser
from syncopy.shared.errors import SPYValueError
from syncopy.shared.tools import best_match
@@ -29,7 +28,7 @@ class DiscreteData(BaseData, ABC):
This class cannot be instantiated. Use one of the children instead.
"""

- _infoFileProperties = BaseData._infoFileProperties + ("_hdr", "samplerate", )
+ _infoFileProperties = BaseData._infoFileProperties + ("samplerate", )
_hdfFileAttributeProperties = BaseData._hdfFileAttributeProperties + ("samplerate",)
_hdfFileDatasetProperties = BaseData._hdfFileDatasetProperties + ("data",)

@@ -59,7 +58,7 @@ def data(self, inData):
def __str__(self):
# Get list of print-worthy attributes
ppattrs = [attr for attr in self.__dir__()
- if not (attr.startswith("_") or attr in ["log", "trialdefinition", "hdr"])]
+ if not (attr.startswith("_") or attr in ["log", "trialdefinition"])]
ppattrs = [attr for attr in ppattrs
if not (inspect.ismethod(getattr(self, attr))
or isinstance(getattr(self, attr), Iterator))]
@@ -113,14 +112,6 @@ def __str__(self):
ppstr += "\nUse `.log` to see object history"
return ppstr

- @property
- def hdr(self):
- """dict with information about raw data
- This property is empty for data created by Syncopy.
- """
- return self._hdr

@property
def sample(self):
"""Indices of all recorded samples"""
@@ -309,7 +300,6 @@ def __init__(self, data=None, samplerate=None, trialid=None, **kwargs):
# Assign (default) values
self._trialid = None
self._samplerate = None
- self._hdr = None
self._data = None

self.samplerate = samplerate
@@ -335,9 +325,7 @@ class SpikeData(DiscreteData):
stored as a two-dimensional [nSpikes x 3] array on disk with the columns
being ``["sample", "channel", "unit"]``.
- Data is only read from disk on demand, similar to memory maps and HDF5
- files.
+ Data is only read from disk on demand, similar to HDF5 files.
"""

_infoFileProperties = DiscreteData._infoFileProperties + ("channel", "unit",)
@@ -485,8 +473,7 @@ def __init__(self,
ordered list of dimension labels
1. `filename` + `data` : create hdf dataset incl. sampleinfo @filename
- 2. `filename` no `data` : read from file or memmap (spy, hdf5, npy file
- array -> memmap)
+ 2. `filename` no `data` : read from file (spy, hdf5 file)
3. just `data` : try to attach data (error checking done by
:meth:`SpikeData.data.setter`)
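
With the memmap path gone, the constructor docstring above leaves case 3 ("just `data`") as the plain in-memory route. A hedged sketch of that case, building a SpikeData object from a [nSpikes x 3] array with columns ``["sample", "channel", "unit"]`` as stated in the class docstring; the keyword names follow the `DiscreteData.__init__` signature shown earlier in this diff and are otherwise an assumption:

```python
import numpy as np
import syncopy as spy

# Hypothetical spike table: one row per spike, columns sample / channel / unit
nspikes = 50
samples = np.sort(np.random.randint(0, 10000, size=(nspikes, 1)), axis=0)
channels = np.random.randint(0, 8, size=(nspikes, 1))
units = np.random.randint(0, 3, size=(nspikes, 1))
spikes = np.hstack([samples, channels, units])

# Case 3 above: attach `data` directly; validation is left to the data setter
sdata = spy.SpikeData(data=spikes, samplerate=1000,
                      dimord=["sample", "channel", "unit"])
```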
@@ -517,9 +504,7 @@ class EventData(DiscreteData):
stimulus was turned on, etc. These usually occur at non-regular time points
and have associated event codes.
- Data is only read from disk on demand, similar to memory maps and HDF5
- files.
+ Data is only read from disk on demand, similar to HDF5 files.
"""

_defaultDimord = ["sample", "eventid"]
@@ -604,8 +589,7 @@ def __init__(self,
ordered list of dimension labels
1. `filename` + `data` : create hdf dataset incl. sampleinfo @filename
- 2. `filename` no `data` : read from file or memmap (spy, hdf5, npy file
- array -> memmap)
+ 2. `filename` no `data` : read from file (spy, hdf5)
3. just `data` : try to attach data (error checking done by
:meth:`EventData.data.setter`)
4 changes: 2 additions & 2 deletions syncopy/datatype/methods/arithmetic.py
@@ -13,7 +13,7 @@
from syncopy.shared.parsers import data_parser
from syncopy.shared.errors import SPYValueError, SPYTypeError, SPYWarning, SPYInfo
from syncopy.shared.computational_routine import ComputationalRoutine
- from syncopy.shared.kwarg_decorators import unwrap_io
+ from syncopy.shared.kwarg_decorators import process_io
from syncopy.shared.computational_routine import ComputationalRoutine
if __acme__:
import dask.distributed as dd
@@ -429,7 +429,7 @@ def _perform_computation(baseObj,
return out


- @unwrap_io
+ @process_io
def arithmetic_cF(base_dat, operand_dat, operand_idx, operation=None, opres_type=None,
noCompute=False, chunkShape=None):
"""
4 changes: 2 additions & 2 deletions syncopy/datatype/methods/padding.py
@@ -9,7 +9,7 @@
# Local imports
from syncopy.datatype.continuous_data import AnalogData
from syncopy.shared.computational_routine import ComputationalRoutine
- from syncopy.shared.kwarg_decorators import unwrap_io
+ from syncopy.shared.kwarg_decorators import process_io
from syncopy.shared.parsers import data_parser, array_parser, scalar_parser
from syncopy.shared.errors import SPYTypeError, SPYValueError, SPYWarning
from syncopy.shared.kwarg_decorators import unwrap_cfg, unwrap_select, detect_parallel_client
@@ -564,7 +564,7 @@ def _nextpow2(number):
return n


- @unwrap_io
+ @process_io
def padding_cF(trl_dat, timeAxis, chanAxis, pad_opt, noCompute=False, chunkShape=None):
"""
Perform trial data padding
4 changes: 2 additions & 2 deletions syncopy/datatype/methods/selectdata.py
@@ -10,7 +10,7 @@
from syncopy.shared.tools import get_frontend_cfg, get_defaults
from syncopy.shared.parsers import data_parser
from syncopy.shared.errors import SPYValueError, SPYTypeError, SPYInfo
- from syncopy.shared.kwarg_decorators import unwrap_cfg, unwrap_io, detect_parallel_client
+ from syncopy.shared.kwarg_decorators import unwrap_cfg, process_io, detect_parallel_client
from syncopy.shared.computational_routine import ComputationalRoutine

__all__ = ["selectdata"]
@@ -365,7 +365,7 @@ def _get_selection_size(data):
return sum(fauxSizes) / 1024**2


- @unwrap_io
+ @process_io
def _selectdata(trl, noCompute=False, chunkShape=None):
if noCompute:
return trl.shape, trl.dtype