diff --git a/README.rst b/README.rst
index 4af1a3a4b..ed95331bb 100644
--- a/README.rst
+++ b/README.rst
@@ -1,16 +1,43 @@
-.. image:: doc/source/syncopy_logo.png
+.. image:: doc/source/_static/syncopy_logo.png
 
 Systems Neuroscience Computing in Python
 ========================================
-
-Syncopy is going to be a user-friendly analysis toolkit for large-scale
-electrophysiology data. The goals of the project are:
+Syncopy is going to be a user-friendly toolkit for large-scale
+electrophysiology data analysis in Python. We strive to achieve the following goals:
 
 1. Syncopy is a *fully open source Python* environment for electrophysiology
    data analysis. 
-2. Syncopy is *scalable* for large datasets. It will automatically make use of the
-   available computing resources by being developed with built-in parallelism. 
+2. Syncopy is *scalable* and built for *very large datasets*. It will automatically 
+   make use of available computing resources and is developed with built-in 
+   parallelism in mind. 
 3. Syncopy is *compatible with FieldTrip*. 
-    - data and results can loaded into MATLAB and Python
+    - data and results can be loaded into MATLAB and Python
     - parameter names and function call syntax are as similar as possible
 
+Syncopy is developed at the
+`Ernst Strüngmann Institute (ESI) gGmbH for Neuroscience in Cooperation with Max Planck Society <https://www.esi-frankfurt.de/>`_
+and released free of charge under the 
+`BSD 3-Clause "New" or "Revised" License <https://en.wikipedia.org/wiki/BSD_licenses#3-clause_license_(%22BSD_License_2.0%22,_%22Revised_BSD_License%22,_%22New_BSD_License%22,_or_%22Modified_BSD_License%22)>`_. 
+
+Contact
+-------
+To report bugs or ask questions, please use our GitHub issue tracker. For
+general inquiries, please contact syncopy (at) esi-frankfurt.de.
+
+Installation
+============
+Syncopy can be installed using pip:
+
+.. code-block:: bash
+
+   pip install syncopy
+
+To get the latest development version, simply clone our GitHub repository:
+
+.. code-block:: bash
+
+   git clone https://github.com/path/to/syncopy
+
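+The cloned repository can then be installed with pip, e.g., in editable mode (a minimal
+sketch assuming the clone created a local ``syncopy`` directory containing a standard
+Python setup script):
+
+.. code-block:: bash
+
+   pip install -e ./syncopy
+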
+Getting Started
+===============
+Please visit our `online documentation <http://monitor/syncopy/quickstart.html>`_.
diff --git a/doc/source/README.rst b/doc/source/README.rst
index 48cc0a84b..7ed4de833 100644
--- a/doc/source/README.rst
+++ b/doc/source/README.rst
@@ -1,28 +1,119 @@
-.. Syncopy documentation master file, created by
+.. Syncopy documentation master file
 
 .. title:: Syncopy Documentation
    
-.. image:: syncopy_logo.png
+.. image:: _static/syncopy_logo.png
     :alt: Syncopy logo
     :height: 200px
     :align: center
 
-Welcome to Syncopy's Documentation!
-===================================
 
-Syncopy (Systems Neuroscience COmputing in Python) is a Python toolkit for
-user-friendly, large-scale electrophysiology data analysis. The goals of the
-project are:
+Welcome to the Documentation of SyNCoPy!
+========================================
 
-1. Syncopy provides a fully *open source* Python environment for reproducable
+SyNCoPy (**Sy**\stems **N**\euroscience **Co**\mputing in **Py**\thon, spelled Syncopy in the following) 
+is a Python toolkit for user-friendly, large-scale electrophysiology data analysis. 
+We strive to achieve the following goals:
+
+1. Syncopy provides a fully *open source* Python environment for reproducible
    electrophysiology data analysis.
-2. Syncopy is *scalable* for large datasets. It automatically makes use of the
-   available computing resources by being developed with built-in parallelism.
-3. Syncopy is *compatible* with the MATLAB toolbox FieldTrip.
+2. Syncopy is *scalable* to accommodate *very large* datasets. It automatically 
+   makes use of available computing resources and is developed with built-in 
+   parallelism in mind.
+3. Syncopy is *compatible* with the MATLAB toolbox `FieldTrip <http://www.fieldtriptoolbox.org/>`_.
+
+.. contents:: Contents
+    :local:
+    :depth: 1
+
+Getting Started
+---------------
+Our :doc:`Quickstart Guide <quickstart>` covers installation and basic usage. 
+More in-depth information relevant to every user of Syncopy can be found in our 
+:doc:`User Guide <user/users>`. Want to contribute or just curious how the sausage 
+is made? Take a look at our :doc:`Developer Guide <developer/developers>`. Once again,
+in order of brevity:
+
+* :doc:`Quickstart Guide <quickstart>`
+* :doc:`User Guide <user/users>`
+* :doc:`Developer Guide <developer/developers>`
+
+Resources by Topic 
+^^^^^^^^^^^^^^^^^^
+Looking for information regarding a specific analysis method? The table below 
+might help. 
+
+.. cssclass:: table-hover
+
++-------------------+-----------------------+---------------------------+
+| **Topic**         | **Resources**         | **Description**           |
++-------------------+-----------------------+---------------------------+
+| |TnW|             | |Spy4FT|              | |Spy4FTDesc|              |
+|                   +-----------------------+---------------------------+
+|                   | |SpyData|             | |SpyDataDesc|             |
+|                   +-----------------------+---------------------------+
+|                   | |UG|                  | |UGDesc|                  |
++-------------------+-----------------------+---------------------------+
+| |RDoc|            | |UsrAPI|              | |UsrAPIDesc|              |
+|                   +-----------------------+---------------------------+
+|                   | |DevAPI|              | |DevAPIDesc|              |
+|                   +-----------------------+---------------------------+
+|                   | |DevTools|            | |DevToolsDesc|            |
+|                   +-----------------------+---------------------------+
+|                   | |Indx|                | |IndxDesc|                |
++-------------------+-----------------------+---------------------------+
+| |Spec|            | |SpecTut|             | |SpecTutDesc|             |
+|                   +-----------------------+---------------------------+
+|                   | |SpecEx|              | |SpecExDesc|              |
+|                   +-----------------------+---------------------------+
+|                   | |SpecAdv|             | |SpecAdvDesc|             |
++-------------------+-----------------------+---------------------------+
+| |Con|             | |ConTut|              | |ConTutDesc|              |
+|                   +-----------------------+---------------------------+
+|                   | |ConEx|               | |ConExDesc|               |
+|                   +-----------------------+---------------------------+
+|                   | |ConAdv|              | |ConAdvDesc|              |
++-------------------+-----------------------+---------------------------+
+
+.. |TnW| replace:: *Tutorials & Walkthroughs*
+.. |RDoc| replace:: *Reference Documentation*
+.. |Spec| replace:: *Spectral Estimation*
+.. |Con| replace:: *Connectivity*
 
+.. |Spy4FT| replace:: :doc:`Syncopy for FieldTrip Users <user/fieldtrip>`
+.. |Spy4FTDesc| replace:: Quick introduction to Syncopy from a FieldTrip user's perspective
+.. |SpyData| replace:: :doc:`Data Handling in Syncopy <user/data_handling>`
+.. |SpyDataDesc| replace:: Overview of Syncopy's data management
+.. |UG| replace:: :doc:`Syncopy User Guide <user/users>`
+.. |UGDesc| replace:: Syncopy's user manual
 
-Contents
-========
+.. |UsrAPI| replace:: :doc:`User API <user/user_api>`
+.. |UsrAPIDesc| replace:: The subset of Syncopy's interface relevant to users
+.. |DevAPI| replace:: :doc:`Developer API <developer/developer_api>`
+.. |DevAPIDesc| replace:: The parts of Syncopy mostly interesting for developers
+.. |Indx| replace:: :ref:`Package Index <genindex>`
+.. |IndxDesc| replace:: Index of all functions/classes
+.. |DevTools| replace:: :doc:`Syncopy Developer Tools <developer/tools>`
+.. |DevToolsDesc| replace:: Tools for contributing new functionality to Syncopy
+
+.. |SpecTut| replace:: Spectral Estimation Tutorial
+.. |SpecTutDesc| replace:: An introduction to the available spectral estimation methods in Syncopy
+.. |SpecEx| replace:: Spectral Estimation Examples
+.. |SpecExDesc| replace:: Example scripts and notebooks illustrating spectral estimation in Syncopy
+.. |SpecAdv| replace:: Advanced Topics in Spectral Estimation
+.. |SpecAdvDesc| replace:: Technical details and notes for advanced users/developers
+
+.. |ConTut| replace:: Connectivity Tutorial
+.. |ConTutDesc| replace:: An introduction to connectivity estimation in Syncopy
+.. |ConEx| replace:: Connectivity Examples
+.. |ConExDesc| replace:: Example scripts and notebooks illustrating the use of connectivity metrics in Syncopy
+.. |ConAdv| replace:: Advanced Topics in Connectivity 
+.. |ConAdvDesc| replace:: Technical details and notes for advanced users/developers
+
+Still no luck finding what you're looking for? Try using the :ref:`search <search>` function. 
+
+Sitemap
+-------
 .. toctree::
    :maxdepth: 2
 
@@ -30,9 +121,12 @@ Contents
    user/users.rst    
    developer/developers.rst   
 
-
 Indices and tables
-==================
-
+^^^^^^^^^^^^^^^^^^
 * :ref:`genindex`
 * :ref:`search`
+
+Contact
+-------
+To report bugs or ask questions, please use our GitHub issue tracker. For
+general inquiries, please contact syncopy (at) esi-frankfurt.de.
diff --git a/doc/source/syncopy_logo.pdf b/doc/source/_static/syncopy_logo.pdf
similarity index 100%
rename from doc/source/syncopy_logo.pdf
rename to doc/source/_static/syncopy_logo.pdf
diff --git a/doc/source/syncopy_logo.png b/doc/source/_static/syncopy_logo.png
similarity index 100%
rename from doc/source/syncopy_logo.png
rename to doc/source/_static/syncopy_logo.png
diff --git a/doc/source/developer/datatype.rst b/doc/source/developer/datatype.rst
index 4d14d0937..802c10180 100644
--- a/doc/source/developer/datatype.rst
+++ b/doc/source/developer/datatype.rst
@@ -1,6 +1,6 @@
 .. _syncopy-data-classes:
 
-Syncopy data classes
+Syncopy Data Classes
 ====================
 
 The data structure in Syncopy is based around the idea that all
@@ -9,7 +9,7 @@ example, a multi-channel local field potential can be stored as a
 two-dimensional `float` array with the dimensions being time (sample) and
 channel. Hence,
 
-.. note:: Each Syncopy data object is simply an anotated multi-dimensional array.
+.. note:: Each Syncopy data object is simply an annotated multi-dimensional array.
 
 This array is always stored in the :attr:`data` property and can be
 indexed using `NumPy indexing
@@ -29,10 +29,9 @@ classes (see also `Wikipedia
 
 The bottom classes in the class tree are for active use in analyses.
 
-The usable Syncopy data classes
--------------------------------
-
-The classes that
+Usable Syncopy Data Classes
+----------------------------
+The following classes can be instantiated at the package level (``spy.AnalogData(...)`` etc.):
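+
+A minimal sketch of such a package-level instantiation (here creating an empty object;
+the concrete classes are listed below):
+
+.. code-block:: python
+
+    import syncopy as spy
+
+    # create an (empty) AnalogData object directly from the package namespace
+    adata = spy.AnalogData()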
 
 .. autosummary::
 
diff --git a/doc/source/developer/developer_api.rst b/doc/source/developer/developer_api.rst
index ec11bb13b..71435c794 100644
--- a/doc/source/developer/developer_api.rst
+++ b/doc/source/developer/developer_api.rst
@@ -16,16 +16,13 @@ syncopy.datatype
     syncopy.datatype.discrete_data.DiscreteData
 
 
-syncopy.specest
-^^^^^^^^^^^^^^^
+syncopy.misc
+^^^^^^^^^^^^
 
 .. autosummary::
     :toctree: _stubs
 
-    syncopy.specest.mtmfft.mtmfft
-    syncopy.specest.mtmfft.MultiTaperFFT
-    syncopy.specest.wavelet.wavelet
-    syncopy.specest.wavelet.WaveletTransform
+    syncopy.tests.misc.generate_artificial_data
 
 
 syncopy.shared
@@ -39,3 +36,15 @@ syncopy.shared
     syncopy.shared.errors.SPYTypeError
     syncopy.shared.errors.SPYValueError
     syncopy.shared.errors.SPYIOError
+
+
+syncopy.specest
+^^^^^^^^^^^^^^^
+
+.. autosummary::
+    :toctree: _stubs
+
+    syncopy.specest.mtmfft.mtmfft
+    syncopy.specest.mtmfft.MultiTaperFFT
+    syncopy.specest.wavelet.wavelet
+    syncopy.specest.wavelet.WaveletTransform
diff --git a/doc/source/developer/io.rst b/doc/source/developer/io.rst
index b3f920648..b2fd56025 100644
--- a/doc/source/developer/io.rst
+++ b/doc/source/developer/io.rst
@@ -1,32 +1,32 @@
-Reading from and writing data to disk
-=====================================
+Reading and Writing Data
+=========================
 
 .. contents::
     Contents
     :local:
 
 
-The Syncopy data format (``*.spy``)
+The Syncopy Data Format (``*.spy``)
 -----------------------------------
 
-As each Syncopy data object is nothing more than an anotated multi-dimensional
-array each object is usually stored in 
+As each Syncopy data object is simply an annotated multi-dimensional
+array, every object is stored as
 
 1. a binary file for the data arrays and
 2. a human-readable file for metadata.
 
 Syncopy aims to be scalable for very large files that don't fit into memory. To
-cope with those kinds of files, it is usually necessary to stream data from and
-to disk only on demand. A file format that is well-established for this 
-purpose is `HDF5 <https://www.hdfgroup.org/>`_, which is therefore the default
+cope with such files, it is usually necessary to perform on-demand streaming 
+of data from and to disk. A file format that is well-established for this 
+purpose is `HDF5 <https://www.hdfgroup.org/>`_, which is, therefore, the default
 storage backend of Syncopy. In addition, metadata are stored in `JSON
-<https://en.wikipedia.org/wiki/JSON>`_, which is both easily human-readable 
+<https://en.wikipedia.org/wiki/JSON>`_, which is both easily human- 
 and machine-readable.
 
-The data files are usually stored in a folder called ``<basename>.spy``, which
-can contain multiple data of different data classes that have been recorded
-simulatenously, e.g. spikes and local field potentials. The standard naming
-pattern of the data files is the following
+By default, Syncopy's data files are stored in a folder called ``<basename>.spy``, which
+can contain the on-disk representations of multiple objects of different classes
+(e.g., spikes and local field potentials that have been recorded simultaneously). 
+The standard naming pattern of Syncopy's data files is as follows:
 
 :: 
 
@@ -40,8 +40,8 @@ pattern of the data files is the following
 The ``<dataclass>`` specifies the type of data that is stored in the file, i.e.
 one of the :ref:`syncopy-data-classes`. The ``<tag>`` part of the filename is
 user-defined to distinguish data of the same data class, that should be kept
-separate, e.g. data from separate electrode arrays. The data can be loaded into
-Python using the :func:`syncopy.load` function.
+separate, e.g. data from separate electrode arrays. Data can be loaded using 
+the :func:`syncopy.load` function.
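+
+A minimal sketch of loading a stored object (``session1.spy`` and the tag ``lfp`` are
+hypothetical example names):
+
+.. code-block:: python
+
+    import syncopy as spy
+
+    # load one object from a Syncopy container, identified by its user-defined tag
+    data = spy.load("session1.spy", tag="lfp")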
 
 
 **Example folder**
@@ -66,13 +66,13 @@ Python using the :func:`syncopy.load` function.
 
 
 
-Structure of the data file (HDF5)
+Structure of the Data File (HDF5)
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 The HDF5 file contains some metadata (`HDF5 attributes
 <http://docs.h5py.org/en/stable/high/attr.html>`_) in its header (partially
-redundant with JSON file), the ``data`` array in binary form (`HDF5 dataset
-<http://docs.h5py.org/en/stable/high/dataset.html>`_), and a ``[nTrials x
+redundant with the corresponding JSON file), the ``data`` array in binary form 
+(`HDF5 dataset <http://docs.h5py.org/en/stable/high/dataset.html>`_), and a ``[nTrials x
 3+k]``-sized ``trialdefinition`` array containing information about the trials
 defined on the data (trial_start, trial_stop, trial_triggeroffset, trialinfo_1,
 trialinfo_2, ..., trialinfo_k).
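+
+A minimal sketch for peeking into such a file directly with ``h5py`` (the filename is a
+hypothetical example):
+
+.. code-block:: python
+
+    import h5py
+
+    # open the HDF5 container read-only and list its datasets and header attributes
+    with h5py.File("session1_lfp.analog", "r") as h5f:
+        print(list(h5f.keys()))   # e.g., ['data', 'trialdefinition']
+        print(dict(h5f.attrs))    # partially redundant with the JSON file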
@@ -99,7 +99,7 @@ the data directly include `HDFView
 <https://github.com/HDFGroup/hdf-compass>`_.
 
 
-Structure of the metadata file (JSON)
+Structure of the Metadata File (JSON)
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 The JSON file contains all metadata relevant to the data object. The required fields
@@ -177,7 +177,7 @@ Example JSON file:
 
     
 
-Reading other data formats
+Reading Other File Formats
 --------------------------
 
 Reading and writing other data formats is currently not supported. Getting your
@@ -189,4 +189,4 @@ empty data object (e.g. :class:`syncopy.AnalogData`) and fills the ``data``
 property with an index-able array as well as all relevant metadata properties.
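+
+A minimal sketch of such a routine (all names and constructor keywords below are
+illustrative assumptions, not a fixed Syncopy API):
+
+.. code-block:: python
+
+    import numpy as np
+    import syncopy as spy
+
+    # stand-in for samples parsed from a custom file format (time x channel)
+    arr = np.random.randn(10000, 16)
+
+    # create an AnalogData object and fill its ``data`` property plus metadata
+    adata = spy.AnalogData(data=arr, samplerate=1000)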
 
 In future releases of Syncopy, example reading routines and/or exporting
-functions will be provided.
\ No newline at end of file
+functions will be provided.
diff --git a/doc/source/developer/tools.rst b/doc/source/developer/tools.rst
index 4315b21d9..a39320bd9 100644
--- a/doc/source/developer/tools.rst
+++ b/doc/source/developer/tools.rst
@@ -1,10 +1,8 @@
-Tools for developing Syncopy
+Tools for Developing Syncopy
 ============================
-Some profoundly insightful text here...
+Coming soon...
 
-
-
-Input parsing and error checking
+Input Parsing and Error Checking
 --------------------------------
 
 .. autosummary::
@@ -23,15 +21,15 @@ Writing A New Analysis Routine
 Any analysis routine that operates on Syncopy data is always structured in three
 (hierarchical) parts:
 
-1. A numerical function based on NumPy/SciPy only that works on a
+1. A numerical function based only on NumPy/SciPy that works on a
    :class:`numpy.ndarray` and returns a :class:`numpy.ndarray`. 
-2. A wrapper class that handles output initializiation, potential
-   parallelization and post-computation cleanup. This should be based on the
+2. A wrapper class that handles output initialization, potential
+   parallelization and post-computation cleanup. The class should be based on the
    abstract class :class:`syncopy.shared.computational_routine.ComputationalRoutine`
 3. Another wrapping metafunction handling method selection, parameterization and
-   error checking is then provided for the users.
+   error checking is then provided for user interaction.
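+
+A minimal sketch of stage 1, i.e., a numerical function relying only on NumPy
+(the function itself is an illustrative toy example, not part of Syncopy):
+
+.. code-block:: python
+
+    import numpy as np
+
+    def moving_average(arr, winlen=5):
+        """Toy stage-1 kernel: takes a NumPy array, returns a NumPy array."""
+        kernel = np.ones(winlen) / winlen
+        # smooth along the first (sample) axis, one channel at a time
+        return np.apply_along_axis(np.convolve, 0, arr, kernel, mode="same")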
 
-An example for this type of structure is the multi-taper fourier analysis. The
+An example of this type of structure is the multi-taper fourier analysis. The
 corresponding stages here are
 
 1. Numerical function: :func:`syncopy.specest.mtmfft`
@@ -42,4 +40,3 @@ corresponding stages here are
 
 For a detailed walk-through explaining the intricacies of writing an analysis
 routine, please refer to the :doc:`compute_kernels`.
-
diff --git a/doc/source/user/data_handling.rst b/doc/source/user/data_handling.rst
index 131f0af5f..d0642de25 100644
--- a/doc/source/user/data_handling.rst
+++ b/doc/source/user/data_handling.rst
@@ -1,6 +1,5 @@
-Handling data in Syncopy
+Handling Data in Syncopy
 ========================
-
 Syncopy utilizes a simple data format based on `HDF5
 <https://portal.hdfgroup.org/display/HDF5/HDF5>`_ and `JSON
 <https://en.wikipedia.org/wiki/JSON>`_ (see :doc:`../developer/io` for details).
@@ -14,8 +13,10 @@ importing and exporting engines, for example based on `Neo
 <https://neo.readthedocs.io/en/latest/>`_ or `NWB <https://www.nwb.org/>`_.
 
 
-Reading and saving Syncopy (``*.spy``) data
+Loading and Saving Syncopy (``*.spy``) Data
 -------------------------------------------
+Reading and writing data with Syncopy:
+
 .. autosummary::
 
     syncopy.load
@@ -23,11 +24,21 @@ Reading and saving Syncopy (``*.spy``) data
 
 
 
-Functions for editing data in memory
-------------------------------------
-These functions are useful for editing and slicing data:
+Functions for Editing Syncopy Data Objects
+-------------------------------------------
+Defining trials, selecting data, and padding:
 
 .. autosummary::
 
     syncopy.definetrial
+    syncopy.selectdata
     syncopy.padding
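+
+A minimal sketch of an in-place selection passed to a compute routine (assuming `data`
+is an existing :class:`~syncopy.AnalogData` object):
+
+.. code-block:: python
+
+    import syncopy as spy
+
+    cfg = spy.get_defaults(spy.freqanalysis)
+    cfg.method = 'mtmfft'
+    cfg.taper = 'dpss'
+    cfg.output = 'pow'
+    # restrict the computation to the baseline window without creating a new object
+    cfg.select = {"toilim": [-0.25, 0]}
+    baselineSpectrum = spy.freqanalysis(cfg, data)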
+
+Advanced Topics
+---------------
+More information about Syncopy's data class structure and file format. 
+
+.. toctree::
+
+    ../developer/datatype
+    ../developer/io
diff --git a/doc/source/user/fieldtrip.rst b/doc/source/user/fieldtrip.rst
index 3ab33b950..ef902debd 100644
--- a/doc/source/user/fieldtrip.rst
+++ b/doc/source/user/fieldtrip.rst
@@ -1,24 +1,24 @@
-Syncopy for FieldTrip users
+Syncopy for FieldTrip Users
 ===========================
 
 Syncopy is written in the `Python programming language
 <https://www.python.org/>`_ using the `NumPy <https://www.numpy.org/>`_ and
 `SciPy <https://scipy.org/>`_ libraries for computing as well as `Dask
-<https://dask.org>`_ for parallelization. However, it's call signatures and
-parameter names are designed to mimick the `MATLAB <https://mathworks.com>`_
+<https://dask.org>`_ for parallelization. However, its call signatures and
+parameter names are designed to mimic the `MATLAB <https://mathworks.com>`_
 analysis toolbox `FieldTrip <http://www.fieldtriptoolbox.org>`_.
 
-The scope of Syncopy is limited to only parts of FieldTrip, in particular
+The scope of Syncopy is limited to emulating parts of FieldTrip, in particular
 spectral analysis of electrophysiology data. Therefore, M/EEG-specific routines
-such as loading M/EEG file types, source localization, ..., are currently not
-covered by Syncopy. For a Python toolbox tailored to M/EEG data analysis, see
+such as loading M/EEG file types, source localization, etc. are currently not
+included in Syncopy. For a Python toolbox tailored to M/EEG data analysis, see
 for example the `MNE Project <https://www.martinos.org/mne/>`_.
 
 .. contents::
     Contents
     :local:
 
-Translating MATLAB code to Python
+Translating MATLAB Code to Python
 ---------------------------------
 
 For translating code from MATLAB to Python there are several guides, e.g.
@@ -27,26 +27,68 @@ For translating code from MATLAB to Python there are several guides, e.g.
 * `NumPy for Matlab users <https://docs.scipy.org/doc/numpy/user/numpy-for-matlab-users.html>`_
 * `MATLAB to Python - A Migration Guide by Enthought <https://www.enthought.com/white-paper-matlab-to-python>`_
 
-Key differences between Python and MATLAB
+Key Differences between Python and MATLAB
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 While the above links cover differences between Python and MATLAB to a great
 extent, we highlight here what we think are the most important differences:
 
-* Indexing is different: Python array indexing starts at 0. The end of a range
-  in Python is not included
-* Data in Python is not necessarily copied and may be manipulated in-place.
-* The powerful `import system of Python <https://docs.python.org/3/reference/import.html>`_
+* Indexing is different - Python array indexing starts at 0:
+
+  >>> x = [1, 2, 3, 4]
+  >>> x[0]
+  1
+
+  Python ranges are half-open intervals ``[left, right)``, i.e., the right boundary 
+  is not included:
+
+  >>> list(range(1, 4))
+  [1, 2, 3]
+  
+* Data in Python is not necessarily copied and may be manipulated in-place:
+
+  >>> x = [1, 2, 3, 4]
+  >>> y = x
+  >>> x[0] = -1
+  >>> y
+  [-1, 2, 3, 4]
+
+  To prevent this, an explicit copy of a `list`, `numpy.array`, etc. can be requested:
+
+  >>> x = [1, 2, 3, 4]
+  >>> y = list(x)
+  >>> x[0] = -1
+  >>> y 
+  [1, 2, 3, 4]
+
+* Python's powerful `import system <https://docs.python.org/3/reference/import.html>`_
   allows simple function names (e.g., :func:`~syncopy.load`) without worrying
-  about overwriting built-in functions.
+  about overwriting built-in functions:
+  
+  >>> import syncopy as spy
+  >>> import numpy as np 
+  >>> spy.load 
+  <function syncopy.io.load_spy_container.load(filename, tag=None, dataclass=None, checksum=False, mode='r+', out=None)>
+  >>> np.load
+  <function numpy.load(file, mmap_mode=None, allow_pickle=False, fix_imports=True, encoding='ASCII')>
+  
 * `Project-specific environments <https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html>`_
-  allow reproducable and customizable working environments.
-
-Translating FieldTrip calls to Syncopy
+  allow reproducible and customizable work setups.
+
+  .. code-block:: bash
+  
+      $ conda activate np17
+      $ python -c "import numpy; print(numpy.version.version)"
+      1.17.2
+      $ conda activate np15
+      $ python -c "import numpy; print(numpy.version.version)"
+      1.15.4
+
+Translating FieldTrip Calls to Syncopy
 --------------------------------------
 
 Using a FieldTrip function in MATLAB usually works via constructing a ``cfg``
-``struct`` that contains all configured parameters:
+``struct`` that contains all necessary configuration parameters:
 
 .. code-block:: matlab
 
@@ -56,8 +98,9 @@ Using a FieldTrip function in MATLAB usually works via constructing a ``cfg``
     cfg.option2 = [10, 20];
     result = ft_something(cfg);
 
-In Syncopy this struct is a Python dictionary that can automatically be filled
-with the defaults of any function.
+Syncopy emulates this concept using a :class:`syncopy.StructDict` (really just a
+slightly modified Python dictionary) that can automatically be filled with 
+default settings of any function.
 
 .. code-block:: python
 
@@ -67,9 +110,9 @@ with the defaults of any function.
     # or
     cfg.option1 = True
     cfg.option2 = [10, 20]
-    result = spy.something(cfg=cfg)
+    result = spy.something(cfg)
 
-A FieldTrip power spectrum in Syncopy
+A FieldTrip Power Spectrum in Syncopy
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 For example, a power spectrum calculated with FieldTrip via
@@ -86,7 +129,7 @@ For example, a power spectrum calculated with FieldTrip via
 
 can be computed in Syncopy with
 
-.. code-block:: matlab
+.. code-block:: python
       
     cfg = spy.get_defaults(spy.freqanalysis)
     cfg.method = 'mtmfft';
@@ -97,17 +140,16 @@ can be computed in Syncopy with
     spec = spy.freqanalysis(cfg, data)
 
 
-Key differences between FieldTrip and Syncopy
+Key Differences between FieldTrip and Syncopy
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 * FieldTrip has more features. Syncopy is still in early development and will
-  never cover the rich featureset of FieldTrip.
-* FieldTrip supports many data formats. Syncopy
-* Syncopy data objects are never fully loaded into memory.
-
-
+  never cover the rich feature set of FieldTrip.
+* FieldTrip supports many data formats. Syncopy currently only supports data import 
+  from FieldTrip (see below). 
+* Syncopy data objects use disk-streaming and are thus never fully loaded into memory.
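+
+  A minimal sketch of what this means in practice (assuming `adata` is an existing
+  :class:`~syncopy.AnalogData` object; only the requested slice is read from disk):
+
+  .. code-block:: python
+
+      # NumPy-style indexing of the on-disk ``data`` array
+      snippet = adata.data[:1000, :2]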
 
-Exchanging data between FieldTrip and Syncopy
+Exchanging Data between FieldTrip and Syncopy
 ---------------------------------------------
 
 Data created with Syncopy can be loaded into MATLAB using the `matlab-syncopy
diff --git a/doc/source/user/users.rst b/doc/source/user/users.rst
index 881de24cf..b426f0558 100644
--- a/doc/source/user/users.rst
+++ b/doc/source/user/users.rst
@@ -1,9 +1,9 @@
-***********************
-User's guide to Syncopy
-***********************
+******************
+Syncopy User Guide
+******************
 
 This section of the Syncopy documentation contains information aimed at users
-who primarily want to apply existing analysis functions on their data. This
+who primarily want to apply existing analysis functions to their data. This
 usually entails writing analysis scripts operating on a given list of data
 files.
 
diff --git a/syncopy/datatype/base_data.py b/syncopy/datatype/base_data.py
index 938a91ed6..54da8c0c5 100644
--- a/syncopy/datatype/base_data.py
+++ b/syncopy/datatype/base_data.py
@@ -4,7 +4,7 @@
 # 
 # Created: 2019-01-07 09:22:33
 # Last modified by: Stefan Fuertinger [stefan.fuertinger@esi-frankfurt.de]
-# Last modification time: <2019-10-08 14:08:32>
+# Last modification time: <2019-10-11 12:22:02>
 
 # Builtin/3rd party package imports
 import getpass
@@ -1221,75 +1221,21 @@ class Selector():
     data : Syncopy data object
         A non-empty Syncopy data object
     select : dict or :class:`~syncopy.datatype.base_data.StructDict` or None
-        Python dictionary or Syncopy :class:`~syncopy.datatype.base_data.StructDict` 
-        formatted for data selection. Supported keys are
+        Dictionary or :class:`~syncopy.datatype.base_data.StructDict` with keys
+        specifying data selectors. **Note**: some keys are only valid for certain types
+        of Syncopy objects, e.g., "foi" is not a valid selector for an 
+        :class:`~syncopy.AnalogData` object. Supported keys are (please see 
+        :func:`~syncopy.selectdata` for a detailed description of each selector):
     
-        * 'trials' : list of integers
-          trial numbers to be selected; can include repetitions and need not
-          be sorted (e.g., ``trials = [0, 1, 0, 0, 2]`` is valid) but must
-          be finite and not NaN. 
+        * 'trials' : list (integers)
         * 'channels' : list (integers or strings), slice or range
-          channel-specification; can be a list of channel names
-          (``['channel3', 'channel1']``), a list of channel indices (``[3, 5]``),
-          slice (``slice(3, 10)``) or range (``range(3, 10)``). Note that
-          following Python conventions, channels are counted starting at zero, and
-          range and slice selections are half-open intervals of the form `[low, high)`, 
-          i.e., low is included , high is excluded. Thus, ``channels = [0, 1, 2]``
-          or ``channels = slice(0, 3)`` selects the first up to (and including)
-          the third channel. Selections can be unsorted and may include
-          repetitions but must match exactly, be finite and not NaN. 
-        * 'toi' : list
-          time-points to be selected (in seconds) in each trial. Timing is
-          expected to be on a by-trial basis (e.g., relative to trigger onsets). 
-          Selections can be approximate, unsorted and may include repetitions
-          but must be finite and not NaN. Fuzzy matching is performed for
-          approximate selections (i.e., selected time-points are close but not
-          identical to timing information found in `data`) using a nearest-
-          neighbor search for elements of `toi` in `data.time`. 
-        * 'toilim' : list
-          time-window ``[tmin, tmax]`` (in seconds) to be extracted from
-          each trial. Window specifications must be sorted (e.g., ``[2.2, 1.1]``
-          is invalid) and not NaN but may be unbounded (e.g., ``[1.1, np.inf]``
-          is valid). Edges `tmin` and `tmax` are included in the selection. 
-        * 'foi' : list
-          frequencies to be selected (in Hz). Selections can be approximate,
-          unsorted and may include repetitions but must be finite and not NaN.
-          Fuzzy matching is performed for approximate selections (i.e., selected
-          frequencies are close but not identical to frequencies found in
-          `data`) using a nearest-neighbor search for elements of `foi` in
-          `data.freq`. 
-        * 'foilim' : list
-          frequency-window ``[fmin, fmax]`` (in Hz) to be extracted. Window
-          specifications must be sorted (e.g., ``[90, 70]`` is invalid) and
-          not NaN but may be unbounded (e.g., ``[-np.inf, 60.5]`` is valid).
-          Edges `fmin` and `fmax` are included in the selection. 
+        * 'toi' : list (floats)
+        * 'toilim' : list (floats [tmin, tmax])
+        * 'foi' : list (floats)
+        * 'foilim' : list (floats [fmin, fmax])
         * 'tapers' : list (integers or strings), slice or range
-          taper-specification; can be a list of taper names
-          (``['dpss-win-1', 'dpss-win-3']``), a list of taper indices
-          (``[3, 5]``), slice (``slice(3, 10)``) or range (``range(3, 10)``). Note that
-          following Python conventions, tapers are counted starting at zero, and
-          range and slice selections are half-open intervals of the form `[low, high)`, 
-          i.e., low is included , high is excluded. Thus, ``tapers = [0, 1, 2]``
-          or ``tapers = slice(0, 3)`` selects the first up to (and including)
-          the third taper. Selections can be unsorted and may include
-          repetitions but must match exactly, be finite and not NaN. 
         * 'units' : list (integers or strings), slice or range
-          unit-specification; can be a list of unit names
-          (``['unit10', 'unit3']``), a list of unit indices (``[3, 5]``),
-          slice (``slice(3, 10)``) or range (``range(3, 10)``). Note that
-          following Python conventions, units are counted starting at zero, and
-          range and slice selections are half-open intervals of the form `[low, high)`, 
-          i.e., low is included , high is excluded. Thus, ``units = [0, 1, 2]``
-          or ``units = slice(0, 3)`` selects the first up to (and including)
-          the third unit. Selections can be unsorted and may include
-          repetitions but must match exactly, be finite and not NaN.
-        * 'eventids' : list of integers, slice or range
-          event-id-specification; can be a list of event-id codes (``[2, 0, 1]``),
-          slice (``slice(0, 2)``) or range (``range(0, 2)``). Note that
-          following Python conventions, range and slice selections are half-open 
-          intervals of the form `[low, high)`, i.e., low is included , high is excluded. 
-          Selections can be unsorted and may include repetitions but must match exactly, be
-          finite and not NaN.
+        * 'eventids' : list (integers), slice or range
 
         Any property of `data` that is not specifically accessed via one of
         the above keys is taken as is, e.g., ``select = {'trials': [1, 2]}``
diff --git a/syncopy/datatype/continuous_data.py b/syncopy/datatype/continuous_data.py
index 12b807f7a..446e4495e 100644
--- a/syncopy/datatype/continuous_data.py
+++ b/syncopy/datatype/continuous_data.py
@@ -4,7 +4,7 @@
 # 
 # Created: 2019-03-20 11:11:44
 # Last modified by: Stefan Fuertinger [stefan.fuertinger@esi-frankfurt.de]
-# Last modification time: <2019-10-08 13:56:53>
+# Last modification time: <2019-10-11 13:38:17>
 """Uniformly sampled (continuous data).
 
 This module holds classes to represent data with a uniformly sampled time axis.
@@ -21,7 +21,7 @@
 
 # Local imports
 from .base_data import BaseData, VirtualData, FauxTrial
-from .data_methods import _selectdata_continuous, definetrial
+from .data_methods import definetrial
 from syncopy.shared.parsers import scalar_parser, array_parser, io_parser
 from syncopy.shared.errors import SPYValueError, SPYIOError
 import syncopy as spy
@@ -108,7 +108,6 @@ def selectdata(self, trials=None, deepcopy=False, **kwargs):
         """
         Docstring mostly pointing to ``selectdata``
         """
-        return _selectdata_continuous(self, trials, deepcopy, **kwargs)
 
     # Helper function that reads a single trial into memory
     @staticmethod
diff --git a/syncopy/datatype/data_methods.py b/syncopy/datatype/data_methods.py
index 6a58f5b8d..3b1bf38b3 100644
--- a/syncopy/datatype/data_methods.py
+++ b/syncopy/datatype/data_methods.py
@@ -4,7 +4,7 @@
 # 
 # Created: 2019-02-25 11:30:46
 # Last modified by: Stefan Fuertinger [stefan.fuertinger@esi-frankfurt.de]
-# Last modification time: <2019-10-08 16:52:11>
+# Last modification time: <2019-10-11 13:47:16>
 
 # Builtin/3rd party package imports
 import numbers
@@ -18,308 +18,216 @@
 __all__ = ["selectdata", "definetrial", "padding"]
 
 
-def selectdata(obj, trials=None, deepcopy=False, exact_match=False, **kwargs):
+def selectdata(data, trials=None, channels=None, toi=None, toilim=None, foi=None,
+               foilim=None, tapers=None, units=None, eventids=None):
     """
-    Docstring coming soon(ish)
-
-    (tuple) -> value-range selection (e.g., freq=(5,10) frequency range b/w 5-10Hz)
-    slice -> index-range selection (e.g. freq=slice(5,10), frequencies no. 5 - 9)
-    [list-like] -> multi-index-selection (e.g. freq=[5,7,10], frequencies no. 5, 7, 10)
-    float -> single-value selection (e.g. freq=5.0, frequency of 5Hz)
-    int -> single-index selection (e.g., freq=5, 4th frequency in spectrum)
-    """
-
-    # Depending on input object, pass things right on to actual working routines
-    if any(["ContinuousData" in str(base) for base in obj.__class__.__bases__]):
-        return _selectdata_continuous(obj, trials, deepcopy, exact_match, **kwargs)
-    elif any(["DiscreteData" in str(base) for base in obj.__class__.__bases__]):
-        raise NotImplementedError("Coming soon")
-    else:
-        raise SPYTypeError(obj, varname="obj", expected="SyNCoPy data object")
+    Select a subset of data from a Syncopy object
+    
+    **##################### Hello Alpha users! #####################** 
     
+    This function is not implemented yet; the version of 
+    Syncopy you're using only supports in-place selection via a `select` dictionary. 
+    The keys supported by the `select` dictionary are identical to the keyword
+    arguments discussed below, e.g., the following code snippet works:
+    
+    >>> select = {"toilim" : [-0.25, 0]}
+    >>> cfg = spy.get_defaults(spy.freqanalysis)
+    >>> cfg.select = select
+    >>> spy.freqanalysis(cfg, data)
 
-def _selectdata_continuous(obj, trials, deepcopy, exact_match, **kwargs):
+    **##############################################################** 
 
-    # Make sure provided object is inherited from `ContinuousData`
-    if not any(["ContinuousData" in str(base) for base in obj.__class__.__mro__]):
-        raise SPYTypeError(obj, varname="obj", expected="SpkeWave ContinuousData object")
+    
+    **Usage summary**
+    
+    List of Syncopy data objects and respective valid data selectors:
+    
+    :class:`~syncopy.AnalogData` : trials, channels, toi/toilim
+        Examples
         
-    # Convert provided selectors to array indices
-    trials, selectors = _makeidx(obj, trials, deepcopy, exact_match, **kwargs)
-
-    # Make sure our Boolean switches are actuall Boolean
-    if not isinstance(deepcopy, bool):
-        raise SPYTypeError(deepcopy, varname="deepcopy", expected="bool")
-    if not isinstance(exact_match, bool):
-        raise SPYTypeError(exact_match, varname="exact_match", expected="bool")
-
-    # If time-based selection is requested, make some necessary preparations
-    if "time" in selectors.keys():
-        time_sel = selectors.pop("time")
-        time_ref = np.array(obj.time[trials[0]])
-        time_slice = [None, None]
-        if isinstance(time_sel, tuple):
-            if len(time_sel) != 2:
-                raise SPYValueError(legal="two-element tuple",
-                                    actual="tuple of length {}".format(str(len(time_sel))),
-                                    varname="time")
-            for tk, ts in enumerate(time_sel):
-                if ts is not None:
-                    if not exact_match:
-                        time_slice[tk] = time_ref[np.abs(time_ref - ts).argmin()]
-                    else:
-                        try:
-                            time_slice[tk] = list(time_ref).index(ts)
-                        except:
-                            raise SPYValueError(legal="exact time-point", actual=ts)
-            time_slice = slice(*time_slice)
-        elif isinstance(time_sel, slice):
-            if not len(range(*time_sel.indices(time_ref.size))):
-                lgl = "non-empty time-selection"
-                act = "empty selector"
-                raise SPYValueError(legal=lgl, varname=lbl, actual=act)
-            time_slice = slice(time_sel.start, time_sel.stop, time_sel.step)
-        elif isinstance(time_sel, (list, np.ndarray)):
-            if not set(time_sel).issubset(range(time_ref.size))\
-               or np.unique(np.diff(time_sel)).size != 1:
-                vname = "contiguous list of time-points"
-                raise SPYValueError(legal=lgl, varname=vname)
-            time_slice = slice(time_sel[0], time_sel[-1] + 1)
-        else:
-            raise SPYTypeError(time_sel, varname="time-selection",
-                               expected="tuple, slice or list-like")
-    else:
-        time_slice = slice(0, None)
-
-        # SHALLOWCOPY
-        sampleinfo = np.empty((trials.size, 2))
-        for sk, trl in enumerate(trials):
-            sinfo = range(*obj.sampleinfo[trl, :])[time_slice]
-            sampleinfo[sk, :] = [sinfo.start, sinfo.stop - 1]
+        >>> spy.selectdata(data, trials=[0, 3, 5], channels=["channel01", "channel02"])
+        >>> cfg = spy.StructDict() 
+        >>> cfg.trials = [5, 3, 0]; cfg.toilim = [0.25, 0.5]
+        >>> spy.selectdata(cfg, data)
         
-            
-    # Build array-multi-index and shape of target array based on dimensional selectors
-    idx = [slice(None)] * len(obj.dimord)
-    target_shape = list(obj.data.shape)
-    for lbl, selector in selectors.items():
-        id = obj.dimord.index(lbl)
-        idx[id] = selector
-        if isinstance(selector, slice):
-            target_shape[id] = len(range(*selector.indices(obj.data.shape[id])))
-        elif isinstance(selector, int):
-            target_shape[id] = 1
-        else:
-            if not deepcopy:
-                deepcopy = True
-            target_shape[id] = len(selector)
-    tid = obj.dimord.index("time")
-    idx[tid] = time_slice
+    :class:`~syncopy.SpectralData` : trials, channels, toi/toilim, foi/foilim, tapers
+        Examples
+        
+        >>> spy.selectdata(data, trials=[0, 3, 5], channels=["channel01", "channel02"])
+        >>> cfg = spy.StructDict()
+        >>> cfg.foi = [30, 40, 50]; cfg.tapers = slice(2, 4)
+        >>> spy.selectdata(cfg, data)
+        
+    :class:`~syncopy.EventData` : trials, toi/toilim, eventids
+        Examples
+        
+        >>> spy.selectdata(data, toilim=[-1, 2.5], eventids=[0, 1])
+        >>> cfg = spy.StructDict()
+        >>> cfg.trials = [0, 0, 1, 0]; cfg.eventids = slice(2, None)
+        >>> spy.selectdata(cfg, data)
+        
+    :class:`~syncopy.SpikeData` : trials, toi/toilim, units
+        Examples
+        
+        >>> spy.selectdata(data, toilim=[-1, 2.5], units=range(0, 10))
+        >>> cfg = spy.StructDict()
+        >>> cfg.toi = [1.25, 3.2]; cfg.trials = [0, 1, 2, 3]
+        >>> spy.selectdata(cfg, data)
     
-    # Allocate shallow copy for target
-    target = obj.copy()
-
-    # First, we handle deep copies of `obj`
-    if deepcopy:
-
-        # Re-number trials: offset correction + close gaps b/w trials
-        sampleinfo = obj.sampleinfo[trials, :] - obj.sampleinfo[trials[0], 0]
-        stop = 0
-        for sk in range(sampleinfo.shape[0]):
-            sinfo = range(*sampleinfo[sk, :])[time_slice]
-            nom_len = sinfo.stop - sinfo.start
-            start = min(sinfo.start, stop)
-            real_len = min(nom_len, sinfo.stop - stop)
-            sampleinfo[sk, :] = [start, start + nom_len]
-            stop = start + real_len + 1
-            
-        # Based on requested trials, set shape of target array (accounting
-        # for overlapping trials)
-        target_shape[tid] = sampleinfo[-1][1]
-
-        # Allocate target memorymap
-        target._filename = obj._gen_filename()
-        target_dat = open_memmap(target._filename, mode="w+",
-                                 dtype=obj.data.dtype, shape=target_shape)
-        del target_dat
-
-        # The crucial part here: `idx` is a "local" by-trial index of the
-        # form `[:,:,2:10]` whereas `target_idx` has to keep track of the
-        # global progression in `target_data`
-        for sk, trl in enumerate(trials):
-            source_trl = self._copy_trial(trialno,
-                                            obj._filename,
-                                            obj.trl,
-                                            obj.hdr,
-                                            obj.dimord,
-                                            obj.segmentlabel)
-            target_idx[tid] = slice(*sampleinfo[sk, :])
-            target_dat = open_memmap(target._filename, mode="r+")[target_idx]
-            target_dat[...] = source_trl[idx]
-            del target_dat
-
-        # FIXME: Clarify how we want to do this...
-        target._dimlabels["sample"] = sampleinfo
-
-        # Re-number samples if necessary
+    **Note** Any property that is not specifically accessed via one of the provided
+    selectors is taken as is, e.g., ``spy.selectdata(data, trials=[1, 2])``
+    selects the entire contents of trials no. 2 and 3, while 
+    ``spy.selectdata(data, channels=range(0, 50))`` selects the first 50 channels
+    of `data` across all defined trials. Consequently, if no keywords are specified,
+    the entire contents of `data` is selected. 
+    
+    Full documentation below. 
+    
+    Parameters
+    ----------
+    data : Syncopy data object
+        A non-empty Syncopy data object. **Note** the type of `data` determines
+        which keywords can be used. Some keywords are only valid for certain 
+        types of Syncopy objects, e.g., "foi" is not a valid selector for an 
+        :class:`~syncopy.AnalogData` object. 
+    trials : list (integers) or None
+        List of integers representing trial numbers to be selected; can include 
+        repetitions and need not be sorted (e.g., ``trials = [0, 1, 0, 0, 2]`` 
+        is valid) but must be finite and not NaN. If `trials` is `None`, all trials 
+        are selected. 
+    channels : list (integers or strings), slice, range or None
+        Channel-selection; can be a list of channel names (``['channel3', 'channel1']``), 
+        a list of channel indices (``[3, 5]``), a slice (``slice(3, 10)``) or 
+        range (``range(3, 10)``). Note that following Python conventions, channels 
+        are counted starting at zero, and range and slice selections are half-open 
+        intervals of the form `[low, high)`, i.e., low is included, high is 
+        excluded. Thus, ``channels = [0, 1, 2]`` or ``channels = slice(0, 3)`` 
+        selects the first up to (and including) the third channel. Selections can 
+        be unsorted and may include repetitions but must match exactly, be finite 
+        and not NaN. If `channels` is `None`, all channels are selected. 
+    toi : list (floats) or None
+        Time-points to be selected (in seconds) in each trial. Timing is expected 
+        to be on a by-trial basis (e.g., relative to trigger onsets). Selections 
+        can be approximate, unsorted and may include repetitions but must be 
+        finite and not NaN. Fuzzy matching is performed for approximate selections 
+        (i.e., selected time-points are close but not identical to timing information 
+        found in `data`) using a nearest-neighbor search for elements of `toi`. 
+        If `toi` is `None`, the entire time-span in each trial is selected. 
+    toilim : list (floats [tmin, tmax]) or None
+        Time-window ``[tmin, tmax]`` (in seconds) to be extracted from each trial. 
+        Window specifications must be sorted (e.g., ``[2.2, 1.1]`` is invalid) 
+        and not NaN but may be unbounded (e.g., ``[1.1, np.inf]`` is valid). Edges 
+        `tmin` and `tmax` are included in the selection. 
+        If `toilim` is `None`, the entire time-span in each trial is selected. 
+    foi : list (floats) or None
+        Frequencies to be selected (in Hz). Selections can be approximate, unsorted 
+        and may include repetitions but must be finite and not NaN. Fuzzy matching 
+        is performed for approximate selections (i.e., selected frequencies are 
+        close but not identical to frequencies found in `data`) using a nearest-
+        neighbor search for elements of `foi` in `data.freq`. If `foi` is `None`, 
+        all frequencies are selected. 
+    foilim : list (floats [fmin, fmax]) or None
+        Frequency-window ``[fmin, fmax]`` (in Hz) to be extracted. Window 
+        specifications must be sorted (e.g., ``[90, 70]`` is invalid) and not NaN 
+        but may be unbounded (e.g., ``[-np.inf, 60.5]`` is valid). Edges `fmin` 
+        and `fmax` are included in the selection. If `foilim` is `None`, all 
+        frequencies are selected. 
+    tapers : list (integers or strings), slice, range or None
+        Taper-selection; can be a list of taper names (``['dpss-win-1', 'dpss-win-3']``), 
+        a list of taper indices (``[3, 5]``), a slice (``slice(3, 10)``) or range 
+        (``range(3, 10)``). Note that following Python conventions, tapers are 
+        counted starting at zero, and range and slice selections are half-open 
+        intervals of the form `[low, high)`, i.e., low is included, high is 
+        excluded. Thus, ``tapers = [0, 1, 2]`` or ``tapers = slice(0, 3)`` selects 
+        the first up to (and including) the third taper. Selections can be unsorted 
+        and may include repetitions but must match exactly, be finite and not NaN. 
+        If `tapers` is `None`, all tapers are selected. 
+    units : list (integers or strings), slice, range or None
+        Unit-selection; can be a list of unit names (``['unit10', 'unit3']``), a 
+        list of unit indices (``[3, 5]``), a slice (``slice(3, 10)``) or range 
+        (``range(3, 10)``). Note that following Python conventions, units are 
+        counted starting at zero, and range and slice selections are half-open 
+        intervals of the form `[low, high)`, i.e., low is included, high is 
+        excluded. Thus, ``units = [0, 1, 2]`` or ``units = slice(0, 3)`` selects 
+        the first up to (and including) the third unit. Selections can be unsorted 
+        and may include repetitions but must match exactly, be finite and not NaN.
+        If `units` is `None`, all units are selected. 
+    eventids : list (integers), slice, range or None
+        Event-ID-selection; can be a list of event-id codes (``[2, 0, 1]``), slice 
+        (``slice(0, 2)``) or range (``range(0, 2)``). Note that following Python 
+        conventions, range and slice selections are half-open intervals of the 
+        form `[low, high)`, i.e., low is included, high is excluded. Selections 
+        can be unsorted and may include repetitions but must match exactly, be
+        finite and not NaN. If `eventids` is `None`, all events are selected. 
         
-
-        # By-sample copy
-        if trials is None:
-            mem_size = np.prod(target_shape)*self.data.dtype*1024**(-2)
-            if mem_size >= 100:
-                spw_warning("Memory footprint of by-sample selection larger than 100MB",
-                            caller="SyNCoPy core:select")
-            target_dat[...] = self.data[idx]
-            del target_dat
-            self.clear()
-
-        # By-trial copy
-        else:
-            del target_dat
-            sid = self.dimord.index(self.segmentlabel)
-            target_shape[sid] = sum([shp[sid] for shp in np.array(self.shapes)[trials]])
-            target_idx = [slice(None)] * len(self.dimord)
-            target_sid = 0
-            for trialno in trials:
-                source_trl = self._copy_trial(trialno,
-                                                self._filename,
-                                                self.trl,
-                                                self.hdr,
-                                                self.dimord,
-                                                self.segmentlabel)
-                trl_len = source_trl.shape[sid]
-                target_idx[sid] = slice(target_sid, target_sid + trl_len)
-                target_dat = open_memmap(target._filename, mode="r+")[target_idx]
-                target_dat[...] = source_trl[idx]
-                del target_dat
-                target_sid += trl_len
-
-    # Shallow copy: simply create a view of the source memmap
-    # Cover the case: channel=3, all trials!
-    else:
-        target._data = open_memmap(self._filename, mode="r")[idx]
-
-    return target
-
-
-def _selectdata_discrete():
-    pass
-
-
-def _makeidx(obj, trials, deepcopy, exact_match, **kwargs):
-    """
-    Local input parser
+    Returns
+    -------
+    dataselection : Syncopy data object
+        Syncopy data object of the same type as `data` but containing only the 
+        subset specified by provided selectors. 
+        
+    Notes
+    -----
+    This routine represents a convenience function for creating new Syncopy objects
+    based on existing data entities. However, in many situations, the creation 
+    of a new object (and thus the allocation of additional disk space) might not 
+    be necessary: all Syncopy compute kernels, such as :func:`~syncopy.freqanalysis`,
+    support **in-place data selection**. 
+    
+    Consider the following example: assume `data` is an :class:`~syncopy.AnalogData` 
+    object representing 220 trials of LFP recordings containing baseline (between 
+    second -0.25 and 0) and stimulus-on data (on the interval [0.25, 0.5]). 
+    To compute the baseline spectrum, data-selection does **not**
+    have to be performed before calling :func:`~syncopy.freqanalysis` but instead
+    can be done in-place:
+    
+    >>> import syncopy as spy
+    >>> cfg = spy.get_defaults(spy.freqanalysis)
+    >>> cfg.method = 'mtmfft'
+    >>> cfg.taper = 'dpss'
+    >>> cfg.output = 'pow'
+    >>> cfg.tapsmofrq = 10
+    >>> # define baseline/stimulus-on ranges
+    >>> baseSelect = {"toilim": [-0.25, 0]}
+    >>> stimSelect = {"toilim": [0.25, 0.5]}
+    >>> # in-place selection of baseline interval performed by `freqanalysis`
+    >>> cfg.select = baseSelect
+    >>> baselineSpectrum = spy.freqanalysis(cfg, data)
+    >>> # in-place selection of stimulus-on time-frame performed by `freqanalysis`
+    >>> cfg.select = stimSelect
+    >>> stimonSpectrum = spy.freqanalysis(cfg, data)
+    
+    Especially for large datasets, in-place data selection performed by Syncopy's
+    compute kernels not only saves disk space but can also significantly increase 
+    performance.  
+    
+    Examples
+    --------
+    Use :func:`~syncopy.tests.misc.generate_artificial_data` to create a synthetic 
+    :class:`syncopy.AnalogData` object. 
+    
+    >>> import numpy as np
+    >>> import syncopy as spy
+    >>> from syncopy.tests.misc import generate_artificial_data
+    >>> adata = generate_artificial_data(nTrials=10, nChannels=32) 
+    
+    Assume a hypothetical trial onset at second 2.0 with the first second of each
+    trial representing baseline recordings. To extract only the stimulus-on period
+    from `adata`, one could use
+    
+    >>> stimon = spy.selectdata(adata, toilim=[2.0, np.inf])
+    
+    Note that this is equivalent to
+    
+    >>> stimon = adata.selectdata(toilim=[2.0, np.inf])
+    
+    See also
+    --------
+    :meth:`syncopy.AnalogData.selectdata` : corresponding class method
+    :meth:`syncopy.SpectralData.selectdata` : corresponding class method
+    :meth:`syncopy.EventData.selectdata` : corresponding class method
+    :meth:`syncopy.SpikeData.selectdata` : corresponding class method
     """
     
-    # Make sure `obj` is a valid `BaseData`-like object
-    try:
-        spw_basedata_parser(obj, varname="obj", writable=None, empty=False)
-    except Exception as exc:
-        raise exc
-
-    # Make sure the input dimensions make sense
-    if not set(kwargs.keys()).issubset(self.dimord):
-        raise SPYValueError(legal=self.dimord, actual=list(kwargs.keys()))
-
-    # Process `trials`
-    if trials is not None:
-        if isinstance(trials, tuple):
-            start = trials[0]
-            if trials[1] is None:
-                stop = self.trl.shape[0]
-            else:
-                stop = trials[1]
-            trials = np.arange(start, stop)
-        if not set(trials).issubset(range(self.trl.shape[0])):
-            lgl = "trial selection between 0 and {}".format(str(self.trl.shape[0]))
-            raise SPYValueError(legal=lgl, varname="trials")
-        if isinstance(trials, int):
-            trials = np.array([trials])
-    else:
-        trials = np.arange(self.trl.shape[0])
-
-    # Time-based selectors work differently for continuous/discrete data,
-    # handle those separately from other dimensional labels
-    selectors = {}
-    if "time" in kwargs.keys():
-        selectors["time"] = kwargs.pop("time")
-
-    # Calculate indices for each provided dimensional selector
-    for lbl, selection in kwargs.items():
-        ref = np.array(self.dimord[lbl])
-        lgl = "component of `obj.{}`".format(lbl)
-
-        # Value-range selection
-        if isinstance(selection, tuple):
-            if len(selection) != 2:
-                raise SPYValueError(legal="two-element tuple",
-                                    actual="tuple of length {}".format(str(len(selection))),
-                                    varname=lbl)
-            bounds = [None, None]
-            for sk, sel in enumerate(selection):
-                if isinstance(sel, str):
-                    try:
-                        bounds[sk] = list(ref).index(sel)
-                    except:
-                        raise SPYValueError(legal=lgl, actual=sel)
-                elif isinstance(sel, numbers.Number):
-                    if not exact_match:
-                        bounds[sk] = ref[np.abs(ref - sel).argmin()]
-                    else:
-                        try:
-                            bounds[sk] = list(ref).index(sel)
-                        except:
-                            raise SPYValueError(legal=lgl, actual=sel)
-                elif sel is None:
-                    if sk == 0:
-                        bounds[sk] = ref[0]
-                    if sk == 1:
-                        bounds[sk] = ref[-1]
-                else:
-                    raise SPYTypeError(sel, varname=lbl, expected="string, number or None")
-            bounds[1] += 1
-            selectors[lbl] = slice(*bounds)
-
-        # Index-range selection
-        elif isinstance(selection, slice):
-            if not len(range(*selection.indices(ref.size))):
-                lgl = "non-empty selection"
-                act = "empty selector"
-                raise SPYValueError(legal=lgl, varname=lbl, actual=act)
-            selectors[lbl] = slice(selection.start, selection.stop, selection.step)
-            
-        # Multi-index selection: try to convert contiguous lists to slices
-        elif isinstance(selection, (list, np.ndarray)):
-            if not set(selection).issubset(range(ref.size)):
-                vname = "list-selector for `obj.{}`".format(lbl)
-                raise SPYValueError(legal=lgl, varname=vname)
-            if np.unique(np.diff(selection)).size == 1:
-                selectors[lbl] = slice(selection[0], selection[-1] + 1)
-            else:
-                selectors[lbl] = list(selection)
-
-        # Single-value selection
-        elif isinstance(selection, float):
-            if not exact_match:
-                selectors[lbl] = ref[np.abs(ref - selection).argmin()]
-            else:
-                try:
-                    selectors[lbl] = list(ref).index(selection)
-                except:
-                    raise SPYValueError(legal=lgl, actual=selection)
-
-        # Single-index selection
-        elif isinstance(selection, int):
-            if selection not in range(ref.size):
-                raise SPYValueError(legal=lgl, actual=selection)
-            selectors[lbl] = selection
-
-        # You had your chance...
-        else:
-            raise SPYTypeError(selection, varname=lbl,
-                               expected="tuple, list-like, slice, float or int")
-        
-    return selectors, trials
+    raise NotImplementedError("Coming soon!")
 
 
 def definetrial(obj, trialdefinition=None, pre=None, post=None, start=None,
@@ -378,9 +286,6 @@ def definetrial(obj, trialdefinition=None, pre=None, post=None, start=None,
         # define whole recording as single trial    
         definetrial(obj, trialdefinition=None)
     
-    
-
-    
     """
 
     # Start by vetting input object
@@ -676,7 +581,22 @@ def padding(data, padtype, pad="absolute", padlength=None, prepadlength=None,
     """
     Perform data padding on Syncopy object or :class:`numpy.ndarray`
     
-    Usage summary:
+    **Usage summary**
+    
+    Depending on the value of `pad`, the following padding length specifications
+    are supported:
+    
+    +------------+----------------------+---------------+----------------------+----------------------+
+    | `pad`      | `data`               | `padlength`   | `prepadlength`       | `postpadlength`      |
+    +============+======================+===============+======================+======================+
+    | 'absolute' | Syncopy object/array | number        | `None`/`bool`        | `None`/`bool`        |
+    +------------+----------------------+---------------+----------------------+----------------------+
+    | 'relative' | Syncopy object/array | number/`None` | number/`None`/`bool` | number/`None`/`bool` |
+    +------------+----------------------+---------------+----------------------+----------------------+
+    | 'maxlen'   | Syncopy object       | `None`/`bool` | `None`/`bool`        | `None`/`bool`        |
+    +------------+----------------------+---------------+----------------------+----------------------+
+    | 'nextpow2' | Syncopy object/array | `None`/`bool` | `None`/`bool`        | `None`/`bool`        |
+    +------------+----------------------+---------------+----------------------+----------------------+
     
     * `data` can be either a Syncopy object containing multiple trials or a
       :class:`numpy.ndarray` representing a single trial
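The usage summary above can be made concrete with a small plain-NumPy sketch. This is not the Syncopy `padding` API, only one reading of the table; in particular, the symmetric pre/post split assumed for 'absolute' and the zero fill (governed by `padtype` in Syncopy) are illustrative assumptions.

.. code-block:: python

   # Illustrative sketch (plain NumPy, not syncopy.padding) of what the four
   # `pad` modes from the table mean for a single (time x channel) trial.
   import numpy as np

   trial = np.random.randn(1000, 16)          # 1000 samples, 16 channels
   nSamples = trial.shape[0]

   # pad='absolute', padlength=1200: grow the trial to 1200 samples in total
   # (the even pre/post split is an assumption made for illustration)
   total = 1200
   pre = (total - nSamples) // 2
   post = total - nSamples - pre
   padded_abs = np.pad(trial, [(pre, post), (0, 0)], mode="constant")

   # pad='relative', prepadlength=100, postpadlength=50: add samples on top
   # of the existing trial length
   padded_rel = np.pad(trial, [(100, 50), (0, 0)], mode="constant")

   # pad='nextpow2': grow the trial to the next power of two (here 1024 samples)
   nextpow2 = int(2 ** np.ceil(np.log2(nSamples)))
   padded_p2 = np.pad(trial, [(0, nextpow2 - nSamples), (0, 0)], mode="constant")

   # pad='maxlen' applies to Syncopy objects holding multiple trials: every
   # trial is padded up to the length of the longest trial.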
diff --git a/syncopy/datatype/discrete_data.py b/syncopy/datatype/discrete_data.py
index d5f1ac8b9..3f8f8555d 100644
--- a/syncopy/datatype/discrete_data.py
+++ b/syncopy/datatype/discrete_data.py
@@ -4,7 +4,7 @@
 # 
 # Created: 2019-03-20 11:20:04
 # Last modified by: Stefan Fuertinger [stefan.fuertinger@esi-frankfurt.de]
-# Last modification time: <2019-09-25 16:57:00>
+# Last modification time: <2019-10-11 13:38:26>
 
 # Builtin/3rd party package imports
 import numpy as np
@@ -12,7 +12,7 @@
 
 # Local imports
 from .base_data import BaseData, Indexer
-from .data_methods import _selectdata_discrete, definetrial
+from .data_methods import definetrial
 from syncopy.shared.parsers import scalar_parser, array_parser
 from syncopy.shared.errors import SPYValueError
 
@@ -108,7 +108,6 @@ def trialtime(self):
     def selectdata(self, trials=None, deepcopy=False, **kwargs):
         """Select parts of the data (:func:`syncopy.selectdata`)        
         """
-        return _selectdata_discrete(self, trials, deepcopy, **kwargs)
 
     # Helper function that grabs a single trial
     def _get_trial(self, trialno):
diff --git a/syncopy/examples/ex_specest.py b/syncopy/examples/ex_specest.py
index 2821014b1..4ff996c79 100644
--- a/syncopy/examples/ex_specest.py
+++ b/syncopy/examples/ex_specest.py
@@ -24,12 +24,12 @@
 import syncopy as spy
 
 # Import artificial data generator
-from syncopy.tests.misc import generate_artifical_data
+from syncopy.tests.misc import generate_artificial_data
 
 import dask.distributed as dd
 from time import time
 
-# sys.exit()
+sys.exit()
 
 if __name__ == "__main__":
 
@@ -110,7 +110,7 @@
     cfg.output = 'abs'
     cfg.tapsmofrq = 9.3
     cfg.keeptrials = True
-    artdata = generate_artifical_data(nTrials=2, nChannels=16, equidistant=True, inmemory=True)
+    artdata = generate_artificial_data(nTrials=2, nChannels=16, equidistant=True, inmemory=True)
     
     # artdata.save('test', overwrite=True)
     # bdata = spy.load('test')
@@ -237,9 +237,9 @@
     # sys.exit()
     # 
     # # FIXME: channel assignment is only temporarily necessary
-    # adata = generate_artifical_data(nTrials=20, nChannels=256, equidistant=False, overlapping=True)        # ~50MB
+    # adata = generate_artificial_data(nTrials=20, nChannels=256, equidistant=False, overlapping=True)        # ~50MB
     # adata.channel = ["channel" + str(i + 1) for i in range(256)]
-    # # adata = generate_artifical_data(nTrials=100, nChannels=1024, equidistant=False)        # ~1.14GB
+    # # adata = generate_artificial_data(nTrials=100, nChannels=1024, equidistant=False)        # ~1.14GB
     # # adata.channel = ["channel" + str(i + 1) for i in range(1024)]
     # 
     # 
diff --git a/syncopy/examples/ex_wavelet.py b/syncopy/examples/ex_wavelet.py
index 5f0fd973e..6dea5c115 100644
--- a/syncopy/examples/ex_wavelet.py
+++ b/syncopy/examples/ex_wavelet.py
@@ -22,7 +22,7 @@
 
 
 if __name__ == "__main__":
-    data = spy.tests.misc.generate_artifical_data(nChannels=5, nTrials=37)
+    data = spy.tests.misc.generate_artificial_data(nChannels=5, nTrials=37)
     spec, lastres = spy.wavelet(data)
     import matplotlib.pyplot as plt
     plt.ion()
diff --git a/syncopy/statistics/timelockanalysis.py b/syncopy/statistics/timelockanalysis.py
index 967af8d33..e15bc800b 100644
--- a/syncopy/statistics/timelockanalysis.py
+++ b/syncopy/statistics/timelockanalysis.py
@@ -3,8 +3,8 @@
 # 
 # 
 # Created: 2019-10-01 11:39:36
-# Last modified by: Joscha Schmiedt [joscha.schmiedt@esi-frankfurt.de]
-# Last modification time: <2019-10-08 13:37:29>
+# Last modified by: Stefan Fuertinger [stefan.fuertinger@esi-frankfurt.de]
+# Last modification time: <2019-10-11 16:10:44>
 
 import os
 import numpy as np
@@ -16,26 +16,27 @@
 __all__ = ["timelockanalysis"]
 
 def timelockanalysis(data, trials=None):
-    """Prototype function for averaging AnalogData across trials
+    """Prototype function for averaging :class:`~syncopy.AnalogData` across trials
     
     Parameters
     ----------
-    data : :class:`syncopy.AnalogData` object
-        Syncopy data object to be averaged across trials
+    data : Syncopy :class:`~syncopy.AnalogData` object
+        Syncopy :class:`~syncopy.AnalogData` object to be averaged across trials
     trials : :class:`numpy.ndarray`
         Array of trial indices to be used for averaging
     
     Returns
     -------
-    dict
-        Dictionary with keys "avg", "var", "dof" "time", "channel" for average
-        variance, degrees of freedom, time axis, and channel labels
-
-    Note
-    ----
-    This function is merely a proof of concept for averaging across trials with
-    an online algorithm. The final version for release will change severely.
-
+    resdict : dict
+        Dictionary with keys "avg", "var", "dof", "time", "channel" representing
+        the calculated average (across `trials`), variance (across `trials`),
+        degrees of freedom, time axis, and channel labels
+
+    Notes
+    -----
+    This function is merely a proof-of-concept implementation for averaging data
+    across trials using an efficient online algorithm. The final version
+    for release will change substantially.
     """
             
     # FIXME: There are currently no tests for this function.    
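The "efficient online algorithm" mentioned in the Notes above is presumably a running-moments update in the spirit of Welford's method, which only ever keeps one trial in memory. A minimal sketch of that idea (not Syncopy's actual implementation; `online_avg_var` and its `trials` argument, an iterable of equally shaped time-by-channel arrays, are hypothetical names used for illustration):

.. code-block:: python

   import numpy as np

   def online_avg_var(trials):
       """Welford-style running mean/variance over equally shaped trial arrays."""
       n, mean, m2 = 0, None, None
       for trl in trials:
           n += 1
           if mean is None:
               mean = np.zeros_like(trl, dtype=float)
               m2 = np.zeros_like(trl, dtype=float)
           delta = trl - mean
           mean += delta / n              # update running average
           m2 += delta * (trl - mean)     # accumulate squared deviations
       var = m2 / (n - 1) if n > 1 else np.zeros_like(mean)
       return {"avg": mean, "var": var, "dof": n - 1}

   # Usage on synthetic trials: 10 trials of 1000 samples x 16 channels
   result = online_avg_var(np.random.randn(1000, 16) for _ in range(10))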
diff --git a/syncopy/tests/misc.py b/syncopy/tests/misc.py
index 27c8d3e42..69e102c8d 100644
--- a/syncopy/tests/misc.py
+++ b/syncopy/tests/misc.py
@@ -57,7 +57,7 @@ def is_slurm_node():
         return False
 
     
-def generate_artifical_data(nTrials=2, nChannels=2, equidistant=True,
+def generate_artificial_data(nTrials=2, nChannels=2, equidistant=True,
                             overlapping=False, inmemory=True, dimord="default"):
     """
     Populate `AnalogData` object w/ artificial signal
diff --git a/syncopy/tests/test_computationalroutine.py b/syncopy/tests/test_computationalroutine.py
index 3a7b1e782..724f99aaf 100644
--- a/syncopy/tests/test_computationalroutine.py
+++ b/syncopy/tests/test_computationalroutine.py
@@ -4,7 +4,7 @@
 # 
 # Created: 2019-07-03 11:31:33
 # Last modified by: Stefan Fuertinger [stefan.fuertinger@esi-frankfurt.de]
-# Last modification time: <2019-09-25 17:07:11>
+# Last modification time: <2019-10-11 13:47:16>
 
 import os
 import tempfile
@@ -20,7 +20,7 @@
 from syncopy.io import load
 from syncopy.shared.computational_routine import ComputationalRoutine
 from syncopy.shared.parsers import unwrap_io, unwrap_cfg
-from syncopy.tests.misc import generate_artifical_data
+from syncopy.tests.misc import generate_artificial_data
 
 # Decorator to decide whether or not to run dask-related tests
 skip_without_dask = pytest.mark.skipif(not __dask__, reason="dask not available")
@@ -182,7 +182,7 @@ def test_sequential_equidistant(self):
 
     def test_sequential_nonequidistant(self):
         for overlapping in [False, True]:
-            nonequidata = generate_artifical_data(nTrials=self.nTrials,
+            nonequidata = generate_artificial_data(nTrials=self.nTrials,
                                                   nChannels=self.nChannels,
                                                   equidistant=False,
                                                   overlapping=overlapping,
@@ -349,7 +349,7 @@ def test_parallel_equidistant(self, testcluster):
     def test_parallel_nonequidistant(self, testcluster):
         client = dd.Client(testcluster)
         for overlapping in [False, True]:
-            nonequidata = generate_artifical_data(nTrials=self.nTrials,
+            nonequidata = generate_artificial_data(nTrials=self.nTrials,
                                                     nChannels=self.nChannels,
                                                     equidistant=False,
                                                     overlapping=overlapping,
diff --git a/syncopy/tests/test_continuousdata.py b/syncopy/tests/test_continuousdata.py
index 43826f49b..b5a62896c 100644
--- a/syncopy/tests/test_continuousdata.py
+++ b/syncopy/tests/test_continuousdata.py
@@ -4,7 +4,7 @@
 # 
 # Created: 2019-03-20 11:46:31
 # Last modified by: Stefan Fuertinger [stefan.fuertinger@esi-frankfurt.de]
-# Last modification time: <2019-09-25 13:17:32>
+# Last modification time: <2019-10-11 13:47:16>
 
 import os
 import tempfile
@@ -16,7 +16,7 @@
 from syncopy.io import save, load
 from syncopy.datatype.base_data import VirtualData
 from syncopy.shared.errors import SPYValueError, SPYTypeError
-from syncopy.tests.misc import generate_artifical_data, construct_spy_filename
+from syncopy.tests.misc import generate_artificial_data, construct_spy_filename
 
 
 class TestAnalogData():
@@ -317,7 +317,7 @@ def test_absolute_nextpow2_array_padding(self):
     def test_object_padding(self):
 
         # construct AnalogData object w/trials of unequal lengths
-        adata = generate_artifical_data(nTrials=7, nChannels=16,
+        adata = generate_artificial_data(nTrials=7, nChannels=16,
                                         equidistant=False, inmemory=False)
         timeAxis = adata.dimord.index("time")
 
@@ -333,7 +333,7 @@ def test_object_padding(self):
             assert trl_time - total_time < 1/adata.samplerate
 
         # jumble axes of `AnalogData` object and compute max. trial length
-        adata2 = generate_artifical_data(nTrials=7, nChannels=16,
+        adata2 = generate_artificial_data(nTrials=7, nChannels=16,
                                          equidistant=False, inmemory=False,
                                          dimord=adata.dimord[::-1])
         timeAxis2 = adata2.dimord.index("time")
diff --git a/syncopy/tests/test_specest.py b/syncopy/tests/test_specest.py
index fd70b2a9b..90cdb0f89 100644
--- a/syncopy/tests/test_specest.py
+++ b/syncopy/tests/test_specest.py
@@ -6,7 +6,7 @@
 # Last modified by: Stefan Fuertinger [stefan.fuertinger@esi-frankfurt.de]
 # Last modification time: <2019-07-15 12:05:40>
 
-from syncopy.tests.misc import generate_artifical_data
+from syncopy.tests.misc import generate_artificial_data
 from syncopy.specest.freqanalysis import freqanalysis
 from syncopy.shared.errors import SPYValueError
 from syncopy.datatype.data_methods import _nextpow2
@@ -162,7 +162,7 @@ def test_dpss(self):
             assert spec.channel.size == len(chanList)
 
         # non-equidistant data w/multiple tapers
-        artdata = generate_artifical_data(nTrials=5, nChannels=16,
+        artdata = generate_artificial_data(nTrials=5, nChannels=16,
                                           equidistant=False, inmemory=False)
         timeAxis = artdata.dimord.index("time")
         cfg = StructDict()
@@ -203,7 +203,7 @@ def test_dpss(self):
             assert np.max(spec.freq - freqs) < self.ftol
 
         # same + reversed dimensional order in input object
-        cfg.data = generate_artifical_data(nTrials=5, nChannels=16,
+        cfg.data = generate_artificial_data(nTrials=5, nChannels=16,
                                            equidistant=False, inmemory=False,
                                            dimord=AnalogData._defaultDimord[::-1])
         timeAxis = cfg.data.dimord.index("time")
@@ -239,7 +239,7 @@ def test_dpss(self):
             assert spec.taper.size > 1
 
         # same + overlapping trials
-        cfg.data = generate_artifical_data(nTrials=5, nChannels=16,
+        cfg.data = generate_artificial_data(nTrials=5, nChannels=16,
                                            equidistant=False, inmemory=False,
                                            dimord=AnalogData._defaultDimord[::-1],
                                            overlapping=True)
@@ -356,7 +356,7 @@ def test_parallel(self, testcluster):
 
         # simplest case: equidistant trial spacing, all in memory
         fileCount = [self.nTrials, nFiles]
-        artdata = generate_artifical_data(nTrials=self.nTrials, nChannels=self.nChannels,
+        artdata = generate_artificial_data(nTrials=self.nTrials, nChannels=self.nChannels,
                                           inmemory=True)
         for k, chan_per_worker in enumerate([None, chanPerWrkr]):
             cfg.chan_per_worker = chan_per_worker
@@ -366,7 +366,7 @@ def test_parallel(self, testcluster):
 
         # non-equidistant trial spacing
         cfg.keeptapers = False
-        artdata = generate_artifical_data(nTrials=self.nTrials, nChannels=self.nChannels,
+        artdata = generate_artificial_data(nTrials=self.nTrials, nChannels=self.nChannels,
                                           inmemory=True, equidistant=False)
         timeAxis = artdata.dimord.index("time")
         maxtrlno = np.diff(artdata.sampleinfo).argmax()
@@ -383,7 +383,7 @@ def test_parallel(self, testcluster):
         # equidistant trial spacing, keep tapers
         cfg.output = "abs"
         cfg.keeptapers = True
-        artdata = generate_artifical_data(nTrials=self.nTrials, nChannels=self.nChannels,
+        artdata = generate_artificial_data(nTrials=self.nTrials, nChannels=self.nChannels,
                                           inmemory=False)
         for k, chan_per_worker in enumerate([None, chanPerWrkr]):
             spec = freqanalysis(artdata, cfg)
@@ -392,7 +392,7 @@ def test_parallel(self, testcluster):
         # non-equidistant, overlapping trial spacing, throw away trials and tapers
         cfg.keeptapers = False
         cfg.keeptrials = "no"
-        artdata = generate_artifical_data(nTrials=self.nTrials, nChannels=self.nChannels,
+        artdata = generate_artificial_data(nTrials=self.nTrials, nChannels=self.nChannels,
                                           inmemory=False, equidistant=False,
                                           overlapping=True)
         spec = freqanalysis(artdata, cfg)
diff --git a/syncopy/tests/test_spyio.py b/syncopy/tests/test_spyio.py
index 289dc19da..dcdf48555 100644
--- a/syncopy/tests/test_spyio.py
+++ b/syncopy/tests/test_spyio.py
@@ -3,8 +3,8 @@
 # Test functionality of SyNCoPy-container I/O routines
 # 
 # Created: 2019-03-19 14:21:12
-# Last modified by: Joscha Schmiedt [joscha.schmiedt@esi-frankfurt.de]
-# Last modification time: <2019-09-09 11:37:28>
+# Last modified by: Stefan Fuertinger [stefan.fuertinger@esi-frankfurt.de]
+# Last modification time: <2019-10-11 13:47:16>
 
 import os
 import tempfile
@@ -22,7 +22,7 @@
 from syncopy.io.utils import FILE_EXT
 from syncopy.shared.errors import SPYValueError, SPYTypeError, SPYIOError, SPYError
 import syncopy.datatype as swd
-from syncopy.tests.misc import generate_artifical_data, construct_spy_filename
+from syncopy.tests.misc import generate_artificial_data, construct_spy_filename
 
 class TestSpyIO():
 
@@ -66,7 +66,7 @@ class TestSpyIO():
     def test_logging(self):
         with tempfile.TemporaryDirectory() as tdir:
             fname = os.path.join(tdir, "dummy")
-            dummy = generate_artifical_data(inmemory=True)
+            dummy = generate_artificial_data(inmemory=True)
             ldum = len(dummy._log)
             save(dummy, filename=fname)