diff --git a/.pylintrc b/.pylintrc index 0416a772d..11caeec8f 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,5 +1,25 @@ +# PyLint config for PyPhi code. +# +# Based on the PyLint config for Google's apitools: +# https://github.com/google/apitools/blob/master/default.pylintrc + [MASTER] +# Specify a configuration file. +# DEFAULT: rcfile= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +# DEFAULT: init-hook= + +# Profiled execution. +# DEFAULT: profile=no + +# Add files or directories to the blacklist. They should be base names, not +# paths. +# DEFAULT: ignore=CVS +# NOTE: This path must be relative due to the use of +# os.walk in astroid.modutils.get_module_files. ignore= __pycache__, __pyphi_cache__, @@ -12,8 +32,325 @@ ignore= htmlcov, benchmarks, +# Pickle collected data for later comparisons. +# DEFAULT: persistent=yes + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +# DEFAULT: load-plugins= + +# DEPRECATED +# DEFAULT: include-ids=no + +# DEPRECATED +# DEFAULT: symbols=no + + [MESSAGES CONTROL] -disable= - no-member, - invalid-name +# TODO: remove cyclic-import. +disable = + cyclic-import, + fixme, + import-error, + locally-disabled, + locally-enabled, + no-member, + no-name-in-module, + no-self-use, + super-on-old-class, + too-many-arguments, + too-many-function-args, + + +[REPORTS] + +# Set the output format. Available formats are text, parseable, colorized, msvs +# (visual studio) and html. You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +# DEFAULT: output-format=text + +# Put messages in a separate file for each module / package specified on the +# command line instead of printing them on stdout. Reports (if any) will be +# written in a file name "pylint_global.[txt|html]". 
+# DEFAULT: files-output=no + +# Tells whether to display a full report or only the messages +# DEFAULT: reports=yes +# RATIONALE: run from Travis / tox, and don't need / want to parse output. +reports=no + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +# DEFAULT: evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Add a comment according to your evaluation note. This is used by the global +# evaluation report (RP0004). +# DEFAULT: comment=no + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +#msg-template= + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +# DEFAULT: min-similarity-lines=4 +min-similarity-lines=15 + +# Ignore comments when computing similarities. +# DEFAULT: ignore-comments=yes + +# Ignore docstrings when computing similarities. +# DEFAULT: ignore-docstrings=yes + +# Ignore imports when computing similarities. +# DEFAULT: ignore-imports=no +ignore-imports=yes + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +# DEFAULT: init-import=no + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=^\*{0,2}(_$|unused_|dummy_) + + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +# DEFAULT: additional-builtins= + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +# DEFAULT: logging-modules=logging + + +[FORMAT] + +# Maximum number of characters on a single line. 
+# DEFAULT: max-line-length=79 + +# Regexp for a line that is allowed to be longer than the limit. +# DEFAULT: ignore-long-lines=^\s*(# )??$ + +# Allow the body of an if to be on the same line as the test if there is no +# else. +# DEFAULT: single-line-if-stmt=no + +# List of optional constructs for which whitespace checking is disabled +# DEFAULT: no-space-check=trailing-comma,dict-separator +# RATIONALE: pylint ignores whitespace checks around the +# constructs "dict-separator" (cases like {1:2}) and +# "trailing-comma" (cases like {1: 2, }). +# By setting "no-space-check" to empty whitespace checks will be +# enforced around both constructs. +no-space-check = + +# Maximum number of lines in a module +# DEFAULT: max-module-lines=1000 +max-module-lines=1500 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +# DEFAULT: indent-string=' ' + +# Number of spaces of indent required inside a hanging or continued line. +# DEFAULT: indent-after-paren=4 + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +# DEFAULT: notes=FIXME,XXX,TODO + + +[BASIC] + +# Regular expression which should only match function or class names that do +# not require a docstring. +# DEFAULT: no-docstring-rgx=__.*__ +no-docstring-rgx=(__.*__|main) + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +# DEFAULT: docstring-min-length=-1 +docstring-min-length=10 + +# Regular expression which should only match correct module names. The +# leading underscore is sanctioned for private modules by Google's style +# guide. 
+module-rgx=^(_?[a-z][a-z0-9_]*)|__init__$ + +# Regular expression matching correct constant names +# DEFAULT: const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ +const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ + +# Regular expression matching correct class attribute names +# DEFAULT: class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ +class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ + +# Regular expression matching correct class names +# DEFAULT: class-rgx=[A-Z_][a-zA-Z0-9]+$ +class-rgx=^_?[A-Z][a-zA-Z0-9]*$ + +# Regular expression which should only match correct function names. +# 'camel_case' and 'snake_case' group names are used for consistency of naming +# styles across functions and methods. +function-rgx=^(?:(?P_?[A-Z][a-zA-Z0-9]*)|(?P_?[a-z][a-z0-9_]*))$ + +# Regular expression which should only match correct method names. +# 'camel_case' and 'snake_case' group names are used for consistency of naming +# styles across functions and methods. 'exempt' indicates a name which is +# consistent with all naming styles. 
+method-rgx=^(?:(?P__[a-z0-9_]+__|next)|(?P_{0,2}[A-Z][a-zA-Z0-9]*)|(?P_{0,2}[a-z][a-z0-9_]*))$ + +# Regular expression matching correct attribute names +# DEFAULT: attr-rgx=[a-z_][a-z0-9_]{2,30}$ +attr-rgx=^_{0,2}[a-z][a-z0-9_]*$ + +# Regular expression matching correct argument names +# DEFAULT: argument-rgx=[a-z_][a-z0-9_]{2,30}$ +argument-rgx=^[a-z][a-z0-9_]*$ + +# Regular expression matching correct variable names +# DEFAULT: variable-rgx=[a-z_][a-z0-9_]{2,30}$ +variable-rgx=^[a-z][a-z0-9_]*$ + +# Regular expression matching correct inline iteration names +# DEFAULT: inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ +inlinevar-rgx=^[a-z][a-z0-9_]*$ + +# Good variable names which should always be accepted, separated by a comma +# DEFAULT: good-names=i,j,k,ex,Run,_ +good-names=main,_ + +# Bad variable names which should always be refused, separated by a comma +# DEFAULT: bad-names=foo,bar,baz,toto,tutu,tata +bad-names= + +# List of builtins function names that should not be used, separated by a comma +# +bad-functions=input,apply,reduce + + +[TYPECHECK] + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +# DEFAULT: ignore-mixin-members=yes + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis +# DEFAULT: ignored-modules= + +# List of classes names for which member attributes should not be checked +# (useful for classes with attributes dynamically set). +# DEFAULT: ignored-classes=SQLObject + +# When zope mode is activated, add a predefined set of Zope acquired attributes +# to generated-members. +# DEFAULT: zope=no + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E0201 when accessed. Python regular +# expressions are accepted. 
+# DEFAULT: generated-members=REQUEST,acl_users,aq_parent + + +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +# DEFAULT: deprecated-modules=regsub,TERMIOS,Bastion,rexec + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +# DEFAULT: import-graph= + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +# DEFAULT: ext-import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +# DEFAULT: int-import-graph= + + +[CLASSES] + +# List of interface methods to ignore, separated by a comma. This is used for +# instance to not check methods defines in Zope's Interface base class. +# DEFAULT: ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by + +# List of method names used to declare (i.e. assign) instance attributes. +# DEFAULT: defining-attr-methods=__init__,__new__,setUp + +# List of valid names for the first argument in a class method. +# DEFAULT: valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +# DEFAULT: valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# Maximum number of arguments for function / method +# DEFAULT: max-args=5 +# RATIONALE: API-mapping +max-args = 14 + +# Argument names that match this expression will be ignored. 
Default to name +# with leading underscore +# DEFAULT: ignored-argument-names=_.* + +# Maximum number of locals for function / method body +# DEFAULT: max-locals=15 +max-locals=24 + +# Maximum number of return / yield for function / method body +# DEFAULT: max-returns=6 +max-returns=9 + +# Maximum number of branch for function / method body +# DEFAULT: max-branches=12 +max-branches=21 + +# Maximum number of statements in function / method body +# DEFAULT: max-statements=50 + +# Maximum number of parents for a class (see R0901). +# DEFAULT: max-parents=7 + +# Maximum number of attributes for a class (see R0902). +# DEFAULT: max-attributes=7 +# RATIONALE: API mapping +max-attributes=19 + +# Minimum number of public methods for a class (see R0903). +# DEFAULT: min-public-methods=2 +# RATIONALE: context mgrs may have *no* public methods +min-public-methods=0 + +# Maximum number of public methods for a class (see R0904). +# DEFAULT: max-public-methods=20 +# RATIONALE: API mapping +max-public-methods=40 + +[ELIF] +max-nested-blocks=6 + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. 
Defaults to +# "Exception" +# DEFAULT: overgeneral-exceptions=Exception diff --git a/CHANGELOG.md b/CHANGELOG.md index ee1d63e35..ddba3189e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,17 +1,122 @@ Changelog ========= +1.0.0 :tada: +------------ + +### API changes + +#### Modules + +- Renamed: + - `compute.big_phi` to `compute.network` + - `compute.concept` to `compute.subsystem` + - `models.big_phi` to `models.subsystem` + - `models.concept` to `models.mechanism` + +#### Functions + +- Renamed: + - `compute.main_complex()` to `compute.major_complex()` + - `compute.big_mip()` to `compute.sia()` + - `compute.big_phi()` to `compute.phi()` + - `compute.constellation()` to `compute.ces()` + - `compute.conceptual_information()` to `compute.conceptual_info()` + - `subsystem.core_cause()` to `subsystem.mic()` + - `subsystem.core_effect()` to `subsystem.mie()` + - `subsystem.mip_past()` to `subsystem.cause_mip()` + - `subsystem.phi_mip_past()` to `subsystem.phi_cause_mip()` + - `subsystem.phi_mip_future()` to `subsystem.phi_effect_mip()` + - `distance.small_phi_measure()` to `distance.repertoire_distance()` + - `distance.big_phi_measure()` to `distance.system_repertoire_distance()` + - For all functions in `convert`: + - `loli` to `le` (little-endian) + - `holi` to `be` (big-endian) +- Removed `compute.concept()`; use `Subsystem.concept()` instead. 
+
+#### Arguments
+
+- Renamed `connectivity_matrix` keyword argument of `Network()` to `cm`
+
+#### Objects
+
+- Renamed `BigMip` to `SystemIrreducibilityAnalysis`
+  - Renamed the `unpartitioned_constellation` attribute to `ces`
+  - `sia` is used throughout for attributes, variables, and function names
+    instead of `big_mip`
+- Renamed `Mip` to `RepertoireIrreducibilityAnalysis`
+  - Renamed the `unpartitioned_repertoire` attribute to `repertoire`
+  - `ria` is used throughout for attributes, variables, and function names
+    instead of `mip`
+- Renamed `Constellation` to `CauseEffectStructure`
+  - `ces` is used throughout for attributes, variables, and function names
+    instead of `constellation`
+- Renamed `Mice` to `MaximallyIrreducibleCauseOrEffect`
+  - `mic` or `mie` are used throughout for attributes, variables, and function
+    names instead of `mice`
+
+- Similar changes were made to the `actual` and `models.actual_causation`
+modules.
+
+#### Configuration settings
+
+- Changed configuration settings as necessary to use the new object names.
+
+#### Constants
+
+- Renamed `Direction.PAST` to `Direction.CAUSE`
+- Renamed `Direction.FUTURE` to `Direction.EFFECT`
+
+### API additions
+
+#### Configuration settings
+
+- Added `CACHE_REPERTOIRES` to control whether cause/effect repertoires are
+  cached. Single-node cause/effect repertoires are always cached.
+- Added `CLEAR_SUBSYSTEM_CACHES_AFTER_COMPUTING_SIA` to control whether
+  subsystem caches are cleared after calling `compute.sia()`.
+
+#### Objects
+
+- Added two new objects, `MaximallyIrreducibleCause` and
+  `MaximallyIrreducibleEffect`, that are subclasses of
+  `MaximallyIrreducibleCauseOrEffect` with a fixed direction.
+
+### Refactor
+
+- Moved network-level functions in `compute.big_phi` to
+  `pyphi.compute.network`
+- Moved subsystem-level functions in `compute.big_phi` and `compute.concept` to
+  `compute.subsystem`
+
+### Documentation
+
+- Added a description of TPM representations.
+- Improved the explanation of conditional independence and updated the example + to reflect that PyPhi now raises an error if a conditionally-dependent TPM is + provided. +- Added detailed installation instructions. +- Little-endian and big-endian replace LOLI and HOLI terminology +- Added documentation for the following modules: + - `distribution` + - `cache` + - `compute.parallel` + - `compute` top-level module + - `module` top-level module + + 0.9.1 ----- ### Fixes - Refactored parallel processing support to fix an intermittent deadlock. + 0.9.0 ----- _2017-12-04_ -### API changes: +### API changes - Many functions have been refactored to different modules; see the "Refactor" section for details. - `compute.possible_complexes` no longer includes the empty subsystem. @@ -39,7 +144,7 @@ _2017-12-04_ `config.LOG_FILE_LEVEL` and `config.LOG_FILE`. - Removed the `location` property of `Concept`. -### API Additions +### API additions - Added `subsystem.evaluate_partition`. This returns the φ for a particular partition. - Added `config.MEASURE` to choose between EMD, KLD, or L1 for distance diff --git a/INSTALLATION.md b/INSTALLATION.md deleted file mode 100644 index b33272af3..000000000 --- a/INSTALLATION.md +++ /dev/null @@ -1,129 +0,0 @@ -Detailed installation guide for Mac OS X -======================================== - -This is a step-by-step guide intended for those unfamiliar with Python or the -command-line (*a.k.a.* the “shell”). - -A shell can be opened by opening a new tab in the Terminal app (located in -Utilities). Text that is `formatted like code` is meant to be copied and pasted -into the terminal (hit the Enter key to run the command). - -The fist step is to install the versions of Python that we need. The most -convenient way of doing this is to use the OS X package manager -[Homebrew](http://brew.sh/). 
Install Homebrew by running this command: - -```bash -ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" -``` - -Now you should have access to the `brew` command. First, we need to install -Python 2 and 3. Using these so-called “brewed” Python versions, rather than the -version of Python that comes with your computer, will protect your computer's -Python version from unwanted changes that could interfere with other -applications. - -```bash -brew install python python3 -``` - -Then we need to ensure that the terminal “knows about” the newly-installed -Python versions: - -```bash -brew link --overwrite python -brew link --overwrite python3 -``` - -Now that we're using our shiny new Python versions, it is highly recommended to -set up a **virtual environment** in which to install PyPhi. Virtual -environments allow different projects to isolate their dependencies from one -another, so that they don't interact in unexpected ways. Please see [this -guide](http://docs.python-guide.org/en/latest/dev/virtualenvs/) for more -information. - -To do this, you must install `virtualenvwrapper`, a [tool for manipulating -virtual environments](http://virtualenvwrapper.readthedocs.org/en/latest/). -This tool is available on [PyPI](https://pypi.python.org/pypi), the Python -package index, and can be installed with `pip`, the command-line utility for -installing and managing Python packages (`pip` was installed automatically with -the brewed Python): - -```bash -pip install virtualenvwrapper -``` - -Now we need to edit your shell startup file. This is a file that runs -automatically every time you open a new shell (a new window or tab in the -Terminal app). This file should be in your home directory, though it will be -invisible in the Finder because the filename is preceded by a period. On most -Macs it is called `.bash_profile`. 
You can open this in a text editor by -running this command: - -```bash -open -a TextEdit ~/.bash_profile -``` - -If this doesn't work because the file doesn't exist, then run `touch -~/.bash_profile` first. - -Now, you'll add three lines to the shell startup file. These lines will set the -location where the virtual environments will live, the location of your -development project directories, and the location of the script installed with -this package, respectively. **Note:** The location of the script can be found -by running `which virtualenvwrapper.sh`. - -The filepath after the equals sign on the second line will different for -everyone, but here is an example: - -```bash -export WORKON_HOME=$HOME/.virtualenvs -export PROJECT_HOME=$HOME/dev -source /usr/local/bin/virtualenvwrapper.sh -``` - -After editing the startup file and saving it, open a new terminal shell by -opening a new tab or window (or just reload the startup file by running `source -~/.bash_profile`). - -Now that `virtualenvwrapper` is fully installed, use it to create a Python 3 -virtual environment, like so: - -```bash -mkvirtualenv -p `which python3` -``` - -The option `` -p `which python3` `` ensures that when the virtual environment -is activated, the commands `python` and `pip` will refer to their Python 3 -counterparts. - -The virtual environment should have been activated automatically after creating -it. Virtual environments can be manually activated with `workon -`, and deactivated with `deactivate`. - -**Important:** Remember to activate the virtual environment with the `workon` -command **every time you begin working on your project**. Also, note that the -currently active virtual environment is *not* associated with any particular -folder; it is associated with a terminal shell. When a virtual environment is -active, your command-line prompt will be prepended with the name of the virtual -environment in parentheses. 
-
-Once you've checked that the new virtual environment is active, you're finally
-ready to install PyPhi into it (note that this may take a few minutes):
-```bash
-pip install pyphi
-```
-
-Congratulations, you've just installed PyPhi!
-
-To play around with the software, ensure that you've activated the virtual
-environment with `workon <name of virtual environment>`. Then run `python` to start a
-Python 3 interpreter. Then, in the interpreter's command-line (which is
-preceded by the `>>>` prompt), run
-
-```python
-import pyphi
-```
-
-Please see the documentation for some
-[examples](http://pythonhosted.org/pyphi/examples/index.html) and information
-on how to [configure](http://pythonhosted.org/pyphi/configuration.html) it.
diff --git a/INSTALLATION.rst b/INSTALLATION.rst
new file mode 100644
index 000000000..77b9e5ef1
--- /dev/null
+++ b/INSTALLATION.rst
@@ -0,0 +1,141 @@
+.. _macos-installation:
+
+Detailed installation guide for macOS
+=====================================
+
+This is a step-by-step guide intended for those unfamiliar with Python
+or the command-line (*a.k.a.* the “shell”).
+
+A shell can be opened by opening a new tab in the Terminal app (located in
+Utilities). Text that is ``formatted like code`` is meant to be copied and
+pasted into the terminal (hit the Enter key to run the command).
+
+The first step is to install the versions of Python that we need. The most
+convenient way of doing this is to use the OS X package manager `Homebrew
+<http://brew.sh/>`__. Install Homebrew by running this command:
+
+.. code:: bash
+
+    /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
+
+Now you should have access to the ``brew`` command. First, we need to install
+Python 2 and 3. Using these so-called “brewed” Python versions, rather than the
+version of Python that comes with your computer, will protect your computer's
+Python version from unwanted changes that could interfere with other
+applications.
+
+..
code:: bash + + brew install python python3 + +Then we need to ensure that the terminal “knows about” the newly-installed +Python versions: + +.. code:: bash + + brew link --overwrite python + brew link --overwrite python3 + +Now that we're using our shiny new Python versions, it is highly recommended to +set up a **virtual environment** in which to install PyPhi. Virtual +environments allow different projects to isolate their dependencies from one +another, so that they don't interact in unexpected ways. Please see `this guide +`__ for more information. + +To do this, you must install ``virtualenvwrapper``, a `tool for manipulating +virtual environments `__. This tool +is available on `PyPI `__, the Python package +index, and can be installed with ``pip``, the command-line utility for +installing and managing Python packages (``pip`` was installed automatically +with the brewed Python): + +.. code:: bash + + pip install virtualenvwrapper + +Now we need to edit your shell startup file. This is a file that runs +automatically every time you open a new shell (a new window or tab in the +Terminal app). This file should be in your home directory, though it will be +invisible in the Finder because the filename is preceded by a period. On most +Macs it is called ``.bash_profile``. You can open this in a text editor by +running this command: + +.. code:: bash + + open -a TextEdit ~/.bash_profile + +If you get an error that says the file doesn't exist, then run ``touch +~/.bash_profile`` first to create it. + +Now, you'll add three lines to the shell startup file. These lines will set the +location where the virtual environments will live, the location of your +development project directories, and the location of the script installed with +this package, respectively. **Note:** The location of the script can be found +by running ``which virtualenvwrapper.sh``. 
+
+The filepath after the equals sign on the second line will be different for
+everyone, but here is an example:
+
+.. code:: bash
+
+    export WORKON_HOME=$HOME/.virtualenvs
+    export PROJECT_HOME=$HOME/dev
+    source /usr/local/bin/virtualenvwrapper.sh
+
+After editing the startup file and saving it, open a new terminal shell by
+opening a new tab or window (or just reload the startup file by running
+``source ~/.bash_profile``).
+
+Now that ``virtualenvwrapper`` is fully installed, use it to create a Python 3
+virtual environment, like so:
+
+.. code:: bash
+
+    mkvirtualenv -p `which python3` <name of virtual environment>
+
+The option ``-p `which python3``` ensures that when the virtual environment is
+activated, the commands ``python`` and ``pip`` will refer to their Python 3
+counterparts.
+
+The virtual environment should have been activated automatically after creating
+it. Virtual environments can be manually activated with ``workon
+<name of virtual environment>``, and deactivated with ``deactivate``.
+
+**Important:** Remember to activate the virtual environment with the ``workon``
+command **every time you begin working on your project**. Also, note that the
+currently active virtual environment is *not* associated with any particular
+folder; it is associated with a terminal shell. In other words, each time you
+open a new Terminal tab or terminal window, you need to run ``workon
+<name of virtual environment>`` (see the `virtualenvwrapper documentation <http://virtualenvwrapper.readthedocs.org/en/latest/>`__).
+When a virtual environment is active, your command-line prompt will be
+prepended with the name of the virtual environment in parentheses.
+
+Once you've checked that the new virtual environment is active, you're finally
+ready to install PyPhi into it (note that this may take a few minutes):
+
+.. code:: bash
+
+    pip install pyphi
+
+Congratulations, you've just installed PyPhi!
+
+To play around with the software, ensure that you've activated the virtual
+environment with ``workon <name of virtual environment>``. Then run ``python`` to
+start a Python 3 interpreter. Then, in the interpreter's command-line (which is
+preceded by the ``>>>`` prompt), run
+
+..
code:: python + + import pyphi + +Optionally, you can also install `IPython `__ with ``pip +install ipython`` to get a more useful Python interpreter that offers things +like tab-completion. Once you've installed it, you can start the IPython +interpreter with the command ``ipython``. + +Next, please see the documentation for some `examples +`__ of how to use PyPhi and +information on how to `configure +`__ it. diff --git a/Makefile b/Makefile index b87c6f135..86ac185a8 100644 --- a/Makefile +++ b/Makefile @@ -8,6 +8,7 @@ docs_build = docs/_build docs_html = docs/_build/html benchmarks = benchmarks dist_dir = dist +docs_port = 1337 test: coverage watch-tests @@ -23,7 +24,7 @@ watch-tests: --patterns="*.py;*.rst" $(src) $(tests) $(docs) # TODO: watch test config files -docs: build-docs open-docs +docs: build-docs watch-docs: docs watchmedo shell-command \ @@ -39,8 +40,11 @@ build-docs: cp $(docs)/_static/*.css $(docs_html)/_static cp $(docs)/_static/*.png $(docs_html)/_static +serve-docs: build-docs + cd $(docs_html) && python -m http.server $(docs_port) + open-docs: - open $(docs_html)/index.html + open http://0.0.0.0:$(docs_port) upload-docs: build-docs cp -r $(docs_html) ../pyphi-docs diff --git a/README.rst b/README.rst index 96a6636dc..26f73fe46 100644 --- a/README.rst +++ b/README.rst @@ -1,15 +1,11 @@ .. raw:: html - PyPhi logo + PyPhi logo | -.. image:: https://img.shields.io/badge/DOI-10.5281%20%2F%20zenodo.636912-blue.svg?style=flat-square&maxAge=86400 - :target: https://doi.org/10.5281/zenodo.636912 - :alt: Zenodo DOI badge - -.. image:: https://readthedocs.org/projects/pyphi/badge/?version=latest&style=flat-square&maxAge=600 - :target: https://pyphi.readthedocs.io/en/latest/?badge=latest +.. image:: https://readthedocs.org/projects/pyphi/badge/?style=flat-square&maxAge=600 + :target: https://pyphi.readthedocs.io/ :alt: Documentation badge .. 
image:: https://img.shields.io/travis/wmayner/pyphi.svg?style=flat-square&maxAge=600 @@ -36,28 +32,6 @@ associated quantities and objects. **If you use this code, please cite it, as well as the** `IIT 3.0 paper `_. -To cite the code, use the Zenodo DOI for the verison you used. The latest one -is `10.5281/zenodo.636912 `_. -For example:: - - Mayner, William GP et al. (2017). PyPhi: 0.9.1. Zenodo. 10.5281/zenodo.636912 - -Or in BibTeX:: - - @misc{pyphi, - author = {Mayner, William Gerald Paul and - Marshall, William and - Marchman, Bo}, - title = {PyPhi: 0.9.1}, - month = Dec, - year = 2017, - doi = {10.5281/zenodo.636912}, - url = {http://dx.doi.org/10.5281/zenodo.636912} - } - -(Just make sure to use the version number, DOI, and URL for the version you -actually used.) - Usage, Examples, and API documentation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -87,10 +61,8 @@ have bugs, run: pip install "git+https://github.com/wmayner/pyphi@develop#egg=pyphi" -**Note:** this software has only been tested on the Mac OS X and Linux -operating systems. Windows is not supported, though it might work with minor -modifications. If you do get it to work, a writeup of the steps would be much -appreciated! +**Note:** this software is only supported on Linux and macOS. Windows is not +supported, though it might work with minor modifications. Detailed installation guide for Mac OS X @@ -106,7 +78,7 @@ For technical issues with PyPhi or feature requests, please use the `issues page `_. For discussion about the software or integrated information theory in general, -you can join the `PyPhi users group +you can join the `pyphi-users group `_. @@ -155,9 +127,9 @@ installing the requirements: Credits ~~~~~~~ -This code is based on a `previous project `_ -written in Matlab by L. Albantakis, M. Oizumi, A. Hashmi, A. Nere, U. Olces, P. -Rana, and B. Shababo. +This code is inspired by a `previous project +`_ written in Matlab by L. Albantakis, M. +Oizumi, A. Hashmi, A. Nere, U. 
Olces, P. Rana, and B. Shababo. Correspondence regarding the Matlab code and the IIT 3.0 paper (below) should be directed to Larissa Albantakis, PhD, at `albantakis@wisc.edu @@ -171,7 +143,6 @@ Mechanisms of Consciousness: Integrated Information Theory 3.0 `_. PLoS Comput Biol 10(5): e1003588. doi: 10.1371/journal.pcbi.1003588 - .. code:: latex @article{iit3, diff --git a/benchmarks/benchmarks/compute.py b/benchmarks/benchmarks/compute.py index 6489d7cfe..fe22a8f67 100644 --- a/benchmarks/benchmarks/compute.py +++ b/benchmarks/benchmarks/compute.py @@ -105,12 +105,12 @@ def setup(self, mode, network, cache): else: raise ValueError(cache) - config.CACHE_BIGMIPS = False + config.CACHE_SIAS = False def teardown(self, mode, network, cache): # Revert config config.__dict__.update(self.default_config) - def time_main_complex(self, mode, network, cache): + def time_major_complex(self, mode, network, cache): # Do it! - compute.main_complex(self.network, self.state) + compute.major_complex(self.network, self.state) diff --git a/benchmarks/benchmarks/subsystem.py b/benchmarks/benchmarks/subsystem.py index b7e328230..cb3a278eb 100644 --- a/benchmarks/benchmarks/subsystem.py +++ b/benchmarks/benchmarks/subsystem.py @@ -1,6 +1,7 @@ import copy from pyphi import Subsystem, compute, config, examples +from pyphi.direction import Direction """ @@ -82,7 +83,7 @@ def time_effect_repertoire_cache(self): def _do_potential_purviews(self): for i in range(100): - self.subsys.potential_purviews('past', self.idxs) + self.subsys.potential_purviews(Direction.CAUSE, self.idxs) def time_potential_purviews_no_cache(self): # Network purview caches disabled @@ -126,7 +127,7 @@ def setup(self, distance): config.PARALLEL_CUT_EVALUATION = False def time_L1_approximation(self, distance): - compute.main_complex(self.network, self.state) + compute.major_complex(self.network, self.state) def teardown(self, distance): config.__dict__.update(self.default_config) diff --git a/benchmarks/time_emd.py 
b/benchmarks/time_emd.py index bd746f5e5..f5b49111a 100644 --- a/benchmarks/time_emd.py +++ b/benchmarks/time_emd.py @@ -410,7 +410,7 @@ def patched_cause_emd(d1, d2): PARALLEL_CUT_EVALUATION=False): s = eights_complete() - phi = pyphi.compute.big_phi(s) + phi = pyphi.compute.phi(s) assert phi > 0 # Ensure meaningful computation report() diff --git a/conftest.py b/conftest.py index b3a266e31..27f9a4625 100644 --- a/conftest.py +++ b/conftest.py @@ -41,7 +41,7 @@ def pytest_runtest_setup(item): @pytest.fixture(scope='function') -def restore_config_afterwards(request): +def restore_config_afterwards(request): # pylint: disable=unused-argument '''Reset PyPhi configuration after a test. Useful for doctests that can't be decorated with `config.override`. diff --git a/docs/api/cache.rst b/docs/api/cache.rst new file mode 100644 index 000000000..6a75f7c4c --- /dev/null +++ b/docs/api/cache.rst @@ -0,0 +1,8 @@ +.. _cache: + +:mod:`cache` +============ + +.. automodule:: pyphi.cache + :members: + :undoc-members: diff --git a/docs/api/compute.big_phi.rst b/docs/api/compute.big_phi.rst deleted file mode 100644 index 1598c792b..000000000 --- a/docs/api/compute.big_phi.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _compute.big_phi: - -:mod:`compute.big_phi` -====================== - -.. automodule:: pyphi.compute.big_phi - :members: - :undoc-members: diff --git a/docs/api/compute.concept.rst b/docs/api/compute.concept.rst deleted file mode 100644 index 8d617aeae..000000000 --- a/docs/api/compute.concept.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _compute.concept: - -:mod:`compute.concept` -====================== - -.. automodule:: pyphi.compute.concept - :members: - :undoc-members: diff --git a/docs/api/compute.network.rst b/docs/api/compute.network.rst new file mode 100644 index 000000000..d62d4924d --- /dev/null +++ b/docs/api/compute.network.rst @@ -0,0 +1,8 @@ +.. _compute.network: + +:mod:`compute.network` +====================== + +.. 
automodule:: pyphi.compute.network + :members: + :undoc-members: diff --git a/docs/api/compute.parallel.rst b/docs/api/compute.parallel.rst new file mode 100644 index 000000000..da9375448 --- /dev/null +++ b/docs/api/compute.parallel.rst @@ -0,0 +1,8 @@ +.. _compute.parallel: + +:mod:`compute.parallel` +======================= + +.. automodule:: pyphi.compute.parallel + :members: + :undoc-members: diff --git a/docs/api/compute.rst b/docs/api/compute.rst new file mode 100644 index 000000000..205b54c1f --- /dev/null +++ b/docs/api/compute.rst @@ -0,0 +1,8 @@ +.. _compute: + +:mod:`compute` +============== + +.. automodule:: pyphi.compute + :members: + :undoc-members: diff --git a/docs/api/compute.subsystem.rst b/docs/api/compute.subsystem.rst new file mode 100644 index 000000000..25c3bac15 --- /dev/null +++ b/docs/api/compute.subsystem.rst @@ -0,0 +1,8 @@ +.. _compute.subsystem: + +:mod:`compute.subsystem` +======================== + +.. automodule:: pyphi.compute.subsystem + :members: + :undoc-members: diff --git a/docs/api/distribution.rst b/docs/api/distribution.rst new file mode 100644 index 000000000..0b859a7eb --- /dev/null +++ b/docs/api/distribution.rst @@ -0,0 +1,8 @@ +.. _distribution: + +:mod:`distribution` +=================== + +.. automodule:: pyphi.distribution + :members: + :undoc-members: diff --git a/docs/api/models.big_phi.rst b/docs/api/models.big_phi.rst deleted file mode 100644 index 59a67c89c..000000000 --- a/docs/api/models.big_phi.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _models.big_phi: - -:mod:`models.big_phi` -===================== - -.. automodule:: pyphi.models.big_phi - :members: - :undoc-members: diff --git a/docs/api/models.concept.rst b/docs/api/models.concept.rst deleted file mode 100644 index ca0fe000c..000000000 --- a/docs/api/models.concept.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _models.concept: - -:mod:`models.concept` -===================== - -.. 
automodule:: pyphi.models.concept - :members: - :undoc-members: diff --git a/docs/api/models.mechanism.rst b/docs/api/models.mechanism.rst new file mode 100644 index 000000000..c7e173f83 --- /dev/null +++ b/docs/api/models.mechanism.rst @@ -0,0 +1,8 @@ +.. _models.mechanism: + +:mod:`models.mechanism` +======================= + +.. automodule:: pyphi.models.mechanism + :members: + :undoc-members: diff --git a/docs/api/models.rst b/docs/api/models.rst new file mode 100644 index 000000000..a79528afa --- /dev/null +++ b/docs/api/models.rst @@ -0,0 +1,8 @@ +.. _models: + +:mod:`models` +============= + +.. automodule:: pyphi.models + :members: + :undoc-members: diff --git a/docs/api/models.subsystem.rst b/docs/api/models.subsystem.rst new file mode 100644 index 000000000..056a49420 --- /dev/null +++ b/docs/api/models.subsystem.rst @@ -0,0 +1,8 @@ +.. _models.subsystem: + +:mod:`models.subsystem` +======================= + +.. automodule:: pyphi.models.subsystem + :members: + :undoc-members: diff --git a/docs/conf.py b/docs/conf.py index 7e9a299d3..fd83a23d3 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -62,7 +62,7 @@ # General information about the project. project = 'PyPhi' -copyright = '2014–2017 {}'.format(__author__) +copyright = '2014--2017 {}'.format(__author__) # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -117,6 +117,7 @@ r""" .. |big_phi| replace:: :math:`\Phi` .. |big_phi > 0| replace:: :math:`\Phi > 0` +.. |big_phi = 0| replace:: :math:`\Phi = 0` .. |big_phi_max| replace:: :math:`\Phi^{\textrm{max}}` .. |small_phi| replace:: :math:`\varphi` .. |small_phi > 0| replace:: :math:`\varphi > 0` @@ -151,10 +152,11 @@ .. |t| replace:: :math:`t` .. |t-1| replace:: :math:`t-1` .. |t+1| replace:: :math:`t+1` +.. |n+1| replace:: :math:`n+1` .. |1,0,0| replace:: :math:`\{1,0,0\}` .. |0,1,0| replace:: :math:`\{0,1,0\}` .. 
|0,0,1| replace:: :math:`\{0,0,1\}` -.. |n0 = 0, n1 = 0, n2 = 1| replace:: :math:`(n_0 = 0, n_1 = 0, n_2 = 1)` +.. |N_0 = 0, N_1 = 0, N_2 = 1| replace:: :math:`N_0 = 0, N_1 = 0, N_2 = 1` .. |ith| replace:: :math:`i^{\textrm{th}}` .. |jth| replace:: :math:`j^{\textrm{th}}` .. |(i,j)| replace:: :math:`(i,j)` @@ -189,8 +191,8 @@ """, # Constants r""" -.. |PAST| replace:: :const:`~pyphi.direction.Direction.PAST` -.. |FUTURE| replace:: :const:`~pyphi.direction.Direction.FUTURE` +.. |CAUSE| replace:: :const:`~pyphi.direction.Direction.CAUSE` +.. |EFFECT| replace:: :const:`~pyphi.direction.Direction.EFFECT` .. |EPSILON| replace:: :const:`~pyphi.constants.EPSILON` .. |PICK_SMALLEST_PURVIEW| replace:: :const:`~pyphi.config.PICK_SMALLEST_PURVIEW` .. |PARTITION_TYPE| replace:: :const:`~pyphi.config.PARTITION_TYPE` @@ -199,16 +201,14 @@ # Modules r""" .. |compute| replace:: :mod:`~pyphi.compute` -.. |compute.distance| replace:: :mod:`~pyphi.compute.distance` -.. |compute.subsystems| replace:: :func:`~pyphi.compute.big_phi.subsystems` -.. |compute.possible_complexes| replace:: :func:`~pyphi.compute.big_phi.possible_complexes` -.. |compute.complexes| replace:: :func:`~pyphi.compute.big_phi.complexes` -.. |compute.all_complexes| replace:: :func:`~pyphi.compute.big_phi.all_complexes` -.. |compute.condensed| replace:: :func:`~pyphi.compute.big_phi.condensed` +.. |compute.distance| replace:: :mod:`pyphi.compute.distance` +.. |compute.network| replace:: :mod:`pyphi.compute.network` +.. |compute.parallel| replace:: :mod:`pyphi.compute.parallel` +.. |compute.subsystem| replace:: :mod:`pyphi.compute.subsystem` -.. |models.big_phi| replace:: :mod:`~pyphi.models.big_phi` -.. |models.concept| replace:: :mod:`~pyphi.models.concept` -.. |models.cuts| replace:: :mod:`~pyphi.models.cuts` +.. |models.subsystem| replace:: :mod:`pyphi.models.subsystem` +.. |models.mechanism| replace:: :mod:`pyphi.models.mechanism` +.. |models.cuts| replace:: :mod:`pyphi.models.cuts` .. 
|network| replace:: :mod:`~pyphi.network` .. |subsystem| replace:: :mod:`~pyphi.subsystem` @@ -224,16 +224,22 @@ """, # Functions r""" -.. |compute.conceptual_information| replace:: :func:`~pyphi.compute.big_phi.conceptual_information` -.. |compute.big_mip| replace:: :func:`~pyphi.compute.big_phi.big_mip` +.. |compute.conceptual_info()| replace:: :func:`~pyphi.compute.subsystem.conceptual_info` +.. |compute.sia()| replace:: :func:`~pyphi.compute.subsystem.sia` +.. |compute.phi()| replace:: :func:`~pyphi.compute.subsystem.phi` -.. |compute.concept| replace:: :func:`~pyphi.compute.concept.concept` -.. |compute.big_phi| replace:: :func:`~pyphi.compute.big_phi.big_phi` +.. |compute.subsystems()| replace:: :func:`~pyphi.compute.network.subsystems` +.. |compute.possible_complexes()| replace:: :func:`~pyphi.compute.network.possible_complexes` +.. |compute.complexes()| replace:: :func:`~pyphi.compute.network.complexes` +.. |compute.all_complexes()| replace:: :func:`~pyphi.compute.network.all_complexes` +.. |compute.condensed()| replace:: :func:`~pyphi.compute.network.condensed` -.. |configure_logging| replace:: :func:`~pyphi.config.configure_logging` +.. |Subsystem.clear_caches()| replace:: :func:`~pyphi.subsystem.Subsystem.clear_caches` -.. |loli_index2state| replace:: :func:`~pyphi.convert.loli_index2state` -.. |holi_index2state| replace:: :func:`~pyphi.convert.holi_index2state` +.. |configure_logging()| replace:: :func:`~pyphi.config.configure_logging` + +.. |le_index2state()| replace:: :func:`~pyphi.convert.le_index2state` +.. |be_index2state()| replace:: :func:`~pyphi.convert.be_index2state` """, # Classes r""" @@ -241,10 +247,11 @@ .. |Subsystem| replace:: :class:`~pyphi.subsystem.Subsystem` -.. |BigMip| replace:: :class:`~pyphi.models.big_phi.BigMip` +.. |SystemIrreducibilityAnalysis| replace:: :class:`~pyphi.models.subsystem.SystemIrreducibilityAnalysis` +.. |SIA| replace:: :class:`~pyphi.models.subsystem.SystemIrreducibilityAnalysis` +.. 
|CauseEffectStructure| replace:: :class:`~pyphi.models.subsystem.CauseEffectStructure` -.. |Concept| replace:: :class:`~pyphi.models.concept.Concept` -.. |Constellation| replace:: :class:`~pyphi.models.concept.Constellation` +.. |Concept| replace:: :class:`~pyphi.models.mechanism.Concept` .. |Cut| replace:: :class:`~pyphi.models.cuts.Cut` .. |Cuts| replace:: :class:`~pyphi.models.cuts.Cut` @@ -252,8 +259,11 @@ .. |Parts| replace:: :class:`~pyphi.models.cuts.Part` .. |Bipartition| replace:: :class:`~pyphi.models.cuts.Bipartition` -.. |Mip| replace:: :class:`~pyphi.models.concept.Mip` -.. |Mice| replace:: :class:`~pyphi.models.concept.Mice` +.. |RepertoireIrreducibilityAnalysis| replace:: :class:`~pyphi.models.mechanism.RepertoireIrreducibilityAnalysis` +.. |MaximallyIrreducibleCauseOrEffect| replace:: :class:`~pyphi.models.mechanism.MaximallyIrreducibleCauseOrEffect` +.. |MICE| replace:: :class:`~pyphi.models.mechanism.MaximallyIrreducibleCauseOrEffect` +.. |MIC| replace:: :class:`~pyphi.models.mechanism.MaximallyIrreducibleCause` +.. |MIE| replace:: :class:`~pyphi.models.mechanism.MaximallyIrreducibleEffect` .. |Node| replace:: :class:`~pyphi.node.Node` .. |Nodes| replace:: :class:`~pyphi.node.Node` @@ -265,8 +275,9 @@ .. |Blackbox| replace:: :class:`~pyphi.macro.Blackbox` .. |Transition| replace:: :class:`~pyphi.actual.Transition` -.. |AcBigMip| replace:: :class:`~pyphi.models.actual_causation.AcBigMip` -.. |AcMip| replace:: :class:`~pyphi.models.actual_causation.AcMip` + +.. |AcSystemIrreducibilityAnalysis| replace:: :class:`~pyphi.models.actual_causation.AcSystemIrreducibilityAnalysis` +.. |AcRepertoireIrreducibilityAnalysis| replace:: :class:`~pyphi.models.actual_causation.AcRepertoireIrreducibilityAnalysis` .. |DirectedAccount| replace:: :class:`~pyphi.models.actual_causation.DirectedAccount` .. |Account| replace:: :class:`~pyphi.models.actual_causation.Account` .. |Event| replace:: :class:`~pyphi.models.actual_causation.Event` @@ -275,13 +286,13 @@ .. 
|ConditionallyDependentError| replace:: :class:`~pyphi.exceptions.ConditionallyDependentError` -.. |MiceCache| replace:: :class:`~pyphi.cache.MiceCache` +.. |MICECache| replace:: :class:`~pyphi.cache.MICECache` """, # Methods r""" .. |Subsystem.concept| replace:: :meth:`~pyphi.subsystem.Subsystem.concept` -.. |Subsystem.core_cause| replace:: :meth:`~pyphi.subsystem.Subsystem.core_cause` -.. |Subsystem.core_effect| replace:: :meth:`~pyphi.subsystem.Subsystem.core_effect` +.. |Subsystem.mic| replace:: :meth:`~pyphi.subsystem.Subsystem.mic` +.. |Subsystem.mie| replace:: :meth:`~pyphi.subsystem.Subsystem.mie` .. |expand_repertoire| replace:: :meth:`~pyphi.subsystem.Subsystem.expand_repertoire` .. |find_mip| replace:: :meth:`~pyphi.subsystem.Subsystem.find_mip` .. |find_mice| replace:: :meth:`~pyphi.subsystem.Subsystem.find_mice` diff --git a/docs/conventions.rst b/docs/conventions.rst index 652660bb5..6af074345 100644 --- a/docs/conventions.rst +++ b/docs/conventions.rst @@ -1,90 +1,169 @@ -.. _cm-conventions: +.. _tpm-conventions: -Connectivity matrix conventions -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Transition probability matrix conventions +========================================= -Throughout PyPhi, if |CM| is a connectivity matrix, then |CM[i][j] = 1| means -that there is a directed edge |(i,j)| from node |i| to node |j|, and -|CM[i][j] = 0| means there is no edge from |i| to |j|. +A |Network| can be created with a transition probability matrix (TPM) in any of +the three forms described below. However, in PyPhi the canonical TPM +representation is **multidimensional state-by-node form**. The TPM will be +converted to this form when the |Network| is built. -For example, this network of four nodes +.. tip:: + Functions for converting TPMs from one form to another are available in the + |convert| module. -.. image:: _static/connectivity-matrix-example-network.png - :width: 150px -has the following connectivity matrix: +.. 
_state-by-node-form: - >>> cm = [[0, 0, 1, 0], - ... [1, 0, 1, 0], - ... [0, 1, 0, 1], - ... [0, 0, 0, 1]] +State-by-node form +~~~~~~~~~~~~~~~~~~ +A TPM in **state-by-node form** is a matrix where the entry |(i,j)| gives the +probability that the |jth| node will be ON at time |t+1| if the system is in +the |ith| state at time |t|. -.. _tpm-conventions: -Transition probability matrix conventions -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. _multidimensional-state-by-node-form: + +Multidimensional state-by-node form +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A TPM in **multidimensional state-by-node** form is a state-by-node form that +has been reshaped so that it has |n+1| dimensions instead of two. The first |n| +dimensions correspond to each of the |n| nodes at time |t|, while the last +dimension corresponds to the probabilities of each node being ON at |t+1|. + +With this form, we can take advantage of `NumPy array indexing +`_ and use a +network state as an index directly: + + >>> from pyphi.examples import basic_noisy_selfloop_network + >>> tpm = basic_noisy_selfloop_network().tpm + >>> state = (0, 0, 1) # A network state is a binary tuple + >>> tpm[state] + array([ 0.919, 0.91 , 0.756]) + +This tells us that if the current state is |N_0 = 0, N_1 = 0, N_2 = 1|, then +the for the next state, :math:`P(N_0 = 1) = 0.919`, :math:`P(N_1 = 1) = 0.91` +and :math:`P(N_2 = 1) = 0.756`. + +.. important:: + The multidimensional state-by-node form is used throughout PyPhi, + regardless of the form that was used to create the |Network|. + +.. _state-by-state-form: + +State-by-state form +~~~~~~~~~~~~~~~~~~~ + +A TPM in **state-by-state form** is a matrix where the entry |(i,j)| gives the +probability that the state at time |t+1| will be |j| if the state at time |t| +is labeled by |i|. + +.. 
warning:: + **When converting a state-by-state TPM to one of the other forms, information + may be lost!** + + This is because the space of possible state-by-state TPMs is larger than + the space of state-by-node TPMs (so the conversion cannot be injective). + However, if we restrict the state-by-state TPMs to only those that satisfy + the conditional independence property, then the mapping becomes bijective. + + See :ref:`conditional-independence` for a more detailed discussion. + + +.. _little-endian-convention: + +Little-endian convention +~~~~~~~~~~~~~~~~~~~~~~~~ + +Even after choosing one of the above representations, there are several ways to +write down the TPM. -There are several ways to write down a transition probability matrix (TPM). With both state-by-state and state-by-node TPMs, one is confronted with a choice about which rows correspond to which states. In state-by-state TPMs, this choice must also be made for the columns. -Either the first node changes state every other row (**LOLI**): - - +--------------------+---------------------------------+ - | State at :math:`t` | :math:`P(N = 1)` at :math:`t+1` | - +--------------------+-----+---------------------------+ - | A, B | A | B | - +====================+=====+===========================+ - | (0, 0) | 0.1 | 0.2 | - +--------------------+-----+---------------------------+ - | (1, 0) | 0.3 | 0.4 | - +--------------------+-----+---------------------------+ - | (0, 1) | 0.5 | 0.6 | - +--------------------+-----+---------------------------+ - | (1, 1) | 0.7 | 0.8 | - +--------------------+-----+---------------------------+ - -Or the last node does (**HOLI**): - - +--------------------+---------------------------------+ - | State at :math:`t` | :math:`P(N = 1)` at :math:`t+1` | - +--------------------+-----+---------------------------+ - | A, B | A | B | - +====================+=====+===========================+ - | (0, 0) | 0.1 | 0.2 | - +--------------------+-----+---------------------------+ - | (0, 1) 
| 0.5 | 0.6 | - +--------------------+-----+---------------------------+ - | (1, 0) | 0.3 | 0.4 | - +--------------------+-----+---------------------------+ - | (1, 1) | 0.7 | 0.8 | - +--------------------+-----+---------------------------+ +There are two possible choices for the rows. Either the first node changes +state every other row: + + +--------------------+----------------------------------+ + | State at :math:`t` | :math:`P(N = ON)` at :math:`t+1` | + +--------------------+-----+----------------------------+ + | A, B | A | B | + +====================+=====+============================+ + | (0, 0) | 0.1 | 0.2 | + +--------------------+-----+----------------------------+ + | (1, 0) | 0.3 | 0.4 | + +--------------------+-----+----------------------------+ + | (0, 1) | 0.5 | 0.6 | + +--------------------+-----+----------------------------+ + | (1, 1) | 0.7 | 0.8 | + +--------------------+-----+----------------------------+ + +Or the last node does: + + +--------------------+----------------------------------+ + | State at :math:`t` | :math:`P(N = ON)` at :math:`t+1` | + +--------------------+-----+----------------------------+ + | A, B | A | B | + +====================+=====+============================+ + | (0, 0) | 0.1 | 0.2 | + +--------------------+-----+----------------------------+ + | (0, 1) | 0.5 | 0.6 | + +--------------------+-----+----------------------------+ + | (1, 0) | 0.3 | 0.4 | + +--------------------+-----+----------------------------+ + | (1, 1) | 0.7 | 0.8 | + +--------------------+-----+----------------------------+ Note that the index |i| of a row in a TPM encodes a network state: convert the index to binary, and each bit gives the state of a node. The question is, which node? 
-**Throughout PyPhi, we always choose the first convention—the state of the +**Throughout PyPhi, we always choose the first convention---the state of the first node (the one with the lowest index) varies the fastest.** So, the -lowest-order bit—the one's place—gives the state of the lowest-index node. +least-signficant bit---the one's place---gives the state of the lowest-index +node. -We call this convention the **LOLI convention**: Low Order bits correspond to -Low Index nodes. The other convention, where the highest-index node varies the -fastest, is similarly called **HOLI**. +This is analogous to the little-endian convention in organizing computer +memory. The other convention, where the highest-index node varies the fastest, +is analogous to the big-endian convention (see `Endianness +`_). -The rationale for this choice of convention is that the **LOLI** mapping is -stable under changes in the number of nodes, in the sense that the same bit -always corresponds to the same node index. The **HOLI** mapping does not have +The rationale for this choice of convention is that the little-endian mapping +is stable under changes in the number of nodes, in the sense that the same bit +always corresponds to the same node index. The big-endian mapping does not have this property. +.. tip:: + Functions to convert states to indices and vice versa, according to either + the little-endian or big-endian convention, are available in the |convert| + module. + .. note:: This applies to only situations where decimal indices are encoding states. Whenever a network state is represented as a list or tuple, we use the only sensible convention: the |ith| element gives the state of the |ith| node. -.. tip:: - There are various conversion functions available for converting between - TPMs, states, and indices using different conventions: see the - :mod:`pyphi.convert` module. + +.. 
_cm-conventions: + +Connectivity matrix conventions +=============================== + +Throughout PyPhi, if |CM| is a connectivity matrix, then |CM[i][j] = 1| means +that there is a directed edge |(i,j)| from node |i| to node |j|, and +|CM[i][j] = 0| means there is no edge from |i| to |j|. + +For example, this network of four nodes + +.. image:: _static/connectivity-matrix-example-network.png + :width: 150px + +has the following connectivity matrix: + + >>> cm = [[0, 0, 1, 0], + ... [1, 0, 1, 0], + ... [0, 1, 0, 1], + ... [0, 0, 0, 1]] diff --git a/docs/examples/2014paper.rst b/docs/examples/2014paper.rst index 5a9f091e2..ac61ca7fa 100644 --- a/docs/examples/2014paper.rst +++ b/docs/examples/2014paper.rst @@ -38,9 +38,8 @@ we'll start by defining its TPM. and a column for each state. However, in PyPhi, we use a more compact representation: **state-by-node** form, in which there is a row for each state, but a column for each node. The |i,jth| entry gives the probability - that the |jth| node is on in the |ith| state. For more information on how - TPMs are represented in PyPhi, see the documentation for the |network| - module and the explanation of :ref:`tpm-conventions`. + that the |jth| node is ON in the |ith| state. For more information on how + TPMs are represented in PyPhi, see :ref:`tpm-conventions`. In the figure, the TPM is shown only for the candidate set. We'll define the entire network's TPM. Also, nodes |D|, |E| and |F| are not assigned mechanisms; @@ -136,13 +135,11 @@ this network's connectivity matrix is Now we can pass the TPM and connectivity matrix as arguments to the network constructor: - >>> network = pyphi.Network(tpm, connectivity_matrix=cm) + >>> network = pyphi.Network(tpm, cm=cm) Now the network shown in the figure is stored in a variable called ``network``. You can find more information about the network object we just created by -running ``help(network)`` or by consulting the `API -`_ -documentation for |Network|. 
+running ``help(network)`` or by consulting the documentation for |Network|. The next step is to define the candidate set shown in the figure, consisting of nodes |A|, |B| and |C|. In PyPhi, a candidate set for |big_phi| evaluation is @@ -153,7 +150,7 @@ be included in the subsystem. So, we define our candidate set like so: >>> state = (1, 0, 0, 0, 1, 0) >>> ABC = pyphi.Subsystem(network, state, [0, 1, 2]) -For more information on the subsystem object, see the API documentation for +For more information on the subsystem object, see the documentation for |Subsystem|. That covers the basic workflow with PyPhi and introduces the two types of @@ -179,7 +176,7 @@ nodes. In this case, the subsystem is just the entire network. >>> A, B, C, D = subsystem.node_indices Since the connections are noisy, we see that |A = 1| is unselective; all -past states are equally likely: +previous states are equally likely: >>> subsystem.cause_repertoire((A,), (B, C, D)) array([[[[ 0.125, 0.125], @@ -220,7 +217,7 @@ singleton dimension with >>> cr = cr.squeeze() -and now we can see that the probability of |B|, |C|, and |D| having been all on +and now we can see that the probability of |B|, |C|, and |D| having been all ON is 1: >>> cr[(1, 1, 1)] @@ -242,7 +239,7 @@ The same as (B) but with |A = 0|: >>> A, B, C, D = subsystem.node_indices And here the cause repertoire is minimally selective, only ruling out the state -where |B|, |C|, and |D| were all on: +where |B|, |C|, and |D| were all ON: >>> subsystem.cause_repertoire((A,), (B, C, D)) array([[[[ 0.14285714, 0.14285714], @@ -403,8 +400,8 @@ irreducible to the information generated by its parts.** Here we demonstrate the functions that find the minimum information partition a mechanism over a purview: - >>> mip_c = subsystem.mip_past(ABC, ABC) - >>> mip_e = subsystem.mip_future(ABC, ABC) + >>> mip_c = subsystem.cause_mip(ABC, ABC) + >>> mip_e = subsystem.effect_mip(ABC, ABC) These objects contain the 
:math:`\varphi^{\textrm{MIP}}_{\textrm{cause}}` and :math:`\varphi^{\textrm{MIP}}_{\textrm{effect}}` values in their respective @@ -424,10 +421,10 @@ attributes: ─── ✕ ───── 1 0,2 -For more information on these objects, see the API documentation for the |Mip| -class, or use ``help(mip_c)``. +For more information on these objects, see the documentation for the +|RepertoireIrreducibilityAnalysis| class, or use ``help(mip_c)``. -Note that the minimal partition found for the past is +Note that the minimal partition found for the cause is .. math:: \frac{A^{c}}{\varnothing} \times \frac{BC^{c}}{ABC^{p}}, @@ -447,7 +444,7 @@ Figure 7 causes and integrated effects.** It is left as an exercise for the reader to use the subsystem methods -``mip_past`` and ``mip_future``, introduced in the previous section, to +``cause_mip`` and ``effect_mip``, introduced in the previous section, to demonstrate the points made in Figure 7. To avoid building TPMs and connectivity matrices by hand, you can use the @@ -473,16 +470,20 @@ the “core cause” specified by a mechanism.** >>> subsystem = pyphi.Subsystem(network, state, range(network.size)) >>> A, B, C = subsystem.node_indices -To find the core cause of a mechanism over all purviews, we just use the -subsystem method of that name: +In PyPhi, the “core cause” is called the *maximally-irreducible cause* (MIC). +To find the MIC of a mechanism over all purviews, use the |Subsystem.mic| +method: - >>> core_cause = subsystem.core_cause((B, C)) - >>> core_cause.phi + >>> mic = subsystem.mic((B, C)) + >>> mic.phi 0.333334 -For a detailed description of the objects returned by the -|Subsystem.core_cause| and |Subsystem.core_effect| methods, see the API -documentation for |Mice| or use ``help(subsystem.core_cause)``. +Similarly, the |Subsystem.mie| method returns the “core effect” or +*maximally-irreducible effect* (MIE). 
+ +For a detailed description of the MIC and MIE objects returned by these +methods, see the documentation for |MIC| or use ``help(subsystem.mic)`` and +``help(subsystem.mie)``. Figure 9 @@ -493,15 +494,15 @@ Figure 9 This figure and the next few use the same network as in Figure 8, so we don't need to reassign the ``network`` and ``subsystem`` variables. -Together, the core cause and core effect of a mechanism specify a “concept.” In -PyPhi, this is represented by the |Concept| object. Concepts are computed using -the |Subsystem.concept| method of a subsystem: +Together, the MIC and MIE of a mechanism specify a *concept*. In PyPhi, this is +represented by the |Concept| object. Concepts are computed using the +|Subsystem.concept| method of a subsystem: >>> concept_A = subsystem.concept((A,)) >>> concept_A.phi 0.166667 -As usual, please consult the API documentation or use ``help(concept_A)`` for a +As usual, please consult the documentation or use ``help(concept_A)`` for a detailed description of the |Concept| object. @@ -513,16 +514,17 @@ of all concepts generated by a set of elements in a state.** For functions of entire subsystems rather than mechanisms within them, we use the |compute| module. In this figure, we see the constellation of concepts of -the powerset of :math:`ABC`'s mechanisms. We can compute the constellation of -the subsystem like so: +the powerset of |ABC|'s mechanisms. A constellation of concepts is +represented in PyPhi by a |CauseEffectStructure|. 
We can compute the +cause-effect structure of the subsystem like so: - >>> constellation = pyphi.compute.constellation(subsystem) + >>> ces = pyphi.compute.ces(subsystem) And verify that the |small_phi| values match: - >>> constellation.labeled_mechanisms - [['A'], ['B'], ['C'], ['A', 'B'], ['B', 'C'], ['A', 'B', 'C']] - >>> constellation.phis + >>> ces.labeled_mechanisms + (['A'], ['B'], ['C'], ['A', 'B'], ['B', 'C'], ['A', 'B', 'C']) + >>> ces.phis [0.166667, 0.166667, 0.25, 0.25, 0.333334, 0.499999] The null concept (the small black cross shown in concept-space) is available as @@ -539,9 +541,9 @@ Figure 11 (constellation of concepts).** Conceptual information can be computed using the function named, as you might -expect, |compute.conceptual_information|: +expect, |compute.conceptual_info()|: - >>> pyphi.compute.conceptual_information(subsystem) + >>> pyphi.compute.conceptual_info(subsystem) 2.1111089999999999 @@ -551,24 +553,24 @@ Figure 12 **Assessing the integrated conceptual information Φ of a constellation C.** To calculate :math:`\Phi^{\textrm{MIP}}` for a candidate set, we use the -function |compute.big_mip|: +function |compute.sia()|: - >>> big_mip = pyphi.compute.big_mip(subsystem) + >>> sia = pyphi.compute.sia(subsystem) The returned value is a large object containing the :math:`\Phi^{\textrm{MIP}}` -value, the minimal cut, the constellation of concepts of the whole set and that -of the partitioned set :math:`C_{\rightarrow}^{\textrm{MIP}}`, the total -calculation time, the calculation time for just the unpartitioned -constellation, a reference to the subsystem that was analyzed, and a reference -to the subsystem with the minimal unidirectional cut applied. For details see -the API documentation for |BigMip| or use ``help(big_mip)``. 
+value, the minimal cut, the cause-effect structure of the whole set and that of +the partitioned set :math:`C_{\rightarrow}^{\textrm{MIP}}`, the total +calculation time, the calculation time for just the unpartitioned cause-effect +structure, a reference to the subsystem that was analyzed, and a reference to +the subsystem with the minimal unidirectional cut applied. For details see the +documentation for |SystemIrreducibilityAnalysis| or use ``help(sia)``. We can verify that the :math:`\Phi^{\textrm{MIP}}` value and minimal cut are as shown in the figure: - >>> big_mip.phi + >>> sia.phi 1.9166650000000001 - >>> big_mip.cut + >>> sia.cut Cut [0, 1] ━━/ /━━➤ [2] .. note:: @@ -594,14 +596,14 @@ Figure 14 >>> network = pyphi.examples.fig14() >>> state = (1, 0, 0, 0, 1, 0) -To find the subsystem within a network that is the main complex, we use the -function of that name, which returns a |BigMip| object: +To find the subsystem within a network that is the major complex, we use the +function of that name, which returns a |SystemIrreducibilityAnalysis| object: - >>> main_complex = pyphi.compute.main_complex(network, state) + >>> major_complex = pyphi.compute.major_complex(network, state) And we see that the nodes in the complex are indeed |A|, |B|, and |C|: - >>> main_complex.subsystem.nodes + >>> major_complex.subsystem.nodes (A, B, C) @@ -612,10 +614,11 @@ Figure 15 complex.** You can use the visual interface at -http://integratedinformationtheory.org/calculate.html to view a constellation -in a 3D projection of qualia space. The network in the figure is already built -for you; click the **Load Example** button and select “IIT 3.0 Paper, Figure 1” -(this network is the same as the candidate set in Figure 1). +http://integratedinformationtheory.org/calculate.html to view a conceptual +structure structure in a 3D projection of qualia space. 
The network in the +figure is already built for you; click the **Load Example** button and select +“IIT 3.0 Paper, Figure 1” (this network is the same as the candidate set in +Figure 1). Figure 16 @@ -624,15 +627,15 @@ Figure 16 **A system can condense into a major complex and minor complexes that may or may not interact with it.** -For this figure, we omit nodes |H|, |I|, |J|, |K| and |L|, since the TPM -of the full 12-node network is very large, and the point can be illustrated -without them. +For this figure, we omit nodes :math:`H`, :math:`I`, :math:`J`, :math:`K` and +:math:`L`, since the TPM of the full 12-node network is very large, and the +point can be illustrated without them. >>> network = pyphi.examples.fig16() >>> state = (1, 0, 0, 1, 1, 1, 0) To find the maximal set of non-overlapping complexes that a network condenses -into, use |compute.condensed|: +into, use |compute.condensed()|: >>> condensed = pyphi.compute.condensed(network, state) @@ -640,8 +643,8 @@ We find that there are two complexes: the major complex |ABC| with :math:`\Phi \approx 1.92`, and a minor complex |FG| with :math:`\Phi \approx 0.069` (note that there is typo in the figure: |FG|'s |big_phi| value should be |0.069|). Furthermore, the program has been updated to only consider background -conditions of current states, not past states; as a result the minor complex -|DE| shown in the paper no longer exists. +conditions of current states, not previous states; as a result the minor +complex |DE| shown in the paper no longer exists. >>> len(condensed) 2 @@ -652,11 +655,9 @@ conditions of current states, not past states; as a result the minor complex ((F, G), 0.069445) There are several other functions available for working with complexes; see the -documentation for |compute.subsystems|, |compute.all_complexes|, -|compute.possible_complexes|, and |compute.complexes|. 
+documentation for |compute.subsystems()|, |compute.all_complexes()|, +|compute.possible_complexes()|, and |compute.complexes()|. -.. |H| replace:: :math:`H` -.. |L| replace:: :math:`L` .. |A = 1| replace:: :math:`A = 1` .. |A = 0| replace:: :math:`A = 0` .. |1.5| replace:: :math:`1.5` diff --git a/docs/examples/actual_causation.rst b/docs/examples/actual_causation.rst index 0ec8bcdb3..a3761a4c3 100644 --- a/docs/examples/actual_causation.rst +++ b/docs/examples/actual_causation.rst @@ -1,7 +1,8 @@ Actual Causation ================ -This section demonstrates how to use PyPhi to evaluate actual causation as described in +This section demonstrates how to use PyPhi to evaluate actual causation as +described in `Albantakis L, Marshall W, Hoel E, Tononi G (2017). What caused what? An irreducible account of actual causation. arXiv:1708.06716 [cs.AI] @@ -23,9 +24,9 @@ Configuration Before we begin we need to set some configuration values. The correct way of partitioning for actual causation is using the ``'ALL'`` partitions setting; -``'TRI'``-partitions are a reasonable approximation. In case of ties the smaller -purview should be chosen. IIT 3.0 style bipartitions will give incorrect -results. +``'TRI'``-partitions are a reasonable approximation. In case of ties the +smaller purview should be chosen. IIT 3.0 style bipartitions will give +incorrect results. >>> config.PARTITION_TYPE = 'TRI' >>> config.PICK_SMALLEST_PURVIEW = True @@ -34,7 +35,7 @@ When calculating a causal account of the transition between a set of elements |X| at time |t-1| and a set of elements |Y| at time |t|, with |X| and |Y| being subsets of the same system, the transition should be valid according to the system's TPM. However, the state of |X| at |t-1| does not necessarily need to -have a valid past state so we can disable state validation: +have a valid previous state so we can disable state validation: >>> config.VALIDATE_SUBSYSTEM_STATES = False @@ -61,8 +62,8 @@ network. 
>>> OR = 0 >>> AND = 1 -We want to observe both elements at |t-1| and |t|, with ``OR`` on and ``AND`` -off in both observations: +We want to observe both elements at |t-1| and |t|, with ``OR`` ON and ``AND`` +OFF in both observations: >>> X = Y = (OR, AND) >>> X_state = Y_state = (1, 0) @@ -117,10 +118,11 @@ We can evaluate |alpha| for a particular pair of occurences, as in Figure 3C. For example, to find the irreducible effect ratio of |{OR, AND} -> {OR, AND}|, we use the ``find_mip`` method: - >>> link = transition.find_mip(Direction.FUTURE, (OR, AND), (OR, AND)) + >>> link = transition.find_mip(Direction.EFFECT, (OR, AND), (OR, AND)) -This returns a |AcMip| object, with a number of useful properties. This -particular MIP is reducible, as we can see by checking the value of |alpha|: +This returns a |AcRepertoireIrreducibilityAnalysis| object, with a number of +useful properties. This particular MIP is reducible, as we can see by checking +the value of |alpha|: >>> link.alpha 0.0 @@ -137,7 +139,7 @@ Let's look at the MIP for the irreducible occurence |Y_t = {OR, AND}| constraining |X_t-1 = {OR, AND}| (Figure 3D). 
This candidate causal link has positive |alpha|: - >>> link = transition.find_mip(Direction.PAST, (OR, AND), (OR, AND)) + >>> link = transition.find_mip(Direction.CAUSE, (OR, AND), (OR, AND)) >>> link.alpha 0.169925 @@ -180,14 +182,14 @@ Irreducible Accounts The irreducibility of the causal account of our transition of interest can be evaluated using the following function: - >>> big_mip = actual.big_acmip(transition) - >>> big_mip.alpha + >>> sia = actual.sia(transition) + >>> sia.alpha 0.169925 As shown in Figure 4, the second order occurence |Y_t = {OR, AND = 10}| is destroyed by the MIP: - >>> big_mip.partitioned_account # doctest: +NORMALIZE_WHITESPACE + >>> sia.partitioned_account # doctest: +NORMALIZE_WHITESPACE Account (4 causal links) ************************ @@ -200,8 +202,8 @@ destroyed by the MIP: The partition of the MIP is available in the ``cut`` property: - >>> big_mip.cut # doctest: +NORMALIZE_WHITESPACE - KCut PAST + >>> sia.cut # doctest: +NORMALIZE_WHITESPACE + KCut CAUSE ∅ 0 1 ─── ✕ ─── ✕ ─── ∅ 0 1 @@ -212,7 +214,8 @@ To find all irreducible accounts within the transition of interest, use >>> all_accounts = actual.nexus(network, X_state, Y_state) This computes |big_alpha| for all permutations of of elements in |X_t-1| and -|Y_t| and returns a ``tuple`` of all |AcBigMip| objects with |big_alpha > 0|: +|Y_t| and returns a ``tuple`` of all |AcSystemIrreducibilityAnalysis| objects +with |big_alpha > 0|: >>> for n in all_accounts: ... print(n.transition, n.alpha) diff --git a/docs/examples/conditional_independence.rst b/docs/examples/conditional_independence.rst index e5a2b7684..cd8b1f424 100644 --- a/docs/examples/conditional_independence.rst +++ b/docs/examples/conditional_independence.rst @@ -1,21 +1,33 @@ +.. 
_conditional-independence: + Conditional Independence ======================== +Conditional independence is the property of a TPM that *each node's state at +time* |t+1| *must be independent of the state of the others, given the state of +the network at time* |t|: + +.. math:: + P(S_{t+1} \mid S_t = s_t) \;= \prod_{N \,\in\, S} P(N_{t+1} \mid S_t = s_t) + \;, \quad \forall \; s_t \in S. + This example explores the assumption of conditional independence, and the behaviour of the program when it is not satisfied. Every state-by-node TPM corresponds to a unique state-by-state TPM which -satisfies the conditional independence assumption. If a state-by-node TPM is -given as input for a network, the program assumes that it is from a system with -the corresponding conditionally independent state-by-state TPM. - -When a state-by-state TPM is given as input for a network, the state-by-state -TPM is first converted to a state-by-node TPM. The program then assumes that -the system corresponds to the unique conditionally independent representation -of the state-by-node TPM. **If a non-conditionally independent TPM is given, -the analyzed system will not correspond to the original TPM**. Note that every -deterministic state-by-state TPM will automatically satisfy the conditional -independence assumption. +satisfies the conditional independence property (see :ref:`tpm-conventions` for +a discussion of the different TPM forms). If a state-by-node TPM is given as +input for a |Network|, PyPhi assumes that it is from a system with the +corresponding conditionally independent state-by-state TPM. + +When a state-by-state TPM is given as input for a |Network|, the state-by-state +TPM is first converted to a state-by-node TPM. PyPhi then assumes that the +system corresponds to the unique conditionally independent representation of +the state-by-node TPM. + +.. note:: + Every **deterministic** state-by-state TPM satisfies the conditional + independence property. 
Consider a system of two binary nodes (|A| and |B|) which do not change if they have the same value, but flip with probability 50% if they have different @@ -23,101 +35,99 @@ values. We'll load the state-by-state TPM for such a system from the |examples| module: - >>> import pyphi - >>> tpm = pyphi.examples.cond_depend_tpm() - >>> print(tpm) - [[ 1. 0. 0. 0. ] - [ 0. 0.5 0.5 0. ] - [ 0. 0.5 0.5 0. ] - [ 0. 0. 0. 1. ]] - -This system does not satisfy the conditional independence assumption; given a -past state of ``(1, 0)``, the current state of node |A| depends on whether or -not |B| has flipped. - -When creating a network, the program will convert this state-by-state TPM to a -state-by-node form, and issue a warning if it does not satisfy the assumption: - - >>> sbn_tpm = pyphi.convert.state_by_state2state_by_node(tpm) - -“The TPM is not conditionally independent. See the conditional independence -example in the documentation for more information on how this is handled.” - - >>> print(sbn_tpm) - [[[ 0. 0. ] - [ 0.5 0.5]] - - [[ 0.5 0.5] - [ 1. 1. ]]] - -The program will continue with the state-by-node TPM, but since it assumes -conditional independence, the network will not correspond to the original -system. - -To see the corresponding conditionally independent TPM, convert the -state-by-node TPM back to state-by-state form: - - >>> sbs_tpm = pyphi.convert.state_by_node2state_by_state(sbn_tpm) - >>> print(sbs_tpm) - [[ 1. 0. 0. 0. ] - [ 0.25 0.25 0.25 0.25] - [ 0.25 0.25 0.25 0.25] - [ 0. 0. 0. 1. ]] - -A system which does not satisfy the conditional independence assumption -exhibits “instantaneous causality.” In such situations, there must be -additional exogenous variable(s) which explain the dependence. - -Consider the above example, but with the addition of a third node (|C|) which -is equally likely to be ON or OFF, and such that when nodes |A| and |B| are in -different states, they will flip when |C| is ON, but stay the same when |C| is -OFF. 
- - >>> tpm2 = pyphi.examples.cond_independ_tpm() - >>> print(tpm2) - [[ 0.5 0. 0. 0. 0.5 0. 0. 0. ] - [ 0. 0.5 0. 0. 0. 0.5 0. 0. ] - [ 0. 0. 0.5 0. 0. 0. 0.5 0. ] - [ 0. 0. 0. 0.5 0. 0. 0. 0.5] - [ 0.5 0. 0. 0. 0.5 0. 0. 0. ] - [ 0. 0. 0.5 0. 0. 0. 0.5 0. ] - [ 0. 0.5 0. 0. 0. 0.5 0. 0. ] - [ 0. 0. 0. 0.5 0. 0. 0. 0.5]] + >>> import pyphi + >>> tpm = pyphi.examples.cond_depend_tpm() + >>> print(tpm) + [[ 1. 0. 0. 0. ] + [ 0. 0.5 0.5 0. ] + [ 0. 0.5 0.5 0. ] + [ 0. 0. 0. 1. ]] + +This system does not satisfy the conditional independence property; given a +previous state of ``(1, 0)``, the current state of node |A| depends on whether +or not |B| has flipped. + +If a conditionally dependent TPM is used to create a |Network|, PyPhi will +raise an error: + + >>> network = pyphi.Network(tpm) + Traceback (most recent call last): + ... + pyphi.exceptions.ConditionallyDependentError: TPM is not conditionally independent. + See the conditional independence example in the documentation for more info. + +To see the conditionally independent TPM that corresponds to the conditionally +dependent TPM, convert it to state-by-node form and then back to state-by-state +form: + + >>> sbn_tpm = pyphi.convert.state_by_state2state_by_node(tpm) + >>> print(sbn_tpm) + [[[ 0. 0. ] + [ 0.5 0.5]] + + [[ 0.5 0.5] + [ 1. 1. ]]] + >>> sbs_tpm = pyphi.convert.state_by_node2state_by_state(sbn_tpm) + >>> print(sbs_tpm) + [[ 1. 0. 0. 0. ] + [ 0.25 0.25 0.25 0.25] + [ 0.25 0.25 0.25 0.25] + [ 0. 0. 0. 1. ]] + +A system which does not satisfy the conditional independence property exhibits +“instantaneous causality.” In such situations, there must be additional +exogenous variable(s) which explain the dependence. + +Now consider the above example, but with the addition of a third node (|C|) +which is equally likely to be ON or OFF, and such that when nodes |A| and |B| +are in different states, they will flip when |C| is ON, but stay the same when +|C| is OFF. 
+ + >>> tpm2 = pyphi.examples.cond_independ_tpm() + >>> print(tpm2) + [[ 0.5 0. 0. 0. 0.5 0. 0. 0. ] + [ 0. 0.5 0. 0. 0. 0.5 0. 0. ] + [ 0. 0. 0.5 0. 0. 0. 0.5 0. ] + [ 0. 0. 0. 0.5 0. 0. 0. 0.5] + [ 0.5 0. 0. 0. 0.5 0. 0. 0. ] + [ 0. 0. 0.5 0. 0. 0. 0.5 0. ] + [ 0. 0.5 0. 0. 0. 0.5 0. 0. ] + [ 0. 0. 0. 0.5 0. 0. 0. 0.5]] The resulting state-by-state TPM now satisfies the conditional independence -assumption. - - >>> sbn_tpm2 = pyphi.convert.state_by_state2state_by_node(tpm2) - >>> print(sbn_tpm2) - [[[[ 0. 0. 0.5] - [ 0. 0. 0.5]] - - [[ 0. 1. 0.5] - [ 1. 0. 0.5]]] - - - [[[ 1. 0. 0.5] - [ 0. 1. 0.5]] - - [[ 1. 1. 0.5] - [ 1. 1. 0.5]]]] +property. + + >>> sbn_tpm2 = pyphi.convert.state_by_state2state_by_node(tpm2) + >>> print(sbn_tpm2) + [[[[ 0. 0. 0.5] + [ 0. 0. 0.5]] + + [[ 0. 1. 0.5] + [ 1. 0. 0.5]]] + + + [[[ 1. 0. 0.5] + [ 0. 1. 0.5]] + + [[ 1. 1. 0.5] + [ 1. 1. 0.5]]]] The node indices are ``0`` and ``1`` for |A| and |B|, and ``2`` for |C|: - >>> AB = [0, 1] - >>> C = [2] + >>> AB = [0, 1] + >>> C = [2] From here, if we marginalize out the node |C|; - >>> tpm2_marginalizeC = pyphi.tpm.marginalize_out(C, sbn_tpm2) + >>> tpm2_marginalizeC = pyphi.tpm.marginalize_out(C, sbn_tpm2) And then restrict the purview to only nodes |A| and |B|; - >>> import numpy as np - >>> tpm2_purviewAB = np.squeeze(tpm2_marginalizeC[:,:,:,AB]) + >>> import numpy as np + >>> tpm2_purviewAB = np.squeeze(tpm2_marginalizeC[:,:,:,AB]) We get back the original state-by-node TPM from the system with just |A| and |B|. - >>> np.all(tpm2_purviewAB == sbn_tpm) - True + >>> np.all(tpm2_purviewAB == sbn_tpm) + True diff --git a/docs/examples/emergence.rst b/docs/examples/emergence.rst index 8f3aa8888..7d5c243b4 100644 --- a/docs/examples/emergence.rst +++ b/docs/examples/emergence.rst @@ -15,21 +15,21 @@ available from the |examples| module. 
The connectivity matrix is all-to-all: - >>> network.connectivity_matrix + >>> network.cm array([[ 1., 1., 1., 1.], [ 1., 1., 1., 1.], [ 1., 1., 1., 1.], [ 1., 1., 1., 1.]]) -We'll set the state so that nodes are off. +We'll set the state so that nodes are OFF. >>> state = (0, 0, 0, 0) -At the “micro” spatial scale, we can compute the main complex, and determine +At the “micro” spatial scale, we can compute the major complex, and determine the |big_phi| value: - >>> main_complex = pyphi.compute.main_complex(network, state) - >>> main_complex.phi + >>> major_complex = pyphi.compute.major_complex(network, state) + >>> major_complex.phi 0.113889 The question is whether there are other spatial scales which have greater @@ -58,12 +58,12 @@ We must then determine the relationship between micro-elements and macro-elements. When coarse-graining the system we assume that the resulting macro-elements do not differentiate the different micro-elements. Thus any correspondence between states must be stated solely in terms of the number of -micro-elements which are on, and not depend on which micro-elements are on. +micro-elements which are ON, and not depend on which micro-elements are ON. For example, consider the macro-element ``(0, 1, 2)``. We may say that the -macro-element is on if at least one micro-element is on, or if all -micro-elements are on; however, we may not say that the macro-element is on if -micro-element ``1`` is on, because this relationship involves identifying +macro-element is ON if at least one micro-element is ON, or if all +micro-elements are ON; however, we may not say that the macro-element is ON if +micro-element ``1`` is ON, because this relationship involves identifying specific micro-elements. 
The ``grouping`` attribute of the |CoarseGrain| describes how the state of @@ -78,15 +78,15 @@ The grouping consists of two lists, one for each macro-element: >>> grouping[0] ((0, 1, 2), (3,)) -For the first macro-element, this grouping means that the element will be off -if zero, one or two of its micro-elements are on, and will be on if all three -micro-elements are on. +For the first macro-element, this grouping means that the element will be OFF +if zero, one or two of its micro-elements are ON, and will be ON if all three +micro-elements are ON. >>> grouping[1] ((0,), (1,)) -For the second macro-element, the grouping means that the element will be off -if its micro-element is off, and on if its micro-element is on. +For the second macro-element, the grouping means that the element will be OFF +if its micro-element is OFF, and ON if its micro-element is ON. One we have selected a partition and grouping for analysis, we can create a mapping between micro-states and macro-states: @@ -95,23 +95,23 @@ mapping between micro-states and macro-states: >>> mapping array([0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 2, 2, 2, 2, 2, 3]) -The interpretation of the mapping uses the LOLI convention of indexing (see -:ref:`tpm-conventions`). +The interpretation of the mapping uses the little-endian convention of indexing +(see :ref:`little-endian-convention`). >>> mapping[7] 1 This says that micro-state 7 corresponds to macro-state 1: - >>> pyphi.convert.loli_index2state(7, 4) + >>> pyphi.convert.le_index2state(7, 4) (1, 1, 1, 0) - >>> pyphi.convert.loli_index2state(1, 2) + >>> pyphi.convert.le_index2state(1, 2) (1, 0) In micro-state 7, all three elements corresponding to the first macro-element -are on, so that macro-element is on. The micro-element corresponding to the -second macro-element is off, so that macro-element is off. +are ON, so that macro-element is ON. The micro-element corresponding to the +second macro-element is OFF, so that macro-element is OFF. 
The |CoarseGrain| object uses the mapping internally to create a state-by-state TPM for the macro-system corresponding to the selected partition and grouping @@ -155,8 +155,8 @@ We can now construct a |MacroSubsystem| using this coarse-graining: We can then consider the integrated information of this macro-network and compare it to the micro-network. - >>> macro_mip = pyphi.compute.big_mip(macro_subsystem) - >>> macro_mip.phi + >>> macro_sia = pyphi.compute.sia(macro_subsystem) + >>> macro_sia.phi 0.597212 The integrated information of the macro subsystem (:math:`\Phi = 0.597212`) is @@ -192,7 +192,7 @@ using blackboxing. >>> import pyphi >>> network = pyphi.examples.blackbox_network() -We consider the state where all nodes are off: +We consider the state where all nodes are OFF: >>> state = (0, 0, 0, 0, 0, 0) >>> all_nodes = (0, 1, 2, 3, 4, 5) @@ -200,7 +200,7 @@ We consider the state where all nodes are off: The system has minimal |big_phi| without blackboxing: >>> subsys = pyphi.Subsystem(network, state, all_nodes) - >>> pyphi.compute.big_phi(subsys) + >>> pyphi.compute.phi(subsys) 0.215278 We will consider the blackbox system consisting of two blackbox elements, |ABC| @@ -247,11 +247,13 @@ Let us also define a time scale over which to perform our analysis: As in the coarse-graining example, the blackbox and time scale are passed to |MacroSubsystem|: - >>> macro_subsystem = pyphi.macro.MacroSubsystem(network, state, all_nodes, blackbox=blackbox, time_scale=time_scale) + >>> macro_subsystem = pyphi.macro.MacroSubsystem(network, state, all_nodes, + ... blackbox=blackbox, + ... 
time_scale=time_scale) We can now compute |big_phi| for this macro system: - >>> pyphi.compute.big_phi(macro_subsystem) + >>> pyphi.compute.phi(macro_subsystem) 0.638888 We find that the macro subsystem has greater integrated information diff --git a/docs/examples/index.rst b/docs/examples/index.rst index f87ae5d88..5d2fda46e 100644 --- a/docs/examples/index.rst +++ b/docs/examples/index.rst @@ -48,7 +48,7 @@ to read. Now we construct the network itself with the arguments we just created: - >>> network = pyphi.Network(tpm, connectivity_matrix=cm, + >>> network = pyphi.Network(tpm, cm=cm, ... node_labels=labels) The next step is to define a subsystem for which we want to evaluate |big_phi|. @@ -73,28 +73,28 @@ every node in the network in our subsystem: >>> pyphi.Subsystem(network, state, ('B', 'C')) Subsystem(B, C) -Now we use |compute.big_phi| function to compute the |big_phi| of our +Now we use the |compute.phi()| function to compute the |big_phi| of our subsystem: - >>> pyphi.compute.big_phi(subsystem) + >>> pyphi.compute.phi(subsystem) 2.3125 If we want to take a deeper look at the integrated-information-theoretic properties of our network, we can access all the intermediate quantities and structures that are calculated in the course of arriving at a final |big_phi| -value by using |compute.big_mip|. This returns a nested object, |BigMip|, that -contains data about the subsystem's constellation of concepts, cause and effect -repertoires, etc. +value by using |compute.sia()|. This returns a nested object, +|SystemIrreducibilityAnalysis|, that contains data about the subsystem's +cause-effect structure, cause and effect repertoires, etc. - >>> mip = pyphi.compute.big_mip(subsystem) + >>> sia = pyphi.compute.sia(subsystem) For instance, we can see that this network has 4 concepts: - >>> len(mip.unpartitioned_constellation) + >>> len(sia.ces) 4 -See the documentation for |BigMip| and |Concept| for more information on these -objects. 
+See the documentation for |SystemIrreducibilityAnalysis| and |Concept| for more +information on these objects. .. tip:: The network and subsystem discussed here are returned by the diff --git a/docs/examples/magic_cut.rst b/docs/examples/magic_cut.rst index 007458f6a..954ad9e31 100644 --- a/docs/examples/magic_cut.rst +++ b/docs/examples/magic_cut.rst @@ -7,14 +7,14 @@ this example is to highlight an unexpected behaviour of system cuts: that the minimum information partition of a system can result in new concepts being created. -First let's create the the Rule 110 network, with all nodes off in the current +First let's create the the Rule 110 network, with all nodes OFF in the current state. >>> import pyphi >>> network = pyphi.examples.rule110_network() >>> state = (0, 0, 0) -Next, we want to identify the spatial scale and main complex of the network: +Next, we want to identify the spatial scale and major complex of the network: >>> macro = pyphi.macro.emergence(network, state) >>> print(macro.emergence) @@ -22,20 +22,20 @@ Next, we want to identify the spatial scale and main complex of the network: Since the emergence value is negative, there is no macro scale which has greater integrated information than the original micro scale. We can now -analyze the micro scale to determine the main complex of the system: +analyze the micro scale to determine the major complex of the system: - >>> main_complex = pyphi.compute.main_complex(network, state) - >>> main_complex.subsystem + >>> major_complex = pyphi.compute.major_complex(network, state) + >>> major_complex.subsystem Subsystem(A, B, C) - >>> print(main_complex.phi) + >>> print(major_complex.phi) 1.35708 -The main complex of the system contains all three nodes of the system, and it +The major complex of the system contains all three nodes of the system, and it has integrated information :math:`\Phi = 1.35708`. 
Now that we have identified -the main complex of the system, we can explore its conceptual structure and the -effect of the MIP. +the major complex of the system, we can explore its cause-effect structure and +the effect of the MIP. - >>> constellation = main_complex.unpartitioned_constellation + >>> ces = major_complex.ces There two equivalent cuts for this system; for concreteness we sever all connections from elements |A| and |B| to |C|. @@ -43,64 +43,65 @@ connections from elements |A| and |B| to |C|. >>> cut = pyphi.models.Cut(from_nodes=(0, 1), to_nodes=(2,)) >>> cut_subsystem = pyphi.Subsystem(network, state, range(network.size), ... cut=cut) - >>> cut_constellation = pyphi.compute.constellation(cut_subsystem) + >>> cut_ces = pyphi.compute.ces(cut_subsystem) -Let's investigate the concepts in the unpartitioned constellation, +Let's investigate the concepts in the unpartitioned cause-effect structure, - >>> constellation.labeled_mechanisms - [['A'], ['B'], ['C'], ['A', 'B'], ['A', 'C'], ['B', 'C']] - >>> constellation.phis + >>> ces.labeled_mechanisms + (['A'], ['B'], ['C'], ['A', 'B'], ['A', 'C'], ['B', 'C']) + >>> ces.phis [0.125, 0.125, 0.125, 0.499999, 0.499999, 0.499999] >>> print(sum(_)) 1.874997 -and also the concepts of the partitioned constellation. +and also the concepts of the partitioned cause-effect structure. - >>> cut_constellation.labeled_mechanisms - [['A'], ['B'], ['C'], ['A', 'B'], ['B', 'C'], ['A', 'B', 'C']] - >>> cut_constellation.phis + >>> cut_ces.labeled_mechanisms + (['A'], ['B'], ['C'], ['A', 'B'], ['B', 'C'], ['A', 'B', 'C']) + >>> cut_ces.phis [0.125, 0.125, 0.125, 0.499999, 0.266666, 0.333333] >>> print(sum(_)) 1.474998 -The unpartitioned constellation includes all possible first and second order -concepts, but there is no third order concept. After applying the cut and +The unpartitioned cause-effect structure includes all possible first and second +order concepts, but there is no third order concept. 
After applying the cut and severing the connections from |A| and |B| to |C|, the third order concept |ABC| is created and the second order concept |AC| is destroyed. The overall amount of |small_phi| in the system decreases from :math:`1.875` to :math:`1.475`. Let's explore the concept which was created to determine why it does not exist -in the unpartitioned constellation and what changed in the partitioned -constellation. +in the unpartitioned cause-effect structure and what changed in the partitioned +cause-effect structure. - >>> subsystem = main_complex.subsystem + >>> subsystem = major_complex.subsystem >>> ABC = subsystem.node_indices >>> subsystem.cause_info(ABC, ABC) 0.749999 >>> subsystem.effect_info(ABC, ABC) 1.875 -The mechanism has cause and effect power over the system, so it must be that -this power is reducible. +The mechanism does have cause and effect power over the system. But, since it +doesn't specify a concept, it must be that this power is reducible: - >>> mice_cause = subsystem.core_cause(ABC) - >>> mice_cause.phi + >>> mic = subsystem.mic(ABC) + >>> mic.phi 0.0 - >>> mice_effect = subsystem.core_effect(ABC) - >>> mice_effect.phi + >>> mie = subsystem.mie(ABC) + >>> mie.phi 0.625 The reason ABC does not exist as a concept is that its cause is reducible. -Looking at the TPM of the system, there are no possible states with two of the -elements set to off. This means that knowing two elements are off is enough to -know that the third element must also be off, and thus the third element can +Looking at the TPM of the system, there are no possible states where two +elements are OFF. This means that knowing two elements are OFF is enough to +know that the third element must also be OFF, and thus the third element can always be cut from the concept without a loss of information. This will be true for any purview, so the cause information is reducible. 
>>> BC = (1, 2) >>> A = (0,) >>> repertoire = subsystem.cause_repertoire(ABC, ABC) - >>> cut_repertoire = subsystem.cause_repertoire(BC, ABC) * subsystem.cause_repertoire(A, ()) + >>> cut_repertoire = (subsystem.cause_repertoire(BC, ABC) * + ... subsystem.cause_repertoire(A, ())) >>> pyphi.distance.hamming_emd(repertoire, cut_repertoire) 0.0 @@ -111,20 +112,22 @@ into existence. >>> C = (2,) >>> AB = (0, 1) -The cut applied to the subsystem severs the connections from |A| and |B| to -|C|. In this circumstance, knowing |A| and |B| do not tell us anything about -the state of |C|, only the past state of |C| can tell us about the future state -of |C|. Here, ``past_tpm[1]`` gives us the probability of C being on in the -next state, while ``past_tpm[0]`` would give us the probability of C being off. +The cut applied to the subsystem severs the connections going to |C| from +either |A| or |B|. In this circumstance, knowing the state of |A| or |B| does +not tell us anything about the state of |C|; only the previous state of |C| can +tell us about the next state of |C|. ``C_node.tpm_on`` gives us the probability +of |C| being ON in the next state, while ``C_node.tpm_off`` would give us the +probability of |C| being OFF. >>> C_node = cut_subsystem.indices2nodes(C)[0] >>> C_node.tpm_on.flatten() array([ 0.5 , 0.75]) -This states that A has a 50% chance of being on in the next state if it -currently off, but a 75% chance of being on in the next state if it is -currently on. Thus unlike the unpartitioned case, knowing the current state of -C gives us additional information over and above knowing A and B. +This states that |C| has a 50% chance of being ON in the next state if it +currently OFF, but a 75% chance of being ON in the next state if it is +currently ON. Thus, unlike the unpartitioned case, knowing the current state of +|C| gives us additional information over and above knowing the state of |A| or +|B|. 
>>> repertoire = cut_subsystem.cause_repertoire(ABC, ABC) >>> cut_repertoire = (cut_subsystem.cause_repertoire(AB, ABC) * @@ -133,74 +136,77 @@ C gives us additional information over and above knowing A and B. 0.500001 With this partition, the integrated information is :math:`\varphi = 0.5`, but -we must check all possible partitions to find the MIP. +we must check all possible partitions to find the maximally-irreducible cause: - >>> cut_subsystem.core_cause(ABC).purview + >>> mic = cut_subsystem.mic(ABC) + >>> mic.purview (0, 1, 2) - >>> cut_subsystem.core_cause(ABC).phi + >>> mic.phi 0.333333 -It turns out that the MIP is +It turns out that the MIP of the maximally-irreducible cause is .. math:: - \frac{AB}{[\,]} \times \frac{C}{ABC} + \frac{AB}{\varnothing} \times \frac{C}{ABC} -and the integrated information of ABC is :math:`\varphi = 1/3`. +and the integrated information of mechanism |ABC| is :math:`\varphi = 1/3`. Note that in order for a new concept to be created by a cut, there must be a within-mechanism connection severed by the cut. In the previous example, the MIP created a new concept, but the amount of -|small_phi| in the constellation still decreased. This is not always the case. -Next we will look at an example of system whoes MIP increases the amount of -|small_phi|. This example is based on a five node network which follows the -logic of the Rule 154 cellular automaton. Let's first load the network, +|small_phi| in the cause-effect structure still decreased. This is not always +the case. Next we will look at an example of system whoes MIP increases the +amount of |small_phi|. This example is based on a five-node network that +implements the logic of the Rule 154 cellular automaton. Let's first load the +network: >>> network = pyphi.examples.rule154_network() >>> state = (1, 0, 0, 0, 0) For this example, it is the subsystem consisting of |A|, |B|, and |E| that we -explore. 
This is not the main concept of the system, but it serves as a proof +explore. This is not the major complex of the system, but it serves as a proof of principle regardless. >>> subsystem = pyphi.Subsystem(network, state, (0, 1, 4)) Calculating the MIP of the system, - >>> mip = pyphi.compute.big_mip(subsystem) - >>> mip.phi + >>> sia = pyphi.compute.sia(subsystem) + >>> sia.phi 0.217829 - >>> mip.cut + >>> sia.cut Cut [0, 4] ━━/ /━━➤ [1] -This subsystem has a |big_phi| value of 0.15533, and the MIP cuts the -connections from |AE| to |B|. Investigating the concepts in both the -partitioned and unpartitioned constellations, +we see that this subsystem has a |big_phi| value of 0.15533, and the MIP cuts +the connections from |AE| to |B|. Investigating the concepts in both the +partitioned and unpartitioned cause-effect structures, - >>> mip.unpartitioned_constellation.labeled_mechanisms - [['A'], ['B'], ['A', 'B']] - >>> mip.unpartitioned_constellation.phis + >>> sia.ces.labeled_mechanisms + (['A'], ['B'], ['A', 'B']) + >>> sia.ces.phis [0.25, 0.166667, 0.178572] >>> print(sum(_)) 0.5952390000000001 -The unpartitioned constellation has mechanisms |A|, |B| and |AB| with -:math:`\sum\varphi = 0.595239`. +We see that the unpartitioned cause-effect structure has mechanisms |A|, |B| +and |AB| with :math:`\sum\varphi = 0.595239`. - >>> mip.partitioned_constellation.labeled_mechanisms - [['A'], ['B'], ['A', 'B']] - >>> mip.partitioned_constellation.phis + >>> sia.partitioned_ces.labeled_mechanisms + (['A'], ['B'], ['A', 'B']) + >>> sia.partitioned_ces.phis [0.25, 0.166667, 0.214286] >>> print(sum(_)) 0.630953 -The partitioned constellation has mechanisms |A|, |B| and |AB| but with -:math:`\sum\varphi = 0.630953`. 
There are the same number of concepts in both -constellations, over the same mechanisms; however, the partitioned -constellation has a greater |small_phi| value for the concept |AB|, resulting -in an overall greater :math:`\sum\varphi` for the partitioned constellation. +The partitioned cause-effect structure has mechanisms |A|, |B| and |AB| but +with :math:`\sum\varphi = 0.630953`. There are the same number of concepts in +both cause-effect structures, over the same mechanisms; however, the +partitioned cause-effect structure has a greater |small_phi| value for the +concept |AB|, resulting in an overall greater :math:`\sum\varphi` for the +partitioned cause-effect structure. Although situations described above are rare, they do occur, so one must be careful when analyzing the integrated information of physical systems not to dismiss the possibility of partitions creating new concepts or increasing the -amount of |small_phi|; otherwise, an incorrect main complex may be identified. +amount of |small_phi|; otherwise, an incorrect major complex may be identified. diff --git a/docs/examples/residue.rst b/docs/examples/residue.rst index 9915da3ae..fc318ef06 100644 --- a/docs/examples/residue.rst +++ b/docs/examples/residue.rst @@ -5,7 +5,7 @@ This example describes a system containing two AND gates, |A| and |B|, with a single overlapping input node. First let's create the subsystem corresponding to the residue network, with all -nodes off in the current and past states. +nodes OFF in the current and previous states. >>> import pyphi >>> subsystem = pyphi.examples.residue_subsystem() @@ -17,14 +17,14 @@ represented by tuples of node indices in the network: >>> B = (1,) >>> AB = (0, 1) -And the possible past purviews that we're interested in: +And the possible cause purviews that we're interested in: >>> CD = (2, 3) >>> DE = (3, 4) >>> CDE = (2, 3, 4) We can then evaluate the cause information for each of the mechanisms over the -past purview |CDE|. +cause purview |CDE|. 
>>> subsystem.cause_info(A, CDE) 0.333332 @@ -42,12 +42,13 @@ minimally in this system. Instead, we can quantify existence as the irreducible cause information of a mechanism. The MIP of a mechanism is the partition of mechanism and purview which makes the least difference to the cause repertoire (see the documentation -for the |Mip| object). The irreducible cause information is the distance -between the unpartitioned and partitioned repertoires. +for the |RepertoireIrreducibilityAnalysis| object). The irreducible cause +information is the distance between the unpartitioned and partitioned +repertoires. -To calculate the MIP structure of mechanism |AB|: +To analyze the irreducibility of the mechanism |AB| on the cause side: - >>> mip_AB = subsystem.mip_past(AB, CDE) + >>> mip_AB = subsystem.cause_mip(AB, CDE) We can then determine what the specific partition is. @@ -57,7 +58,7 @@ We can then determine what the specific partition is. 2 3,4 The indices ``(0, 1, 2, 3, 4)`` correspond to nodes :math:`A, B, C, D, E` -respectively. Thus the MIP is |(AB / DE) x (∅ / C)|, where :math:`[\,]` +respectively. Thus the MIP is |(AB / DE) x (∅ / C)|, where :math:`\varnothing` denotes the empty mechanism. The partitioned repertoire of the MIP can also be retrieved: @@ -79,21 +80,22 @@ One counterintuitive result that merits discussion is that since irreducible cause information is what defines existence, we must also evaluate the irreducible cause information of the mechanisms |A| and |B|. -The mechanism |A| over the purview |CDE| is completely reducible to |(A / CD) x -(∅ / E)| because |E| has no effect on |A|, so it has zero |small_phi|. +The mechanism |A| over the purview |CDE| is completely reducible to +|(A / CD) x (∅ / E)| because |E| has no effect on |A|, so it has zero +|small_phi|. 
- >>> subsystem.mip_past(A, CDE).phi + >>> subsystem.cause_mip(A, CDE).phi 0.0 - >>> subsystem.mip_past(A, CDE).partition # doctest: +NORMALIZE_WHITESPACE + >>> subsystem.cause_mip(A, CDE).partition # doctest: +NORMALIZE_WHITESPACE ∅ 0 ─── ✕ ─── 4 2,3 Instead, we should evaluate |A| over the purview |CD|. - >>> mip_A = subsystem.mip_past(A, CD) + >>> mip_A = subsystem.cause_mip(A, CD) -In this case, there is a well defined MIP +In this case, there is a well-defined MIP >>> mip_A.partition # doctest: +NORMALIZE_WHITESPACE ∅ 0 diff --git a/docs/examples/xor.rst b/docs/examples/xor.rst index 3f9fe21ee..165de5f7d 100644 --- a/docs/examples/xor.rst +++ b/docs/examples/xor.rst @@ -9,43 +9,43 @@ First let's create the XOR network: >>> import pyphi >>> network = pyphi.examples.xor_network() -We'll consider the state with all nodes off. +We'll consider the state with all nodes OFF. >>> state = (0, 0, 0) According to IIT, existence is a holistic notion; the whole is more important than its parts. The first step is to confirm the existence of the whole, by -finding the main complex of the network: +finding the major complex of the network: - >>> main_complex = pyphi.compute.main_complex(network, state) + >>> major_complex = pyphi.compute.major_complex(network, state) -The main complex exists (|big_phi > 0|), +The major complex exists (|big_phi > 0|), - >>> main_complex.phi + >>> major_complex.phi 1.874999 and it consists of the entire network: - >>> main_complex.subsystem + >>> major_complex.subsystem Subsystem(A, B, C) Knowing what exists at the system level, we can now investigate the existence of concepts within the complex. - >>> constellation = main_complex.unpartitioned_constellation - >>> len(constellation) + >>> ces = major_complex.ces + >>> len(ces) 3 - >>> constellation.labeled_mechanisms - [['A', 'B'], ['A', 'C'], ['B', 'C']] + >>> ces.labeled_mechanisms + (['A', 'B'], ['A', 'C'], ['B', 'C']) -There are three concepts in the constellation. 
They are all the possible second -order mechanisms: |AB|, |AC| and |BC|. +There are three concepts in the cause-effect structure. They are all the +possible second order mechanisms: |AB|, |AC| and |BC|. Focusing on the concept specified by mechanism |AB|, we investigate existence, and the irreducible cause and effect. Based on the symmetry of the network, the results will be similar for the other second order mechanisms. - >>> concept = constellation[0] + >>> concept = ces[0] >>> concept.mechanism (0, 1) >>> concept.phi @@ -63,20 +63,20 @@ The concept has :math:`\varphi = \frac{1}{2}`. [ 0. , 0.5]]]) So we see that the cause purview of this mechanism is the whole system |ABC|, -and that the repertoire shows a :math:`0.5` of probability the past state being -``(0, 0, 0)`` and the same for ``(1, 1, 1)``: +and that the repertoire shows a :math:`0.5` of probability the previous state +being ``(0, 0, 0)`` and the same for ``(1, 1, 1)``: >>> concept.cause.repertoire[(0, 0, 0)] 0.5 >>> concept.cause.repertoire[(1, 1, 1)] 0.5 -This tells us that knowing both |A| and |B| are currently off means that -the past state of the system was either all off or all on with equal +This tells us that knowing both |A| and |B| are currently OFF means that +the previous state of the system was either all OFF or all ON with equal probability. For any reduced purview, we would still have the same information about the -elements in the purview (either all on or all off), but we would lose +elements in the purview (either all ON or all OFF), but we would lose the information about the elements outside the purview. >>> concept.effect.purview @@ -85,17 +85,17 @@ the information about the elements outside the purview. array([[[ 1., 0.]]]) The effect purview of this concept is the node |C|. The mechanism |AB| is able -to completely specify the next state of |C|. Since both nodes are off, the -next state of |C| will be off. +to completely specify the next state of |C|. 
Since both nodes are OFF, the +next state of |C| will be OFF. The mechanism |AB| does not provide any information about the next state of either |A| or |B|, because the relationship depends on the value of |C|. That -is, the next state of |A| (or |B|) may be either on or off, depending +is, the next state of |A| (or |B|) may be either ON or OFF, depending on the value of |C|. Any purview larger than |C| would be reducible by pruning away the additional elements. +------------------------------------------------------------------+ -| Main Complex: |ABC| with :math:`\Phi = 1.875` | +| Major Complex: |ABC| with :math:`\Phi = 1.875` | +---------------+-----------------+---------------+----------------+ | Mechanism | :math:`\varphi` | Cause Purview | Effect Purview | +===============+=================+===============+================+ @@ -106,7 +106,7 @@ away the additional elements. | |BC| | 0.5 | |ABC| | |A| | +---------------+-----------------+---------------+----------------+ -An analysis of the `intrinsic existence` of this system reveals that the main +An analysis of the `intrinsic existence` of this system reveals that the major complex of the system is the entire network of XOR nodes. Furthermore, the concepts which exist within the complex are those specified by the second-order mechanisms |AB|, |AC|, and |BC|. @@ -151,8 +151,8 @@ have no effect power (having causal power is not enough). To see why this is true, consider the effect of |A|. There is no self-loop, so |A| can have no effect on itself. Without knowing the current state of |A|, in -the next state |B| could be either on or off. If we know that the current state -of |A| is on, then |B| could still be either on or off, depending on the state +the next state |B| could be either ON or OFF. If we know that the current state +of |A| is ON, then |B| could still be either ON or OFF, depending on the state of |C|. 
Thus, on its own, the current state of |A| does not provide any information about the next state of |B|. A similar result holds for the effect of |A| on |C|. Since |A| has no effect power over any element of the system, it @@ -165,9 +165,9 @@ mechanism |ABC|. Consider the cause information over the purview |ABC|: 0.749999 Since the mechanism has nonzero cause information, it has causal power over the -system—but is it irreducible? +system---but is it irreducible? - >>> mip = subsystem.mip_past(ABC, ABC) + >>> mip = subsystem.cause_mip(ABC, ABC) >>> mip.phi 0.0 >>> mip.partition # doctest: +NORMALIZE_WHITESPACE @@ -182,10 +182,10 @@ The mechanism has :math:`ci = 0.75`, but it is completely reducible \frac{A}{\varnothing} \times \frac{BC}{ABC} -This result can be understood as follows: knowing that |B| and |C| are off in -the current state is sufficient to know that |A|, |B|, and |C| were all off in -the past state; there is no additional information gained by knowing that |A| -is currently off. +This result can be understood as follows: knowing that |B| and |C| are OFF in +the current state is sufficient to know that |A|, |B|, and |C| were all OFF in +the previous state; there is no additional information gained by knowing that +|A| is currently OFF. Similarly for any other potential purview, the current state of |B| and |C| being ``(0, 0)`` is always enough to fully specify the previous state, so the diff --git a/docs/index.rst b/docs/index.rst index 2768c24a1..11ad09502 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -10,59 +10,39 @@ welcome. For general discussion, you are welcome to join the `pyphi-users group `_. -.. important:: - Each version of PyPhi has its own documentation—make sure you're looking - at the documentation for the version you're using. You can switch - documentation versions in the bottom-left corner. +.. 
_installation: - The ``stable`` version of the documentation corresponds to the most recent - stable release of PyPhi; this is the version you have if you installed - PyPhi with ``pip install pyphi``. The ``latest`` version corresponds to the - most recent unreleased development version (which may have bugs). - -Installation -~~~~~~~~~~~~ - -To install the latest stable release, run - -.. code-block:: bash - - pip install pyphi - -To install the latest development version, which is a work in progress and may have bugs, run - -.. code-block:: bash - - pip install "git+https://github.com/wmayner/pyphi@develop#egg=pyphi" - -**For detailed instructions on how to install PyPhi on macOS, see the** -`installation guide -`_. - -.. note:: - PyPhi is only supported on Linux and macOS operating systems; Windows is not supported. +.. include:: installation.rst .. toctree:: :caption: Usage and Examples :glob: :maxdepth: 1 + installation.rst examples/index - examples/* + examples/2014paper + examples/conditional_independence + examples/xor + examples/emergence + examples/actual_causation + examples/residue + examples/magic_cut + macos_installation .. toctree:: - :caption: Configuration + :caption: Conventions :glob: :maxdepth: 1 - configuration + conventions .. toctree:: - :caption: Conventions + :caption: Configuration :glob: :maxdepth: 1 - conventions + configuration .. toctree:: :caption: API Reference diff --git a/docs/installation.rst b/docs/installation.rst new file mode 100644 index 000000000..2e34dc578 --- /dev/null +++ b/docs/installation.rst @@ -0,0 +1,23 @@ +Installation +~~~~~~~~~~~~ + +To install the latest stable release, run + +.. code-block:: bash + + pip install pyphi + +To install the latest development version, which is a work in progress and may +have bugs, run + +.. code-block:: bash + + pip install "git+https://github.com/wmayner/pyphi@develop#egg=pyphi" + +.. 
tip:: + For detailed instructions on how to install PyPhi on macOS, see the + :ref:`macos-installation`. + +.. note:: + PyPhi is only supported on Linux and macOS operating systems. Windows is + not supported. diff --git a/docs/macos_installation.rst b/docs/macos_installation.rst new file mode 100644 index 000000000..469b99886 --- /dev/null +++ b/docs/macos_installation.rst @@ -0,0 +1 @@ +.. include:: ../INSTALLATION.rst diff --git a/pyphi/__about__.py b/pyphi/__about__.py index d80c81e1e..29ade2ba4 100644 --- a/pyphi/__about__.py +++ b/pyphi/__about__.py @@ -2,10 +2,10 @@ # -*- coding: utf-8 -*- # __about__.py -'''PyPhi metadata.''' +"""PyPhi metadata.""" __title__ = 'pyphi' -__version__ = '0.9.1' +__version__ = '1.0.0' __description__ = 'Python library for computing integrated information.' __author__ = 'William GP Mayner' __author_email__ = 'wmayner@gmail.com' diff --git a/pyphi/__init__.py b/pyphi/__init__.py index ffe2d12fd..45699dae5 100644 --- a/pyphi/__init__.py +++ b/pyphi/__init__.py @@ -12,7 +12,7 @@ # _| # _|_|_| -''' +""" ===== PyPhi ===== @@ -22,13 +22,15 @@ See the documentation for the |examples| module for information on how to use it. -To report issues, please use the issue tracker on the `GitHub repository -`_. Bug reports and pull requests are -welcome. +Online documentation is available at ``_. For general discussion, you are welcome to join the `pyphi-users group `_. +To report issues, please use the issue tracker on the `GitHub repository +`_. Bug reports and pull requests are +welcome. + Usage ~~~~~ @@ -40,14 +42,15 @@ network. |big_phi| is a function of subsystems. The |compute| module is the main entry-point for the library. It contains -methods for calculating concepts, constellations, complexes, etc. See its -documentation for details. +methods for calculating concepts, cause-effect structures, complexes, etc. See +its documentation for details. 
Configuration (optional) ~~~~~~~~~~~~~~~~~~~~~~~~ -There are several package-level options that control aspects of the computation. +There are several package-level options that control aspects of the +computation. These are loaded from a YAML configuration file, ``pyphi_config.yml``. **This file must be in the directory where PyPhi is run**. If there is no such file, @@ -58,7 +61,7 @@ See the documentation for the |config| module for a description of the options and their defaults. -''' +""" from .__about__ import * # pylint: disable=wildcard-import diff --git a/pyphi/actual.py b/pyphi/actual.py index 1584d07a6..297cbd589 100644 --- a/pyphi/actual.py +++ b/pyphi/actual.py @@ -2,9 +2,9 @@ # -*- coding: utf-8 -*- # actual.py -''' +""" Methods for computing actual causation of subsystems and mechanisms. -''' +""" # pylint: disable=too-many-instance-attributes, too-many-arguments # pylint: disable=too-many-public-methods @@ -15,27 +15,26 @@ import numpy as np -import pyphi - from . import (Direction, compute, config, connectivity, constants, exceptions, utils, validate) -from .models import (AcBigMip, Account, AcMip, ActualCut, CausalLink, - DirectedAccount, Event, NullCut, - _null_ac_bigmip, _null_ac_mip, fmt) +from .models import (Account, AcRepertoireIrreducibilityAnalysis, + AcSystemIrreducibilityAnalysis, ActualCut, CausalLink, + DirectedAccount, Event, NullCut, _null_ac_ria, + _null_ac_sia, fmt) from .subsystem import Subsystem, mip_partitions log = logging.getLogger(__name__) def log2(x): - '''Rounded version of ``log2``.''' + """Rounded version of ``log2``.""" return round(_log2(x), config.PRECISION) class Transition: - '''A state transition between two sets of nodes in a network. + """A state transition between two sets of nodes in a network. 
- A |Transition| is implemented with two |Subsystem| objects - one + A |Transition| is implemented with two |Subsystem| objects: one representing the system at time |t-1| used to compute effect coefficients, and another representing the system at time |t| which is used to compute cause coefficients. These subsystems are accessed with the @@ -76,7 +75,7 @@ class Transition: conditioned on ``before_state`` as the background state. After conditioning the ``effect_system`` is then properly reset to ``after_state``. - ''' + """ def __init__(self, network, before_state, after_state, cause_indices, effect_indices, cut=None, noise_background=False): @@ -129,8 +128,8 @@ def __init__(self, network, before_state, after_state, cause_indices, # Dictionary mapping causal directions to the system which is used to # compute repertoires in that direction self.system = { - Direction.PAST: self.cause_system, - Direction.FUTURE: self.effect_system + Direction.CAUSE: self.cause_system, + Direction.EFFECT: self.effect_system } def __repr__(self): @@ -140,16 +139,19 @@ def __str__(self): return repr(self) def __eq__(self, other): - return (self.cause_indices == other.cause_indices - and self.effect_indices == other.effect_indices - and self.before_state == other.before_state - and self.after_state == other.after_state - and self.network == other.network - and self.cut == other.cut) + return ( + self.cause_indices == other.cause_indices and + self.effect_indices == other.effect_indices and + self.before_state == other.before_state and + self.after_state == other.after_state and + self.network == other.network and + self.cut == other.cut + ) def __hash__(self): - return hash((self.cause_indices, self.effect_indices, self.before_state, - self.after_state, self.network, self.cut)) + return hash((self.cause_indices, self.effect_indices, + self.before_state, self.after_state, self.network, + self.cut)) def __len__(self): return len(self.node_indices) @@ -158,7 +160,7 @@ def __bool__(self): 
return len(self) > 0 def to_json(self): - '''Return a JSON-serializable representation.''' + """Return a JSON-serializable representation.""" return { 'network': self.network, 'before_state': self.before_state, @@ -169,34 +171,33 @@ def to_json(self): } def apply_cut(self, cut): - '''Return a cut version of this transition.''' + """Return a cut version of this transition.""" return Transition(self.network, self.before_state, self.after_state, self.cause_indices, self.effect_indices, cut) def cause_repertoire(self, mechanism, purview): - '''Return the cause repertoire.''' - return self.repertoire(Direction.PAST, mechanism, purview) + """Return the cause repertoire.""" + return self.repertoire(Direction.CAUSE, mechanism, purview) def effect_repertoire(self, mechanism, purview): - '''Return the effect repertoire.''' - return self.repertoire(Direction.FUTURE, mechanism, purview) + """Return the effect repertoire.""" + return self.repertoire(Direction.EFFECT, mechanism, purview) def unconstrained_cause_repertoire(self, purview): - '''Return the unconstrained cause repertoire of the occurence.''' + """Return the unconstrained cause repertoire of the occurence.""" return self.cause_repertoire((), purview) def unconstrained_effect_repertoire(self, purview): - '''Return the unconstrained effect repertoire of the occurence.''' + """Return the unconstrained effect repertoire of the occurence.""" return self.effect_repertoire((), purview) def repertoire(self, direction, mechanism, purview): - '''Returns the cause or effect repertoire function based on a - direction. + """Return the cause or effect repertoire function based on a direction. Args: direction (str): The temporal direction, specifiying the cause or effect repertoire. 
- ''' + """ system = self.system[direction] if not set(purview).issubset(self.purview_indices(direction)): @@ -210,15 +211,16 @@ def repertoire(self, direction, mechanism, purview): return system.repertoire(direction, mechanism, purview) def state_probability(self, direction, repertoire, purview,): - '''Compute the probability of the purview in its current state given + """Compute the probability of the purview in its current state given the repertoire. Collapses the dimensions of the repertoire that correspond to the purview nodes onto their state. All other dimension are already singular and thus receive 0 as the conditioning index. - Returns a single probabilty. - ''' + Returns: + float: A single probabilty. + """ purview_state = self.purview_state(direction) index = tuple(node_state if node in purview else 0 @@ -226,46 +228,48 @@ def state_probability(self, direction, repertoire, purview,): return repertoire[index] def probability(self, direction, mechanism, purview): - '''Probability that the purview is in it's current state given the - state of the mechanism.''' + """Probability that the purview is in it's current state given the + state of the mechanism. + """ repertoire = self.repertoire(direction, mechanism, purview) return self.state_probability(direction, repertoire, purview) def unconstrained_probability(self, direction, purview): - '''Unconstrained probability of the purview.''' + """Unconstrained probability of the purview.""" return self.probability(direction, (), purview) def purview_state(self, direction): - '''The state of the purview when we are computing coefficients in + """The state of the purview when we are computing coefficients in ``direction``. For example, if we are computing the cause coefficient of a mechanism - in ``after_state``, the direction is``PAST`` and the ``purview_state`` + in ``after_state``, the direction is``CAUSE`` and the ``purview_state`` is ``before_state``. 
- ''' + """ return { - Direction.PAST: self.before_state, - Direction.FUTURE: self.after_state + Direction.CAUSE: self.before_state, + Direction.EFFECT: self.after_state }[direction] def mechanism_state(self, direction): - '''The state of the mechanism when we are computing coefficients in - ``direction``.''' + """The state of the mechanism when computing coefficients in + ``direction``. + """ return self.system[direction].state def mechanism_indices(self, direction): - '''The indices of nodes in the mechanism system.''' + """The indices of nodes in the mechanism system.""" return { - Direction.PAST: self.effect_indices, - Direction.FUTURE: self.cause_indices + Direction.CAUSE: self.effect_indices, + Direction.EFFECT: self.cause_indices }[direction] def purview_indices(self, direction): - '''The indices of nodes in the purview system.''' + """The indices of nodes in the purview system.""" return { - Direction.PAST: self.cause_indices, - Direction.FUTURE: self.effect_indices + Direction.CAUSE: self.cause_indices, + Direction.EFFECT: self.effect_indices }[direction] def _ratio(self, direction, mechanism, purview): @@ -273,21 +277,22 @@ def _ratio(self, direction, mechanism, purview): self.unconstrained_probability(direction, purview)) def cause_ratio(self, mechanism, purview): - '''The cause ratio of the ``purview`` given ``mechanism``.''' - return self._ratio(Direction.PAST, mechanism, purview) + """The cause ratio of the ``purview`` given ``mechanism``.""" + return self._ratio(Direction.CAUSE, mechanism, purview) def effect_ratio(self, mechanism, purview): - '''The effect ratio of the ``purview`` given ``mechanism``.''' - return self._ratio(Direction.FUTURE, mechanism, purview) + """The effect ratio of the ``purview`` given ``mechanism``.""" + return self._ratio(Direction.EFFECT, mechanism, purview) def partitioned_repertoire(self, direction, partition): - '''Compute the repertoire over the partition in the given direction.''' + """Compute the repertoire over the 
partition in the given direction.""" system = self.system[direction] return system.partitioned_repertoire(direction, partition) def partitioned_probability(self, direction, partition): - '''Compute the probability of the mechanism over the purview in - the partition.''' + """Compute the probability of the mechanism over the purview in + the partition. + """ repertoire = self.partitioned_repertoire(direction, partition) return self.state_probability(direction, repertoire, partition.purview) @@ -296,11 +301,11 @@ def partitioned_probability(self, direction, partition): # TODO: alias to `irreducible_cause/effect ratio? def find_mip(self, direction, mechanism, purview, allow_neg=False): - '''Find the ratio minimum information partition for a mechanism + """Find the ratio minimum information partition for a mechanism over a purview. Args: - direction (str): |PAST| or |FUTURE| + direction (str): |CAUSE| or |EFFECT| mechanism (tuple[int]): A mechanism. purview (tuple[int]): A purview. @@ -310,8 +315,9 @@ def find_mip(self, direction, mechanism, purview, allow_neg=False): they were 0. Returns: - AcMip: The found MIP. - ''' + AcRepertoireIrreducibilityAnalysis: The irreducibility analysis for + the mechanism. 
+ """ alpha_min = float('inf') probability = self.probability(direction, mechanism, purview) @@ -324,52 +330,59 @@ def find_mip(self, direction, mechanism, purview, allow_neg=False): # First check for 0 # Default: don't count contrary causes and effects if utils.eq(alpha, 0) or (alpha < 0 and not allow_neg): - return AcMip(state=self.mechanism_state(direction), - direction=direction, - mechanism=mechanism, - purview=purview, - partition=partition, - probability=probability, - partitioned_probability=partitioned_probability, - alpha=0.0) + return AcRepertoireIrreducibilityAnalysis( + state=self.mechanism_state(direction), + direction=direction, + mechanism=mechanism, + purview=purview, + partition=partition, + probability=probability, + partitioned_probability=partitioned_probability, + alpha=0.0 + ) # Then take closest to 0 if (abs(alpha_min) - abs(alpha)) > constants.EPSILON: alpha_min = alpha - acmip = AcMip(state=self.mechanism_state(direction), - direction=direction, - mechanism=mechanism, - purview=purview, - partition=partition, - probability=probability, - partitioned_probability=partitioned_probability, - alpha=alpha_min) - return acmip + acria = AcRepertoireIrreducibilityAnalysis( + state=self.mechanism_state(direction), + direction=direction, + mechanism=mechanism, + purview=purview, + partition=partition, + probability=probability, + partitioned_probability=partitioned_probability, + alpha=alpha_min + ) + return acria # Phi_max methods # ========================================================================= def potential_purviews(self, direction, mechanism, purviews=False): - '''Return all purviews that could belong to the core cause/effect. + """Return all purviews that could belong to the |MIC|/|MIE|. Filters out trivially-reducible purviews. Args: - direction (str): Either |PAST| or |FUTURE|. + direction (str): Either |CAUSE| or |EFFECT|. mechanism (tuple[int]): The mechanism of interest. 
Keyword Args: purviews (tuple[int]): Optional subset of purviews of interest. - ''' + """ system = self.system[direction] - return [purview for purview in system.potential_purviews( - direction, mechanism, purviews) - if set(purview).issubset(self.purview_indices(direction))] + return [ + purview for purview in system.potential_purviews( + direction, mechanism, purviews) + if set(purview).issubset(self.purview_indices(direction)) + ] # TODO: Implement mice cache # @cache.method('_mice_cache') def find_causal_link(self, direction, mechanism, purviews=False, allow_neg=False): - '''Return the maximally irreducible cause or effect ratio for a mechanism. + """Return the maximally irreducible cause or effect ratio for a + mechanism. Args: direction (str): The temporal direction, specifying cause or @@ -385,43 +398,43 @@ def find_causal_link(self, direction, mechanism, purviews=False, Returns: CausalLink: The maximally-irreducible actual cause or effect. - ''' + """ purviews = self.potential_purviews(direction, mechanism, purviews) - # Find the maximal MIP over the remaining purviews. + # Find the maximal RIA over the remaining purviews. 
if not purviews: - max_mip = _null_ac_mip(self.mechanism_state(direction), + max_ria = _null_ac_ria(self.mechanism_state(direction), direction, mechanism, None) else: # This max should be most positive - max_mip = max(self.find_mip(direction, mechanism, purview, + max_ria = max(self.find_mip(direction, mechanism, purview, allow_neg) for purview in purviews) # Construct the corresponding CausalLink - return CausalLink(max_mip) + return CausalLink(max_ria) def find_actual_cause(self, mechanism, purviews=False): - '''Return the actual cause of a mechanism.''' - return self.find_causal_link(Direction.PAST, mechanism, purviews) + """Return the actual cause of a mechanism.""" + return self.find_causal_link(Direction.CAUSE, mechanism, purviews) def find_actual_effect(self, mechanism, purviews=False): - '''Return the actual effect of a mechanism.''' - return self.find_causal_link(Direction.FUTURE, mechanism, purviews) + """Return the actual effect of a mechanism.""" + return self.find_causal_link(Direction.EFFECT, mechanism, purviews) def find_mice(self, *args, **kwargs): - '''Backwards-compatible alias for :func:`find_causal_link`.''' + """Backwards-compatible alias for :func:`find_causal_link`.""" return self.find_causal_link(*args, **kwargs) -# ============================================================================ +# ============================================================================= # Accounts -# ============================================================================ +# ============================================================================= def directed_account(transition, direction, mechanisms=False, purviews=False, allow_neg=False): - '''Return the set of all |CausalLinks| of the specified direction.''' + """Return the set of all |CausalLinks| of the specified direction.""" if mechanisms is False: mechanisms = utils.powerset(transition.mechanism_indices(direction), nonempty=True) @@ -435,7 +448,7 @@ def directed_account(transition, 
direction, mechanisms=False, purviews=False, def account(transition, direction=Direction.BIDIRECTIONAL): - '''Return the set of all causal links for a |Transition|. + """Return the set of all causal links for a |Transition|. Args: transition (Transition): The transition of interest. @@ -443,21 +456,21 @@ def account(transition, direction=Direction.BIDIRECTIONAL): Keyword Args: direction (Direction): By default the account contains actual causes and actual effects. - ''' + """ if direction != Direction.BIDIRECTIONAL: return directed_account(transition, direction) - return Account(directed_account(transition, Direction.PAST) + - directed_account(transition, Direction.FUTURE)) + return Account(directed_account(transition, Direction.CAUSE) + + directed_account(transition, Direction.EFFECT)) -# ============================================================================ -# AcBigMips and System cuts -# ============================================================================ +# ============================================================================= +# AcSystemIrreducibilityAnalysiss and System cuts +# ============================================================================= def account_distance(A1, A2): - '''Return the distance between two accounts. Here that is just the + """Return the distance between two accounts. Here that is just the difference in sum(alpha) Args: @@ -466,24 +479,24 @@ def account_distance(A1, A2): Returns: float: The distance between the two accounts. 
- ''' - return (sum([action.alpha for action in A1]) - - sum([action.alpha for action in A2])) + """ + return (sum([action.alpha for action in A1]) - + sum([action.alpha for action in A2])) def _evaluate_cut(transition, cut, unpartitioned_account, direction=Direction.BIDIRECTIONAL): - '''Find the |AcBigMip| for a given cut.''' + """Find the |AcSystemIrreducibilityAnalysis| for a given cut.""" cut_transition = transition.apply_cut(cut) partitioned_account = account(cut_transition, direction) log.debug("Finished evaluating %s.", cut) alpha = account_distance(unpartitioned_account, partitioned_account) - return AcBigMip( + return AcSystemIrreducibilityAnalysis( alpha=round(alpha, config.PRECISION), direction=direction, - unpartitioned_account=unpartitioned_account, + account=unpartitioned_account, partitioned_account=partitioned_account, transition=transition, cut=cut) @@ -491,13 +504,13 @@ def _evaluate_cut(transition, cut, unpartitioned_account, # TODO: implement CUT_ONE approximation? def _get_cuts(transition, direction): - '''A list of possible cuts to a transition.''' + """A list of possible cuts to a transition.""" n = transition.network.size if direction is Direction.BIDIRECTIONAL: yielded = set() - for cut in chain(_get_cuts(transition, Direction.PAST), - _get_cuts(transition, Direction.FUTURE)): + for cut in chain(_get_cuts(transition, Direction.CAUSE), + _get_cuts(transition, Direction.EFFECT)): cm = utils.np_hashable(cut.cut_matrix(n)) if cm not in yielded: yielded.add(cm) @@ -510,87 +523,90 @@ def _get_cuts(transition, direction): yield ActualCut(direction, partition) -def big_acmip(transition, direction=Direction.BIDIRECTIONAL): - '''Return the minimal information partition of a transition in a specific +def sia(transition, direction=Direction.BIDIRECTIONAL): + """Return the minimal information partition of a transition in a specific direction. Args: transition (Transition): The candidate system. 
Returns: - AcBigMip: A nested structure containing all the data from the - intermediate calculations. The top level contains the basic MIP - information for the given subsystem. - ''' + AcSystemIrreducibilityAnalysis: A nested structure containing all the + data from the intermediate calculations. The top level contains the + basic irreducibility information for the given subsystem. + """ validate.direction(direction, allow_bi=True) log.info("Calculating big-alpha for %s...", transition) if not transition: - log.info('Transition %s is empty; returning null MIP ' + log.info('Transition %s is empty; returning null SIA ' 'immediately.', transition) - return _null_ac_bigmip(transition, direction) + return _null_ac_sia(transition, direction) - if not connectivity.is_weak(transition.network.cm, transition.node_indices): - log.info('%s is not strongly/weakly connected; returning null MIP ' + if not connectivity.is_weak(transition.network.cm, + transition.node_indices): + log.info('%s is not strongly/weakly connected; returning null SIA ' 'immediately.', transition) - return _null_ac_bigmip(transition, direction) + return _null_ac_sia(transition, direction) log.debug("Finding unpartitioned account...") unpartitioned_account = account(transition, direction) log.debug("Found unpartitioned account.") if not unpartitioned_account: - log.info('Empty account; returning null AC MIP immediately.') - return _null_ac_bigmip(transition, direction) + log.info('Empty unpartitioned account; returning null AC SIA ' + 'immediately.') + return _null_ac_sia(transition, direction) cuts = _get_cuts(transition, direction) - finder = FindBigAcMip(cuts, transition, direction, unpartitioned_account) - result = finder.run_sequential() + engine = ComputeACSystemIrreducibility( + cuts, transition, direction, unpartitioned_account) + result = engine.run_sequential() log.info("Finished calculating big-ac-phi data for %s.", transition) log.debug("RESULT: \n%s", result) return result -class 
FindBigAcMip(compute.parallel.MapReduce): - """Computation engine for AC BigMips.""" +class ComputeACSystemIrreducibility(compute.parallel.MapReduce): + """Computation engine for AC SIAs.""" # pylint: disable=unused-argument,arguments-differ description = 'Evaluating AC cuts' - def empty_result(self, transition, direction, unpartitioned_account): - return _null_ac_bigmip(transition, direction, alpha=float('inf')) + def empty_result(self, transition, direction, account): + return _null_ac_sia(transition, direction, alpha=float('inf')) @staticmethod - def compute(cut, transition, direction, unpartitioned_account): - return _evaluate_cut(transition, cut, unpartitioned_account, direction) + def compute(cut, transition, direction, account): + return _evaluate_cut(transition, cut, account, direction) - def process_result(self, new_mip, min_mip): + def process_result(self, new_sia, min_sia): # Check a new result against the running minimum - if not new_mip: # alpha == 0 + if not new_sia: # alpha == 0 self.done = True - return new_mip + return new_sia - elif new_mip < min_mip: - return new_mip + elif new_sia < min_sia: + return new_sia - return min_mip + return min_sia -# ============================================================================ +# ============================================================================= # Complexes -# ============================================================================ +# ============================================================================= # TODO: Fix this to test whether the transition is possible def transitions(network, before_state, after_state): - '''Return a generator of all **possible** transitions of a network. - ''' + """Return a generator of all **possible** transitions of a network. + """ # TODO: Does not return subsystems that are in an impossible transitions. # Elements without inputs are reducibe effects, # elements without outputs are reducible causes. 
- possible_causes = np.where(np.sum(network.connectivity_matrix, 1) > 0)[0] - possible_effects = np.where(np.sum(network.connectivity_matrix, 0) > 0)[0] + possible_causes = np.where(np.sum(network.cm, 1) > 0)[0] + possible_effects = np.where(np.sum(network.cm, 0) > 0)[0] for cause_subset in utils.powerset(possible_causes, nonempty=True): for effect_subset in utils.powerset(possible_effects, nonempty=True): @@ -603,17 +619,17 @@ def transitions(network, before_state, after_state): def nexus(network, before_state, after_state, direction=Direction.BIDIRECTIONAL): - '''Return a tuple of all irreducible nexus of the network.''' + """Return a tuple of all irreducible nexus of the network.""" validate.is_network(network) - mips = (big_acmip(transition, direction) for transition in + sias = (sia(transition, direction) for transition in transitions(network, before_state, after_state)) - return tuple(sorted(filter(None, mips), reverse=True)) + return tuple(sorted(filter(None, sias), reverse=True)) def causal_nexus(network, before_state, after_state, direction=Direction.BIDIRECTIONAL): - '''Return the causal nexus of the network.''' + """Return the causal nexus of the network.""" validate.is_network(network) log.info("Calculating causal nexus...") @@ -621,65 +637,66 @@ def causal_nexus(network, before_state, after_state, if result: result = max(result) else: - null_transition = Transition(network, before_state, after_state, (), ()) - result = _null_ac_bigmip(null_transition, direction) + null_transition = Transition( + network, before_state, after_state, (), ()) + result = _null_ac_sia(null_transition, direction) log.info("Finished calculating causal nexus.") log.debug("RESULT: \n%s", result) return result -# ============================================================================ +# ============================================================================= # True Causes -# ============================================================================ +# 
============================================================================= # TODO: move this to __str__ -def nice_true_constellation(tc): - '''Format a true constellation.''' - past_list = [] - future_list = [] +def nice_true_ces(tc): + """Format a true |CauseEffectStructure|.""" + cause_list = [] + next_list = [] cause = '<--' effect = '-->' for event in tc: - if event.direction == Direction.PAST: - past_list.append(["{0:.4f}".format(round(event.alpha, 4)), - event.mechanism, cause, event.purview]) - elif event.direction == Direction.FUTURE: - future_list.append(["{0:.4f}".format(round(event.alpha, 4)), - event.mechanism, effect, event.purview]) + if event.direction == Direction.CAUSE: + cause_list.append(["{0:.4f}".format(round(event.alpha, 4)), + event.mechanism, cause, event.purview]) + elif event.direction == Direction.EFFECT: + next_list.append(["{0:.4f}".format(round(event.alpha, 4)), + event.mechanism, effect, event.purview]) else: validate.direction(event.direction) - true_list = [(past_list[event], future_list[event]) - for event in range(len(past_list))] + true_list = [(cause_list[event], next_list[event]) + for event in range(len(cause_list))] return true_list -def _actual_causes(network, past_state, current_state, nodes, +def _actual_causes(network, previous_state, current_state, nodes, mechanisms=False): log.info("Calculating true causes ...") - transition = Transition(network, past_state, current_state, nodes, nodes) + transition = Transition( + network, previous_state, current_state, nodes, nodes) - return directed_account(transition, Direction.PAST, mechanisms=mechanisms) + return directed_account(transition, Direction.CAUSE, mechanisms=mechanisms) -def _actual_effects(network, current_state, future_state, nodes, +def _actual_effects(network, current_state, next_state, nodes, mechanisms=False): log.info("Calculating true effects ...") - transition = Transition(network, current_state, future_state, nodes, nodes) + transition = Transition(network, 
current_state, next_state, nodes, nodes) - return directed_account(transition, Direction.FUTURE, mechanisms=mechanisms) + return directed_account( + transition, Direction.EFFECT, mechanisms=mechanisms) -def events(network, past_state, current_state, future_state, nodes, +def events(network, previous_state, current_state, next_state, nodes, mechanisms=False): - '''Find all events (mechanisms with actual causes and actual effects.''' - - actual_causes = _actual_causes(network, past_state, current_state, nodes, - mechanisms) - actual_effects = _actual_effects(network, current_state, future_state, + """Find all events (mechanisms with actual causes and actual effects).""" + actual_causes = _actual_causes(network, previous_state, current_state, + nodes, mechanisms) + actual_effects = _actual_effects(network, current_state, next_state, nodes, mechanisms) - actual_mechanisms = (set(c.mechanism for c in actual_causes) & set(c.mechanism for c in actual_effects)) @@ -687,8 +704,9 @@ def events(network, past_state, current_state, future_state, nodes, return () def index(actual_causes_or_effects): - '''Filter out unidirectional occurences and return a - dictionary keyed by the mechanism of the cause or effect.''' + """Filter out unidirectional occurences and return a dictionary keyed + by the mechanism of the cause or effect. + """ return {o.mechanism: o for o in actual_causes_or_effects if o.mechanism in actual_mechanisms} @@ -700,20 +718,20 @@ def index(actual_causes_or_effects): # TODO: do we need this? it's just a re-structuring of the `events` results -# TODO: rename to `actual_constellation`? -def true_constellation(subsystem, past_state, future_state): - '''Set of all sets of elements that have true causes and true effects. +# TODO: rename to `actual_ces`? +def true_ces(subsystem, previous_state, next_state): + """Set of all sets of elements that have true causes and true effects. .. 
note:: - Since the true constellation is always about the full system, + Since the true |CauseEffectStructure| is always about the full system, the background conditions don't matter and the subsystem should be conditioned on the current state. - ''' + """ network = subsystem.network nodes = subsystem.node_indices state = subsystem.state - _events = events(network, past_state, state, future_state, nodes) + _events = events(network, previous_state, state, next_state, nodes) if not _events: log.info("Finished calculating, no echo events.") @@ -727,67 +745,67 @@ def true_constellation(subsystem, past_state, future_state): return result -def true_events(network, past_state, current_state, future_state, indices=None, - main_complex=None): - '''Return all mechanisms that have true causes and true effects within the +def true_events(network, previous_state, current_state, next_state, + indices=None, major_complex=None): + """Return all mechanisms that have true causes and true effects within the complex. Args: network (Network): The network to analyze. - past_state (tuple[int]): The state of the network at ``t - 1``. + previous_state (tuple[int]): The state of the network at ``t - 1``. current_state (tuple[int]): The state of the network at ``t``. - future_state (tuple[int]): The state of the network at ``t + 1``. + next_state (tuple[int]): The state of the network at ``t + 1``. Keyword Args: - indices (tuple[int]): The indices of the main complex. - main_complex (AcBigMip): The main complex. If ``main_complex`` is given - then ``indices`` is ignored. + indices (tuple[int]): The indices of the major complex. + major_complex (AcSystemIrreducibilityAnalysis): The major complex. If + ``major_complex`` is given then ``indices`` is ignored. Returns: - tuple[Event]: List of true events in the main complex. - ''' + tuple[Event]: List of true events in the major complex. 
+ """ # TODO: validate triplet of states - if main_complex: - nodes = main_complex.subsystem.node_indices + if major_complex: + nodes = major_complex.subsystem.node_indices elif indices: nodes = indices else: - main_complex = compute.main_complex(network, current_state) - nodes = main_complex.subsystem.node_indices + major_complex = compute.major_complex(network, current_state) + nodes = major_complex.subsystem.node_indices - return events(network, past_state, current_state, future_state, nodes) + return events(network, previous_state, current_state, next_state, nodes) -def extrinsic_events(network, past_state, current_state, future_state, - indices=None, main_complex=None): - '''Set of all mechanisms that are in the main complex but which have true +def extrinsic_events(network, previous_state, current_state, next_state, + indices=None, major_complex=None): + """Set of all mechanisms that are in the major complex but which have true causes and effects within the entire network. Args: network (Network): The network to analyze. - past_state (tuple[int]): The state of the network at ``t - 1``. + previous_state (tuple[int]): The state of the network at ``t - 1``. current_state (tuple[int]): The state of the network at ``t``. - future_state (tuple[int]): The state of the network at ``t + 1``. + next_state (tuple[int]): The state of the network at ``t + 1``. Keyword Args: - indices (tuple[int]): The indices of the main complex. - main_complex (AcBigMip): The main complex. If ``main_complex`` is given - then ``indices`` is ignored. + indices (tuple[int]): The indices of the major complex. + major_complex (AcSystemIrreducibilityAnalysis): The major complex. If + ``major_complex`` is given then ``indices`` is ignored. Returns: - tuple(actions): List of extrinsic events in the main complex. - ''' - if main_complex: - mc_nodes = main_complex.subsystem.node_indices + tuple(actions): List of extrinsic events in the major complex. 
+ """ + if major_complex: + mc_nodes = major_complex.subsystem.node_indices elif indices: mc_nodes = indices else: - main_complex = compute.main_complex(network, current_state) - mc_nodes = main_complex.subsystem.node_indices + major_complex = compute.major_complex(network, current_state) + mc_nodes = major_complex.subsystem.node_indices mechanisms = list(utils.powerset(mc_nodes, nonempty=True)) all_nodes = network.node_indices - return events(network, past_state, current_state, future_state, all_nodes, - mechanisms=mechanisms) + return events(network, previous_state, current_state, next_state, + all_nodes, mechanisms=mechanisms) diff --git a/pyphi/cache.py b/pyphi/cache.py index 32728cab8..4c00448f4 100644 --- a/pyphi/cache.py +++ b/pyphi/cache.py @@ -2,12 +2,13 @@ # -*- coding: utf-8 -*- # cache.py -''' -A memory-limited cache decorator. -''' +""" +Memoization and caching utilities. +""" # pylint: disable=redefined-builtin,redefined-outer-name,missing-docstring -# pylint: disable=too-few-public-methods,no-self-use,arguments-differ, +# pylint: disable=too-few-public-methods,no-self-use,arguments-differ +# pylint: disable=dangerous-default-value,redefined-builtin,too-many-arguments # pylint: disable=abstract-method import os @@ -23,17 +24,17 @@ def memory_full(): - '''Check if the memory is too full for further caching.''' + """Check if the memory is too full for further caching.""" current_process = psutil.Process(os.getpid()) return (current_process.memory_percent() > config.MAXIMUM_CACHE_MEMORY_PERCENTAGE) class _HashedSeq(list): - '''This class guarantees that ``hash()`` will be called no more than once + """This class guarantees that ``hash()`` will be called no more than once per element. This is important because the ``lru_cache()`` will hash the key multiple times on a cache miss. 
- ''' + """ __slots__ = ('hashvalue',) @@ -46,12 +47,11 @@ def __hash__(self): return self.hashvalue -# pylint: disable=dangerous-default-value,redefined-builtin,too-many-arguments def _make_key(args, kwds, typed, kwd_mark=(object(),), fasttypes={int, str, frozenset, type(None)}, sorted=sorted, tuple=tuple, type=type, len=len): - '''Make a cache key from optionally typed positional and keyword arguments. + """Make a cache key from optionally typed positional and keyword arguments. The key is constructed in a way that is flat as possible rather than as a nested structure that would take more memory. @@ -59,7 +59,7 @@ def _make_key(args, kwds, typed, If there is only a single argument and its data type is known to cache its hash value, then that argument is returned without a wrapper. This saves space and improves lookup speed. - ''' + """ key = args if kwds: sorted_items = sorted(kwds.items()) @@ -77,7 +77,7 @@ def _make_key(args, kwds, typed, def cache(cache={}, maxmem=config.MAXIMUM_CACHE_MEMORY_PERCENTAGE, typed=False): - '''Memory-limited cache decorator. + """Memory-limited cache decorator. ``maxmem`` is a float between 0 and 100, inclusive, specifying the maximum percentage of physical memory that the cache can use. @@ -91,7 +91,7 @@ def cache(cache={}, maxmem=config.MAXIMUM_CACHE_MEMORY_PERCENTAGE, View the cache statistics named tuple (hits, misses, currsize) with f.cache_info(). Clear the cache and statistics with f.cache_clear(). Access the underlying function with f.__wrapped__. - ''' + """ # Constants shared by all lru cache instances: # Unique object used to signal cache misses. 
sentinel = object() @@ -139,11 +139,11 @@ def wrapper(*args, **kwds): return result def cache_info(): - '''Report cache statistics.''' + """Report cache statistics.""" return _CacheInfo(hits, misses, len(cache)) def cache_clear(): - '''Clear the cache and cache statistics.''' + """Clear the cache and cache statistics.""" nonlocal hits, misses, full cache.clear() hits = misses = 0 @@ -157,10 +157,11 @@ def cache_clear(): class DictCache: - '''A generic dictionary-based cache. + """A generic dictionary-based cache. Intended to be used as an object-level cache of method results. - ''' + """ + def __init__(self): self.cache = {} self.hits = 0 @@ -172,19 +173,19 @@ def clear(self): self.misses = 0 def size(self): - '''Number of items in cache''' + """Number of items in cache""" return len(self.cache) def info(self): - '''Return info about cache hits, misses, and size''' + """Return info about cache hits, misses, and size""" return _CacheInfo(self.hits, self.misses, self.size()) def get(self, key): - '''Get a value out of the cache. + """Get a value out of the cache. Returns None if the key is not in the cache. Updates cache statistics. - ''' + """ if key in self.cache: self.hits += 1 return self.cache[key] @@ -192,34 +193,36 @@ def get(self, key): return None def set(self, key, value): - '''Set a value in the cache''' + """Set a value in the cache""" self.cache[key] = value # TODO: handle **kwarg keys if needed # See joblib.func_inspect.filter_args def key(self, *args, _prefix=None, **kwargs): - '''Get the cache key for the given function args. + """Get the cache key for the given function args. Kwargs: prefix: A constant to prefix to the key. 
- ''' + """ if kwargs: raise NotImplementedError( 'kwarg cache keys not implemented') return (_prefix,) + tuple(args) -# TODO: confirm that a global connection/pool makes sense, esp for multiprocesssing +# TODO: confirm that a global connection/pool makes sense, esp for +# multiprocesssing # TODO: maybe just expose the connction `if REDIS_CACHE`, instead of with this # singleton business class RedisConn: - '''Singleton redis connection object. + """Singleton redis connection object. Expose the StrictRedis api, but only maintain one connection pool. Raises: redis.exceptions.ConnectionError: If the Redis server is not available. - ''' + """ + instance = None def __init__(self): @@ -233,7 +236,7 @@ def __init__(self): RedisConn.instance = conn def __getattr__(self, name): - '''Delegate lookup to ``StrictRedis``''' + """Delegate lookup to ``StrictRedis``""" return getattr(self.instance, name) @@ -246,27 +249,27 @@ def clear(self): @staticmethod def size(): - '''Size of the Redis cache. + """Size of the Redis cache. .. note:: This is the size of the entire Redis database. - ''' + """ return RedisConn().dbsize() def info(self): - '''Return cache information. + """Return cache information. .. note:: This is not the cache info for the entire Redis key space. - ''' + """ info = RedisConn().info() return _CacheInfo(info['keyspace_hits'], info['keyspace_misses'], self.size()) def get(self, key): - '''Get a value from the cache. + """Get a value from the cache. Returns None if the key is not in the cache. 
- ''' + """ value = RedisConn().get(key) if value is not None: @@ -275,12 +278,12 @@ def get(self, key): return value def set(self, key, value): - '''Set a value in the cache.''' + """Set a value in the cache.""" value = pickle.dumps(value, protocol=constants.PICKLE_PROTOCOL) RedisConn().set(key, value) def key(self): - '''Delegate to subclasses.''' + """Delegate to subclasses.""" raise NotImplementedError @@ -291,11 +294,12 @@ def validate_parent_cache(parent_cache): raise ValueError("parent_cache must be from an uncut subsystem") -class RedisMiceCache(RedisCache): - '''A Redis-backed cache for `Subsystem.find_mice`. +class RedisMICECache(RedisCache): + """A Redis-backed cache for `Subsystem.find_mice`. + + See |MICECache| for more info. + """ - See |MiceCache| for more info. - ''' def __init__(self, subsystem, parent_cache=None): super().__init__() self.subsystem = subsystem @@ -303,9 +307,9 @@ def __init__(self, subsystem, parent_cache=None): if parent_cache is not None: validate_parent_cache(parent_cache) - # Store the hash of the parent subsystem. We don't want to store the - # parent subsystem explicitly so that it does not need to be passed - # between processes. + # Store the hash of the parent subsystem. We don't want to store + # the parent subsystem explicitly so that it does not need to be + # passed between processes. self.parent_subsystem_hash = parent_cache.subsystem_hash else: self.parent_subsystem_hash = None @@ -313,11 +317,11 @@ def __init__(self, subsystem, parent_cache=None): # TODO: if the value is found in the parent cache, store it in this # cache so we don't have to call `damaged_by_cut` over and over? def get(self, key): - '''Get a value from the cache. + """Get a value from the cache. - If the Mice cannot be found in this cache, try and find it in the + If the |MICE| cannot be found in this cache, try and find it in the parent cache. 
- ''' + """ mice = super().get(key) if mice is not None: # Hit @@ -335,24 +339,25 @@ def get(self, key): return None def set(self, key, value): - '''Only need to set if the subsystem is uncut. + """Only need to set if the subsystem is uncut. Caches are only inherited from uncut subsystems. - ''' + """ if not self.subsystem.is_cut: super().set(key, value) def key(self, direction, mechanism, purviews=False, _prefix=None): - '''Cache key. This is the call signature of |find_mice|''' + """Cache key. This is the call signature of |find_mice|""" return "subsys:{}:{}:{}:{}:{}".format( self.subsystem_hash, _prefix, direction, mechanism, purviews) -class DictMiceCache(DictCache): - '''A subsystem-local cache for |Mice| objects. +class DictMICECache(DictCache): + """A subsystem-local cache for |MICE| objects. + + See |MICECache| for more info. + """ - See |MiceCache| for more info. - ''' def __init__(self, subsystem, parent_cache=None): super().__init__() self.subsystem = subsystem @@ -362,41 +367,41 @@ def __init__(self, subsystem, parent_cache=None): self._build(parent_cache) def _build(self, parent_cache): - '''Build the initial cache from the parent. + """Build the initial cache from the parent. - Only include the Mice which are unaffected by the subsystem cut. - A Mice is affected if either the cut splits the mechanism + Only include the |MICE| which are unaffected by the subsystem cut. + A |MICE| is affected if either the cut splits the mechanism or splits the connections between the purview and mechanism - ''' + """ for key, mice in parent_cache.cache.items(): if not mice.damaged_by_cut(self.subsystem): self.cache[key] = mice def set(self, key, mice): - '''Set a value in the cache. + """Set a value in the cache. Only cache if: - The subsystem is uncut (caches are only inherited from uncut subsystems so there is no reason to cache on cut subsystems.) - - |phi| > 0. Ideally we would cache all mice, but the size + - |small_phi| > 0. 
Ideally we would cache all mice, but the size of the cache grows way too large, making parallel computations incredibly inefficient because the caches have to be passed between process. This will be changed once global caches are implemented. - Memory is not too full. - ''' - if (not self.subsystem.is_cut and mice.phi > 0 - and not memory_full()): + """ + if (not self.subsystem.is_cut and mice.phi > 0 and + not memory_full()): self.cache[key] = mice def key(self, direction, mechanism, purviews=False, _prefix=None): - '''Cache key. This is the call signature of |find_mice|''' + """Cache key. This is the call signature of |find_mice|""" return (_prefix, direction, mechanism, purviews) -def MiceCache(subsystem, parent_cache=None): - '''Construct a Mice cache. +def MICECache(subsystem, parent_cache=None): + """Construct a |MICE| cache. Uses either a Redis-backed cache or a local dict cache on the object. @@ -404,29 +409,29 @@ def MiceCache(subsystem, parent_cache=None): subsystem (Subsystem): The subsystem that this is a cache for. Kwargs: - parent_cache (MiceCache): The cache generated by the uncut - version of ``subsystem``. Any cached |Mice| which are + parent_cache (MICECache): The cache generated by the uncut + version of ``subsystem``. Any cached |MICE| which are unaffected by the cut are reused in this cache. If None, the cache is initialized empty. - ''' + """ if config.REDIS_CACHE: - cls = RedisMiceCache + cls = RedisMICECache else: - cls = DictMiceCache + cls = DictMICECache return cls(subsystem, parent_cache=parent_cache) class PurviewCache(DictCache): - '''A network-level cache for possible purviews.''' + """A network-level cache for possible purviews.""" def set(self, key, value): - '''Only set if purview caching is enabled''' + """Only set if purview caching is enabled""" if config.CACHE_POTENTIAL_PURVIEWS: self.cache[key] = value def method(cache_name, key_prefix=None): - '''Caching decorator for object-level method caches. 
+ """Caching decorator for object-level method caches. Cache key generation is delegated to the cache. @@ -436,8 +441,11 @@ def method(cache_name, key_prefix=None): of this method. *key_prefix: A constant to use as part of the cache key in addition to the method arguments. - ''' + """ def decorator(func): + if (func.__name__ in ['cause_repertoire', 'effect_repertoire'] and + not config.CACHE_REPERTOIRES): + return func @wraps(func) def wrapper(obj, *args, **kwargs): cache = getattr(obj, cache_name) diff --git a/pyphi/compute/__init__.py b/pyphi/compute/__init__.py index fbc9023c7..8b631ed26 100644 --- a/pyphi/compute/__init__.py +++ b/pyphi/compute/__init__.py @@ -2,30 +2,34 @@ # -*- coding: utf-8 -*- # compute/__init__.py -'''Maintains backwards compatability with the old ``compute`` API. - -See :mod:`compute.concept` and :mod:`compute.big_phi` for documentation. +""" +See |compute.subsystem|, |compute.network|, |compute.distance|, and +|compute.parallel| for documentation. Attributes: - concept: Alias for :func:`concept.concept`. - conceptual_information: Alias for :func:`concept.conceptual_information`. - constellation: Alias for :func:`concept.constellation`. - concept_distance: Alias for :func:`distance.concept_distance`. - constellation_distance: Alias for :func:`distance.constellation_distance`. - all_complexes: Alias for :func:`big_phi.all_complexes`. - big_mip: Alias for :func:`big_phi.big_mip`. - big_phi: Alias for :func:`big_phi.big_phi`. - complexes: Alias for :func:`big_phi.complexes`. - condensed: Alias for :func:`big_phi.condensed`. - evaluate_cut: Alias for :func:`big_phi.evaluate_cut`. - main_complex: Alias for :func:`big_phi.main_complex`. - possible_complexes: Alias for :func:`big_phi.possible_complexes`. - subsystems: Alias for :func:`big_phi.subsystems`. -''' + all_complexes: Alias for :func:`pyphi.compute.network.all_complexes`. + ces: Alias for :func:`pyphi.compute.subsystem.ces`. 
+ ces_distance: Alias for :func:`pyphi.compute.distance.ces_distance`. + complexes: Alias for :func:`pyphi.compute.network.complexes`. + concept_distance: Alias for + :func:`pyphi.compute.distance.concept_distance`. + conceptual_info: Alias for :func:`pyphi.compute.subsystem.conceptual_info`. + condensed: Alias for :func:`pyphi.compute.network.condensed`. + evaluate_cut: Alias for :func:`pyphi.compute.subsystem.evaluate_cut`. + major_complex: Alias for :func:`pyphi.compute.network.major_complex`. + phi: Alias for :func:`pyphi.compute.subsystem.phi`. + possible_complexes: Alias for + :func:`pyphi.compute.network.possible_complexes`. + sia: Alias for :func:`pyphi.compute.subsystem.sia`. + subsystems: Alias for :func:`pyphi.compute.network.subsystems`. +""" + +# pylint: disable=unused-import -from .big_phi import (all_complexes, big_mip, big_phi, complexes, condensed, - evaluate_cut, main_complex, possible_complexes, - subsystems, ConceptStyleSystem, big_mip_concept_style, - concept_cuts, BigMipConceptStyle) -from .concept import concept, conceptual_information, constellation -from .distance import concept_distance, constellation_distance +from .subsystem import (sia, phi, evaluate_cut, ConceptStyleSystem, + sia_concept_style, concept_cuts, + SystemIrreducibilityAnalysisConceptStyle, + conceptual_info, ces) +from .network import (all_complexes, complexes, condensed, major_complex, + possible_complexes, subsystems) +from .distance import concept_distance, ces_distance diff --git a/pyphi/compute/big_phi.py b/pyphi/compute/big_phi.py deleted file mode 100644 index 0c4e99d94..000000000 --- a/pyphi/compute/big_phi.py +++ /dev/null @@ -1,473 +0,0 @@ -# -*- coding: utf-8 -*- -# compute/big_phi.py - -''' -Functions for computing integrated information and finding complexes. -''' - -import functools -import logging -from time import time - -from .. 
import (Direction, config, connectivity, exceptions, memory, utils, - validate) -from ..models import BigMip, Concept, Cut, KCut, _null_bigmip, cmp, fmt -from ..partition import directed_bipartition, directed_bipartition_of_one -from ..subsystem import Subsystem, mip_partitions -from .concept import constellation -from .distance import constellation_distance -from .parallel import MapReduce - -# Create a logger for this module. -log = logging.getLogger(__name__) - - -def evaluate_cut(uncut_subsystem, cut, unpartitioned_constellation): - '''Find the |BigMip| for a given cut. - - Args: - uncut_subsystem (Subsystem): The subsystem without the cut applied. - cut (Cut): The cut to evaluate. - unpartitioned_constellation (Constellation): The constellation of the - uncut subsystem. - - Returns: - BigMip: The |BigMip| for that cut. - ''' - log.debug('Evaluating %s...', cut) - - cut_subsystem = uncut_subsystem.apply_cut(cut) - - if config.ASSUME_CUTS_CANNOT_CREATE_NEW_CONCEPTS: - mechanisms = unpartitioned_constellation.mechanisms - else: - # Mechanisms can only produce concepts if they were concepts in the - # original system, or the cut divides the mechanism. 
- mechanisms = set( - unpartitioned_constellation.mechanisms + - list(cut_subsystem.cut_mechanisms)) - - partitioned_constellation = constellation(cut_subsystem, mechanisms) - - log.debug('Finished evaluating %s.', cut) - - phi = constellation_distance(unpartitioned_constellation, - partitioned_constellation) - - return BigMip( - phi=phi, - unpartitioned_constellation=unpartitioned_constellation, - partitioned_constellation=partitioned_constellation, - subsystem=uncut_subsystem, - cut_subsystem=cut_subsystem) - - -class FindMip(MapReduce): - '''Computation engine for finding the minimal |BigMip|.''' - # pylint: disable=unused-argument,arguments-differ - - description = 'Evaluating {} cuts'.format(fmt.BIG_PHI) - - def empty_result(self, subsystem, unpartitioned_constellation): - '''Begin with a mip with infinite |big_phi|; all actual mips will have - less.''' - return _null_bigmip(subsystem, phi=float('inf')) - - @staticmethod - def compute(cut, subsystem, unpartitioned_constellation): - '''Evaluate a cut.''' - return evaluate_cut(subsystem, cut, unpartitioned_constellation) - - def process_result(self, new_mip, min_mip): - '''Check if the new mip has smaller phi than the standing result.''' - if new_mip.phi == 0: - self.done = True # Short-circuit - return new_mip - - elif new_mip < min_mip: - return new_mip - - return min_mip - - -def big_mip_bipartitions(nodes): - '''Return all |big_phi| cuts for the given nodes. - - This value changes based on :const:`config.CUT_ONE_APPROXIMATION`. - - Args: - nodes (tuple[int]): The node indices to partition. - Returns: - list[Cut]: All unidirectional partitions. 
- ''' - if config.CUT_ONE_APPROXIMATION: - bipartitions = directed_bipartition_of_one(nodes) - else: - # Don't consider trivial partitions where one part is empty - bipartitions = directed_bipartition(nodes, nontrivial=True) - - return [Cut(bipartition[0], bipartition[1]) - for bipartition in bipartitions] - - -def _unpartitioned_constellation(subsystem): - '''Parallelize the unpartitioned constellation if parallelizing cuts, - since we have free processors because we're not computing any cuts yet.''' - return constellation(subsystem, parallel=config.PARALLEL_CUT_EVALUATION) - - -# pylint: disable=unused-argument -@memory.cache(ignore=["subsystem"]) -def _big_mip(cache_key, subsystem): - '''Return the minimal information partition of a subsystem. - - Args: - subsystem (Subsystem): The candidate set of nodes. - - Returns: - BigMip: A nested structure containing all the data from the - intermediate calculations. The top level contains the basic MIP - information for the given subsystem. - ''' - log.info('Calculating big-phi data for %s...', subsystem) - start = time() - - def time_annotated(bm, small_phi_time=0.0): - '''Annote a BigMip with the total elapsed calculation time. - - Optionally add the time taken to calculate the unpartitioned - constellation. - ''' - bm.time = round(time() - start, config.PRECISION) - bm.small_phi_time = round(small_phi_time, config.PRECISION) - return bm - - # Check for degenerate cases - # ========================================================================= - # Phi is necessarily zero if the subsystem is: - # - not strongly connected; - # - empty; - # - an elementary micro mechanism (i.e. no nontrivial bipartitions). - # So in those cases we immediately return a null MIP. 
- if not subsystem: - log.info('Subsystem %s is empty; returning null MIP ' - 'immediately.', subsystem) - return time_annotated(_null_bigmip(subsystem)) - - if not connectivity.is_strong(subsystem.cm, subsystem.node_indices): - log.info('%s is not strongly connected; returning null MIP ' - 'immediately.', subsystem) - return time_annotated(_null_bigmip(subsystem)) - - # Handle elementary micro mechanism cases. - # Single macro element systems have nontrivial bipartitions because their - # bipartitions are over their micro elements. - if len(subsystem.cut_indices) == 1: - # If the node lacks a self-loop, phi is trivially zero. - if not subsystem.cm[subsystem.node_indices][subsystem.node_indices]: - log.info('Single micro nodes %s without selfloops cannot have phi; ' - 'returning null MIP immediately.', subsystem) - return time_annotated(_null_bigmip(subsystem)) - # Even if the node has a self-loop, we may still define phi to be zero. - elif not config.SINGLE_MICRO_NODES_WITH_SELFLOOPS_HAVE_PHI: - log.info('Single micro nodes %s with selfloops cannot have phi; ' - 'returning null MIP immediately.', subsystem) - return time_annotated(_null_bigmip(subsystem)) - # ========================================================================= - - log.debug('Finding unpartitioned constellation...') - small_phi_start = time() - unpartitioned_constellation = _unpartitioned_constellation(subsystem) - small_phi_time = round(time() - small_phi_start, config.PRECISION) - - if not unpartitioned_constellation: - log.info('Empty unpartitioned constellation; returning null MIP ' - 'immediately.') - # Short-circuit if there are no concepts in the unpartitioned - # constellation. 
- return time_annotated(_null_bigmip(subsystem)) - - log.debug('Found unpartitioned constellation.') - if len(subsystem.cut_indices) == 1: - cuts = [Cut(subsystem.cut_indices, subsystem.cut_indices)] - else: - cuts = big_mip_bipartitions(subsystem.cut_indices) - finder = FindMip(cuts, subsystem, unpartitioned_constellation) - min_mip = finder.run(config.PARALLEL_CUT_EVALUATION) - result = time_annotated(min_mip, small_phi_time) - - log.info('Finished calculating big-phi data for %s.', subsystem) - - return result - - -# TODO(maintainance): don't forget to add any new configuration options here if -# they can change big-phi values -def _big_mip_cache_key(subsystem): - '''The cache key of the subsystem. - - This includes the native hash of the subsystem and all configuration values - which change the results of ``big_mip``. - ''' - return ( - hash(subsystem), - config.ASSUME_CUTS_CANNOT_CREATE_NEW_CONCEPTS, - config.CUT_ONE_APPROXIMATION, - config.MEASURE, - config.PRECISION, - config.VALIDATE_SUBSYSTEM_STATES, - config.SINGLE_MICRO_NODES_WITH_SELFLOOPS_HAVE_PHI, - config.PARTITION_TYPE, - ) - - -# Wrapper to ensure that the cache key is the native hash of the subsystem, so -# joblib doesn't mistakenly recompute things when the subsystem's MICE cache is -# changed. The cache is also keyed on configuration values which affect the -# value of the computation. -@functools.wraps(_big_mip) -def big_mip(subsystem): # pylint: disable=missing-docstring - if config.SYSTEM_CUTS == 'CONCEPT_STYLE': - return big_mip_concept_style(subsystem) - - return _big_mip(_big_mip_cache_key(subsystem), subsystem) - - -def big_phi(subsystem): - '''Return the |big_phi| value of a subsystem.''' - return big_mip(subsystem).phi - - -def _reachable_subsystems(network, indices, state): - '''A generator over all subsystems in a valid state.''' - validate.is_network(network) - - # Return subsystems largest to smallest to optimize parallel - # resource usage. 
- for subset in utils.powerset(indices, nonempty=True, reverse=True): - try: - yield Subsystem(network, state, subset) - except exceptions.StateUnreachableError: - pass - - -def subsystems(network, state): - '''Return a generator of all **possible** subsystems of a network. - - Does not return subsystems that are in an impossible state. - ''' - return _reachable_subsystems(network, network.node_indices, state) - - -def possible_complexes(network, state): - '''Return a generator of subsystems of a network that could be a complex. - - This is the just powerset of the nodes that have at least one input and - output (nodes with no inputs or no outputs cannot be part of a main - complex, because they do not have a causal link with the rest of the - subsystem in the past or future, respectively). - - Does not include subsystems in an impossible state. - - Args: - network (Network): The network for which to return possible complexes. - state (tuple[int]): The state of the network. - - Yields: - Subsystem: The next subsystem which could be a complex. - ''' - return _reachable_subsystems(network, network.causally_significant_nodes, - state) - -class FindAllComplexes(MapReduce): - '''Computation engine for computing all complexes''' - # pylint: disable=unused-argument,arguments-differ - description = 'Finding complexes' - - def empty_result(self): - return [] - - @staticmethod - def compute(subsystem): - return big_mip(subsystem) - - def process_result(self, new_big_mip, big_mips): - big_mips.append(new_big_mip) - return big_mips - - -def all_complexes(network, state): - '''Return a generator for all complexes of the network. - - Includes reducible, zero-|big_phi| complexes (which are not, strictly - speaking, complexes at all). 
- ''' - engine = FindAllComplexes(subsystems(network, state)) - return engine.run(config.PARALLEL_COMPLEX_EVALUATION) - - -class FindIrreducibleComplexes(FindAllComplexes): - '''Computation engine for computing irreducible complexes of a network.''' - - def process_result(self, new_big_mip, big_mips): - if new_big_mip.phi > 0: - big_mips.append(new_big_mip) - return big_mips - - -def complexes(network, state): - '''Return all irreducible complexes of the network.''' - engine = FindIrreducibleComplexes(possible_complexes(network, state)) - return engine.run(config.PARALLEL_COMPLEX_EVALUATION) - - -def main_complex(network, state): - '''Return the main complex of the network.''' - log.info('Calculating main complex...') - - result = complexes(network, state) - if result: - result = max(result) - else: - empty_subsystem = Subsystem(network, state, ()) - result = _null_bigmip(empty_subsystem) - - log.info("Finished calculating main complex.") - - return result - - -def condensed(network, state): - '''Return the set of maximal non-overlapping complexes.''' - result = [] - covered_nodes = set() - - for c in reversed(sorted(complexes(network, state))): - if not any(n in covered_nodes for n in c.subsystem.node_indices): - result.append(c) - covered_nodes = covered_nodes | set(c.subsystem.node_indices) - - return result - - -class ConceptStyleSystem: - """A functional replacement for ``Subsystem`` implementing concept-style - system cuts. - """ - def __init__(self, subsystem, direction, cut=None): - self.subsystem = subsystem - self.direction = direction - self.cut = cut - self.cut_system = subsystem.apply_cut(cut) - - def apply_cut(self, cut): - return ConceptStyleSystem(self.subsystem, self.direction, cut) - - def __getattr__(self, name): - """Pass attribute access through to the basic subsystem.""" - # Unpickling calls `__getattr__` before the object's dict is populated; - # check that `subsystem` exists to avoid a recursion error. 
- # See https://bugs.python.org/issue5370. - if 'subsystem' in self.__dict__: - return getattr(self.subsystem, name) - raise AttributeError(name) - - def __len__(self): - return len(self.subsystem) - - @property - def cause_system(self): - return { - Direction.PAST: self.cut_system, - Direction.FUTURE: self.subsystem - }[self.direction] - - @property - def effect_system(self): - return { - Direction.PAST: self.subsystem, - Direction.FUTURE: self.cut_system - }[self.direction] - - def concept(self, mechanism, purviews=False, past_purviews=False, - future_purviews=False): - '''Compute a concept, using the appropriate system for each side of - the cut.''' - cause = self.cause_system.core_cause( - mechanism, purviews=(past_purviews or purviews)) - - effect = self.effect_system.core_effect( - mechanism, purviews=(future_purviews or purviews)) - - return Concept(mechanism=mechanism, cause=cause, effect=effect, - subsystem=self) - - def __str__(self): - return 'ConceptStyleSystem{}'.format(self.node_indices) - - -def concept_cuts(direction, node_indices): - '''Generator over all concept-syle cuts for these nodes.''' - for partition in mip_partitions(node_indices, node_indices): - yield KCut(direction, partition) - - -def directional_big_mip(subsystem, direction, unpartitioned_constellation=None): - """Calculate a concept-style BigMipPast or BigMipFuture.""" - if unpartitioned_constellation is None: - unpartitioned_constellation = _unpartitioned_constellation(subsystem) - - c_system = ConceptStyleSystem(subsystem, direction) - cuts = concept_cuts(direction, c_system.cut_indices) - - # Run the default MIP finder - # TODO: verify that short-cutting works correctly? 
- finder = FindMip(cuts, c_system, unpartitioned_constellation) - return finder.run(config.PARALLEL_CUT_EVALUATION) - - -# TODO: only return the minimal mip, instead of both -class BigMipConceptStyle(cmp.Orderable): - '''Represents a Big Mip computed using concept-style system cuts.''' - - def __init__(self, mip_past, mip_future): - self.big_mip_past = mip_past - self.big_mip_future = mip_future - - @property - def min_mip(self): - return min(self.big_mip_past, self.big_mip_future, key=lambda m: m.phi) - - def __getattr__(self, name): - '''Pass attribute access through to the minimal mip.''' - if ('big_mip_past' in self.__dict__ and - 'big_mip_future' in self.__dict__): - return getattr(self.min_mip, name) - raise AttributeError(name) - - def __eq__(self, other): - return cmp.general_eq(self, other, ['phi']) - - unorderable_unless_eq = ['network'] - - def order_by(self): - return [self.phi, len(self.subsystem)] - - def __repr__(self): - return repr(self.min_mip) - - def __str__(self): - return str(self.min_mip) - - -# TODO: cache -def big_mip_concept_style(subsystem): - '''Compute a concept-style Big Mip''' - unpartitioned_constellation = _unpartitioned_constellation(subsystem) - - mip_past = directional_big_mip(subsystem, Direction.PAST, - unpartitioned_constellation) - mip_future = directional_big_mip(subsystem, Direction.FUTURE, - unpartitioned_constellation) - - return BigMipConceptStyle(mip_past, mip_future) diff --git a/pyphi/compute/concept.py b/pyphi/compute/concept.py deleted file mode 100644 index 30e251326..000000000 --- a/pyphi/compute/concept.py +++ /dev/null @@ -1,124 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# compute/concept.py - -''' -Functions for computing concepts and constellations of concepts. -''' - -# pylint: disable=too-many-arguments,redefined-outer-name - -import logging -from time import time - -from . import parallel -from .. 
import config, models, utils -from .distance import constellation_distance - -log = logging.getLogger(__name__) - - -def concept(subsystem, mechanism, purviews=False, past_purviews=False, - future_purviews=False): - '''Return the concept specified by a mechanism within a subsytem. - - Args: - subsystem (Subsystem): The context in which the mechanism should be - considered. - mechanism (tuple[int]): The candidate set of nodes. - - Keyword Args: - purviews (tuple[tuple[int]]): Restrict the possible purviews to those - in this list. - past_purviews (tuple[tuple[int]]): Restrict the possible cause - purviews to those in this list. Takes precedence over ``purviews``. - future_purviews (tuple[tuple[int]]): Restrict the possible effect - purviews to those in this list. Takes precedence over ``purviews``. - - Returns: - Concept: The pair of maximally irreducible cause/effect repertoires - that constitute the concept specified by the given mechanism. - ''' - start = time() - log.debug('Computing concept %s...', mechanism) - - # If the mechanism is empty, there is no concept. 
- if not mechanism: - result = subsystem.null_concept - else: - result = subsystem.concept( - mechanism, purviews=purviews, past_purviews=past_purviews, - future_purviews=future_purviews) - - result.time = round(time() - start, config.PRECISION) - log.debug('Found concept %s', mechanism) - return result - - -class ComputeConstellation(parallel.MapReduce): - '''Engine for computing a constellation.''' - # pylint: disable=unused-argument,arguments-differ - - description = 'Computing concepts' - - def empty_result(self, *args): - return [] - - @staticmethod - def compute(mechanism, subsystem, purviews, past_purviews, - future_purviews): - '''Compute a concept for a mechanism, in this subsystem with the - provided purviews.''' - return concept(subsystem, mechanism, purviews=purviews, - past_purviews=past_purviews, - future_purviews=future_purviews) - - def process_result(self, new_concept, concepts): - '''Save all concepts with non-zero phi to the constellation.''' - if new_concept.phi > 0: - concepts.append(new_concept) - return concepts - - -def constellation(subsystem, mechanisms=False, purviews=False, - past_purviews=False, future_purviews=False, parallel=False): - '''Return the conceptual structure of this subsystem, optionally restricted - to concepts with the mechanisms and purviews given in keyword arguments. - - If you don't need the full constellation, restricting the possible - mechanisms and purviews can make this function much faster. - - Args: - subsystem (Subsystem): The subsystem for which to determine the - constellation. - - Keyword Args: - mechanisms (tuple[tuple[int]]): Restrict possible mechanisms to those - in this list. - purviews (tuple[tuple[int]]): Same as in :func:`concept`. - past_purviews (tuple[tuple[int]]): Same as in :func:`concept`. - future_purviews (tuple[tuple[int]]): Same as in :func:`concept`. - parallel (bool): Whether to compute concepts in parallel. If ``True``, - overrides :data:`config.PARALLEL_CONCEPT_EVALUATION`. 
- - Returns: - Constellation: A tuple of every |Concept| in the constellation. - ''' - if mechanisms is False: - mechanisms = utils.powerset(subsystem.node_indices, nonempty=True) - - engine = ComputeConstellation(mechanisms, subsystem, purviews, - past_purviews, future_purviews) - - return models.Constellation(engine.run(parallel or - config.PARALLEL_CONCEPT_EVALUATION)) - - -def conceptual_information(subsystem): - '''Return the conceptual information for a subsystem. - - This is the distance from the subsystem's constellation to the null - concept. - ''' - ci = constellation_distance(constellation(subsystem), ()) - return round(ci, config.PRECISION) diff --git a/pyphi/compute/distance.py b/pyphi/compute/distance.py index b78799fc7..0742b8b2d 100644 --- a/pyphi/compute/distance.py +++ b/pyphi/compute/distance.py @@ -2,19 +2,19 @@ # -*- coding: utf-8 -*- # compute/distance.py -''' +""" Functions for computing distances between various PyPhi objects. -''' +""" import numpy as np from .. import config, utils -from ..distance import big_phi_measure as measure +from ..distance import system_repertoire_distance as repertoire_distance from ..distance import emd def concept_distance(c1, c2): - '''Return the distance between two concepts in concept space. + """Return the distance between two concepts in concept space. Args: c1 (Concept): The first concept. @@ -22,26 +22,26 @@ def concept_distance(c1, c2): Returns: float: The distance between the two concepts in concept space. - ''' - # Calculate the sum of the past and future EMDs, expanding the repertoires + """ + # Calculate the sum of the cause and effect EMDs, expanding the repertoires # to the combined purview of the two concepts, so that the EMD signatures # are the same size. 
cause_purview = tuple(set(c1.cause.purview + c2.cause.purview)) effect_purview = tuple(set(c1.effect.purview + c2.effect.purview)) # Take the sum - return (measure(c1.expand_cause_repertoire(cause_purview), - c2.expand_cause_repertoire(cause_purview)) + - measure(c1.expand_effect_repertoire(effect_purview), - c2.expand_effect_repertoire(effect_purview))) + return (repertoire_distance(c1.expand_cause_repertoire(cause_purview), + c2.expand_cause_repertoire(cause_purview)) + + repertoire_distance(c1.expand_effect_repertoire(effect_purview), + c2.expand_effect_repertoire(effect_purview))) -def _constellation_distance_simple(C1, C2): - '''Return the distance between two constellations in concept space. +def _ces_distance_simple(C1, C2): + """Return the distance between two cause-effect structures. Assumes the only difference between them is that some concepts have disappeared. - ''' - # Make C1 refer to the bigger constellation. + """ + # Make C1 refer to the bigger CES. if len(C2) > len(C1): C1, C2 = C2, C1 destroyed = [c1 for c1 in C1 if not any(c1.emd_eq(c2) for c2 in C2)] @@ -49,26 +49,26 @@ def _constellation_distance_simple(C1, C2): for c in destroyed) -def _constellation_distance_emd(unique_C1, unique_C2): - '''Return the distance between two constellations in concept space. +def _ces_distance_emd(unique_C1, unique_C2): + """Return the distance between two cause-effect structures. Uses the generalized EMD. - ''' + """ # Get the pairwise distances between the concepts in the unpartitioned and - # partitioned constellations. + # partitioned CESs. 
distances = np.array([ [concept_distance(i, j) for j in unique_C2] for i in unique_C1 ]) # We need distances from all concepts---in both the unpartitioned and - # partitioned constellations---to the null concept, because: - # - often a concept in the unpartitioned constellation is destroyed by a + # partitioned CESs---to the null concept, because: + # - often a concept in the unpartitioned CES is destroyed by a # cut (and needs to be moved to the null concept); and # - in certain cases, the partitioned system will have *greater* sum of # small-phi, even though it has less big-phi, which means that some - # partitioned-constellation concepts will be moved to the null concept. + # partitioned-CES concepts will be moved to the null concept. distances_to_null = np.array([ concept_distance(c, c.subsystem.null_concept) - for constellation in (unique_C1, unique_C2) for c in constellation + for ces in (unique_C1, unique_C2) for c in ces ]) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Now we make the distance matrix, which will look like this: @@ -88,17 +88,16 @@ def _constellation_distance_emd(unique_C1, unique_C2): # # The diagonal blocks marked with an X are set to a value larger than any # pairwise distance between concepts. This ensures that concepts are never - # moved to another concept within their own constellation; they must always - # go either from one constellation to another, or to the null concept N. - # The D block is filled with the pairwise distances between the two - # constellations, and Dn is filled with the distances from each concept to - # the null concept. + # moved to another concept within their own CES; they must always go either + # from one CES to another, or to the null concept N. The D block is filled + # with the pairwise distances between the two CESs, and Dn is filled with + # the distances from each concept to the null concept. 
N, M = len(unique_C1), len(unique_C2) # Add one to the side length for the null concept distances. distance_matrix = np.empty([N + M + 1] * 2) - # Ensure that concepts are never moved within their own constellation. + # Ensure that concepts are never moved within their own CES. distance_matrix[:] = np.max(distances) + 1 - # Set the top-right block to the pairwise constellation distances. + # Set the top-right block to the pairwise CES distances. distance_matrix[:N, N:-1] = distances # Set the bottom-left block to the same, but transposed. distance_matrix[N:-1, :N] = distances.T @@ -119,34 +118,34 @@ def _constellation_distance_emd(unique_C1, unique_C2): return emd(np.array(d1), np.array(d2), distance_matrix) -def constellation_distance(C1, C2): - '''Return the distance between two constellations in concept space. +def ces_distance(C1, C2): + """Return the distance between two cause-effect structures. Args: - C1 (Constellation): The first constellation. - C2 (Constellation): The second constellation. + C1 (CauseEffectStructure): The first |CauseEffectStructure|. + C2 (CauseEffectStructure): The second |CauseEffectStructure|. Returns: - float: The distance between the two constellations in concept space. - ''' - if config.USE_SMALL_PHI_DIFFERENCE_FOR_CONSTELLATION_DISTANCE: - return round(small_phi_constellation_distance(C1, C2), config.PRECISION) + float: The distance between the two cause-effect structures in concept + space. + """ + if config.USE_SMALL_PHI_DIFFERENCE_FOR_CES_DISTANCE: + return round(small_phi_ces_distance(C1, C2), config.PRECISION) concepts_only_in_C1 = [ c1 for c1 in C1 if not any(c1.emd_eq(c2) for c2 in C2)] concepts_only_in_C2 = [ c2 for c2 in C2 if not any(c2.emd_eq(c1) for c1 in C1)] - # If the only difference in the constellations is that some concepts + # If the only difference in the CESs is that some concepts # disappeared, then we don't need to use the EMD. 
if not concepts_only_in_C1 or not concepts_only_in_C2: - dist = _constellation_distance_simple(C1, C2) + dist = _ces_distance_simple(C1, C2) else: - dist = _constellation_distance_emd(concepts_only_in_C1, - concepts_only_in_C2) + dist = _ces_distance_emd(concepts_only_in_C1, concepts_only_in_C2) return round(dist, config.PRECISION) -def small_phi_constellation_distance(C1, C2): - '''Return the difference in |small_phi| between constellations.''' +def small_phi_ces_distance(C1, C2): + """Return the difference in |small_phi| between |CauseEffectStructure|.""" return sum(c.phi for c in C1) - sum(c.phi for c in C2) diff --git a/pyphi/compute/network.py b/pyphi/compute/network.py new file mode 100644 index 000000000..647b215eb --- /dev/null +++ b/pyphi/compute/network.py @@ -0,0 +1,180 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# compute/network.py + +""" +Functions for computing network-level properties. +""" + +import logging + +from .. import config, exceptions, utils, validate +from ..models import _null_sia +from ..subsystem import Subsystem +from .parallel import MapReduce +from .subsystem import sia + +# Create a logger for this module. +log = logging.getLogger(__name__) + + +def _reachable_subsystems(network, indices, state): + """A generator over all subsystems in a valid state.""" + validate.is_network(network) + + # Return subsystems largest to smallest to optimize parallel + # resource usage. + for subset in utils.powerset(indices, nonempty=True, reverse=True): + try: + yield Subsystem(network, state, subset) + except exceptions.StateUnreachableError: + pass + + +def subsystems(network, state): + """Return a generator of all **possible** subsystems of a network. + + .. note:: + Does not return subsystems that are in an impossible state (after + conditioning the subsystem TPM on the state of the other nodes). + + Args: + network (Network): The |Network| of interest. + state (tuple[int]): The state of the network (a binary tuple). 
+
+    Yields:
+        Subsystem: A |Subsystem| for each subset of nodes in the network,
+            excluding subsystems that would be in an impossible state.
+    """
+    return _reachable_subsystems(network, network.node_indices, state)
+
+
+def possible_complexes(network, state):
+    """Return a generator of subsystems of a network that could be a complex.
+
+    This is just the powerset of the nodes that have at least one input and
+    output (nodes with no inputs or no outputs cannot be part of a major
+    complex, because they do not have a causal link with the rest of the
+    subsystem in the previous or next timestep, respectively).
+
+    .. note::
+        Does not return subsystems that are in an impossible state (after
+        conditioning the subsystem TPM on the state of the other nodes).
+
+    Args:
+        network (Network): The |Network| of interest.
+        state (tuple[int]): The state of the network (a binary tuple).
+
+    Yields:
+        Subsystem: The next subsystem that could be a complex.
+    """
+    return _reachable_subsystems(
+        network, network.causally_significant_nodes, state)
+
+
+class FindAllComplexes(MapReduce):
+    """Computation engine for finding all complexes."""
+    # pylint: disable=unused-argument,arguments-differ
+
+    description = 'Finding complexes'
+
+    def empty_result(self):
+        return []
+
+    @staticmethod
+    def compute(subsystem):
+        return sia(subsystem)
+
+    def process_result(self, new_sia, sias):
+        sias.append(new_sia)
+        return sias
+
+
+def all_complexes(network, state):
+    """Return a generator for all complexes of the network.
+
+    .. note::
+        Includes reducible, zero-|big_phi| complexes (which are not, strictly
+        speaking, complexes at all).
+
+    Args:
+        network (Network): The |Network| of interest.
+        state (tuple[int]): The state of the network (a binary tuple).
+
+    Yields:
+        SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the
+            |Network|.
+ """ + engine = FindAllComplexes(subsystems(network, state)) + return engine.run(config.PARALLEL_COMPLEX_EVALUATION) + + +class FindIrreducibleComplexes(FindAllComplexes): + """Computation engine for finding irreducible complexes of a network.""" + + def process_result(self, new_sia, sias): + if new_sia.phi > 0: + sias.append(new_sia) + return sias + + +def complexes(network, state): + """Return all irreducible complexes of the network. + + Args: + network (Network): The |Network| of interest. + state (tuple[int]): The state of the network (a binary tuple). + + Yields: + SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the + |Network|, excluding those with |big_phi = 0|. + """ + engine = FindIrreducibleComplexes(possible_complexes(network, state)) + return engine.run(config.PARALLEL_COMPLEX_EVALUATION) + + +def major_complex(network, state): + """Return the major complex of the network. + + Args: + network (Network): The |Network| of interest. + state (tuple[int]): The state of the network (a binary tuple). + + Returns: + SystemIrreducibilityAnalysis: The |SIA| for the |Subsystem| with + maximal |big_phi|. + """ + log.info('Calculating major complex...') + + result = complexes(network, state) + if result: + result = max(result) + else: + empty_subsystem = Subsystem(network, state, ()) + result = _null_sia(empty_subsystem) + + log.info("Finished calculating major complex.") + + return result + + +def condensed(network, state): + """Return a list of maximal non-overlapping complexes. + + Args: + network (Network): The |Network| of interest. + state (tuple[int]): The state of the network (a binary tuple). + + Returns: + list[SystemIrreducibilityAnalysis]: A list of |SIA| for non-overlapping + complexes with maximal |big_phi| values. 
+ """ + result = [] + covered_nodes = set() + + for c in reversed(sorted(complexes(network, state))): + if not any(n in covered_nodes for n in c.subsystem.node_indices): + result.append(c) + covered_nodes = covered_nodes | set(c.subsystem.node_indices) + + return result diff --git a/pyphi/compute/parallel.py b/pyphi/compute/parallel.py index 77539abd4..c1d2dd464 100644 --- a/pyphi/compute/parallel.py +++ b/pyphi/compute/parallel.py @@ -3,9 +3,9 @@ # compute/parallel.py -''' +""" Utilities for parallel computation. -''' +""" # pylint: disable=too-few-public-methods,too-many-instance-attributes @@ -24,7 +24,7 @@ def get_num_processes(): - '''Return the number of processes to use in parallel.''' + """Return the number of processes to use in parallel.""" cpu_count = multiprocessing.cpu_count() if config.NUMBER_OF_CORES == 0: @@ -50,19 +50,20 @@ def get_num_processes(): class ExceptionWrapper: - '''A picklable wrapper suitable for passing exception tracebacks through + """A picklable wrapper suitable for passing exception tracebacks through instances of ``multiprocessing.Queue``. Args: exception (Exception): The exception to wrap. - ''' + """ + def __init__(self, exception): # coverage: disable self.exception = exception _, _, tb = sys.exc_info() self.tb = Traceback(tb) def reraise(self): - '''Re-raise the exception.''' + """Re-raise the exception.""" raise self.exception.with_traceback(self.tb.as_traceback()) @@ -71,7 +72,7 @@ def reraise(self): class MapReduce: - '''An engine for doing heavy computations over an iterable. + """An engine for doing heavy computations over an iterable. This is similar to ``multiprocessing.Pool``, but allows computations to shortcircuit, and supports both parallel and sequential computations. @@ -97,7 +98,8 @@ class MapReduce: aware of this when composing nested computations. This is not an issue in practice because it is typically most efficient to only parallelize the top level computation. 
- ''' + """ + # Description for the tqdm progress bar description = '' @@ -118,16 +120,16 @@ def __init__(self, iterable, *context): self.complete = None def empty_result(self, *context): - '''Return the default result with which to begin the computation.''' + """Return the default result with which to begin the computation.""" raise NotImplementedError @staticmethod def compute(obj, *context): - '''Map over a single object from ``self.iterable``.''' + """Map over a single object from ``self.iterable``.""" raise NotImplementedError def process_result(self, new_result, old_result): - '''Reduce handler. + """Reduce handler. Every time a new result is generated by ``compute``, this method is called with the result and the previous (accumulated) result. This @@ -135,7 +137,7 @@ def process_result(self, new_result, old_result): Setting ``self.done`` to ``True`` in this method will abort the remainder of the computation, returning this final result. - ''' + """ raise NotImplementedError #: Is this process a subprocess in a parallel computation? @@ -143,7 +145,7 @@ def process_result(self, new_result, old_result): # TODO: pass size of iterable alongside? def init_progress_bar(self): - '''Initialize and return a progress bar.''' + """Initialize and return a progress bar.""" # Forked worker processes can't show progress bars. 
disable = MapReduce._forked or not config.PROGRESS_BARS @@ -159,8 +161,9 @@ def init_progress_bar(self): desc=self.description) @staticmethod # coverage: disable - def worker(compute, task_queue, result_queue, log_queue, complete, *context): - '''A worker process, run by ``multiprocessing.Process``.''' + def worker(compute, task_queue, result_queue, log_queue, complete, + *context): + """A worker process, run by ``multiprocessing.Process``.""" try: MapReduce._forked = True log.debug('Worker process starting...') @@ -183,9 +186,9 @@ def worker(compute, task_queue, result_queue, log_queue, complete, *context): result_queue.put(ExceptionWrapper(e)) def start_parallel(self): - '''Initialize all queues and start the worker processes and the log + """Initialize all queues and start the worker processes and the log thread. - ''' + """ self.num_processes = get_num_processes() self.task_queue = multiprocessing.Queue(maxsize=Q_MAX_SIZE) @@ -211,11 +214,11 @@ def start_parallel(self): self.initialize_tasks() def initialize_tasks(self): - '''Load the input queue to capacity. + """Load the input queue to capacity. Overfilling causes a deadlock when `queue.put` blocks when full, so further tasks are enqueued as results are returned. - ''' + """ # Add a poison pill to shutdown each process. self.tasks = chain(self.iterable, [POISON_PILL] * self.num_processes) for task in islice(self.tasks, Q_MAX_SIZE): @@ -223,7 +226,7 @@ def initialize_tasks(self): self.task_queue.put(task) def maybe_put_task(self): - '''Enqueue the next task, if there are any waiting.''' + """Enqueue the next task, if there are any waiting.""" try: task = next(self.tasks) except StopIteration: @@ -233,9 +236,9 @@ def maybe_put_task(self): self.task_queue.put(task) def run_parallel(self): - '''Perform the computation in parallel, reading results from the output + """Perform the computation in parallel, reading results from the output queue and passing them to ``process_result``. 
- ''' + """ self.start_parallel() result = self.empty_result(*self.context) @@ -263,7 +266,7 @@ def run_parallel(self): return result def finish_parallel(self): - '''Terminate all processes and the log thread.''' + """Terminate all processes and the log thread.""" for process in self.processes: process.join() @@ -283,9 +286,9 @@ def finish_parallel(self): self.progress.close() def run_sequential(self): - '''Perform the computation sequentially, only holding two computed + """Perform the computation sequentially, only holding two computed objects in memory at a time. - ''' + """ result = self.empty_result(*self.context) for obj in self.iterable: @@ -303,12 +306,12 @@ def run_sequential(self): return result def run(self, parallel=True): - '''Perform the computation. + """Perform the computation. Keyword Args: parallel (boolean): If True, run the computation in parallel. Otherwise, operate sequentially. - ''' + """ if parallel: return self.run_parallel() return self.run_sequential() @@ -316,11 +319,12 @@ def run(self, parallel=True): # TODO: maintain a single log thread? class LogThread(threading.Thread): - '''Thread which handles log records sent from ``MapReduce`` processes. + """Thread which handles log records sent from ``MapReduce`` processes. It listens to an instance of ``multiprocessing.Queue``, rewriting log messages to the PyPhi log handler. 
- ''' + """ + def __init__(self, q): self.q = q super().__init__() @@ -338,7 +342,7 @@ def run(self): def configure_worker_logging(queue): # coverage: disable - '''Configure a worker process to log all messages to ``queue``.''' + """Configure a worker process to log all messages to ``queue``.""" logging.config.dictConfig({ 'version': 1, 'disable_existing_loggers': False, diff --git a/pyphi/compute/subsystem.py b/pyphi/compute/subsystem.py new file mode 100644 index 000000000..1de6d592c --- /dev/null +++ b/pyphi/compute/subsystem.py @@ -0,0 +1,451 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# compute/subsystem.py + +""" +Functions for computing subsystem-level properties. +""" + +import functools +import logging +from time import time + +from .. import Direction, config, connectivity, memory, utils +from ..models import (CauseEffectStructure, Concept, Cut, KCut, + SystemIrreducibilityAnalysis, _null_sia, cmp, fmt) +from ..partition import directed_bipartition, directed_bipartition_of_one +from ..subsystem import mip_partitions +from .distance import ces_distance +from .parallel import MapReduce + +# Create a logger for this module. +log = logging.getLogger(__name__) + + +class ComputeCauseEffectStructure(MapReduce): + """Engine for computing a |CauseEffectStructure|.""" + # pylint: disable=unused-argument,arguments-differ + + description = 'Computing concepts' + + def empty_result(self, *args): + return [] + + @staticmethod + def compute(mechanism, subsystem, purviews, cause_purviews, + effect_purviews): + """Compute a |Concept| for a mechanism, in this |Subsystem| with the + provided purviews. + """ + return subsystem.concept(mechanism, + purviews=purviews, + cause_purviews=cause_purviews, + effect_purviews=effect_purviews) + + def process_result(self, new_concept, concepts): + """Save all concepts with non-zero |small_phi| to the + |CauseEffectStructure|. 
+ """ + if new_concept.phi > 0: + concepts.append(new_concept) + return concepts + + +def ces(subsystem, mechanisms=False, purviews=False, cause_purviews=False, + effect_purviews=False, parallel=False): + """Return the conceptual structure of this subsystem, optionally restricted + to concepts with the mechanisms and purviews given in keyword arguments. + + If you don't need the full |CauseEffectStructure|, restricting the possible + mechanisms and purviews can make this function much faster. + + Args: + subsystem (Subsystem): The subsystem for which to determine the + |CauseEffectStructure|. + + Keyword Args: + mechanisms (tuple[tuple[int]]): Restrict possible mechanisms to those + in this list. + purviews (tuple[tuple[int]]): Same as in :func:`concept`. + cause_purviews (tuple[tuple[int]]): Same as in :func:`concept`. + effect_purviews (tuple[tuple[int]]): Same as in :func:`concept`. + parallel (bool): Whether to compute concepts in parallel. If ``True``, + overrides :data:`config.PARALLEL_CONCEPT_EVALUATION`. + + Returns: + CauseEffectStructure: A tuple of every |Concept| in the cause-effect + structure. + """ + if mechanisms is False: + mechanisms = utils.powerset(subsystem.node_indices, nonempty=True) + + engine = ComputeCauseEffectStructure(mechanisms, subsystem, purviews, + cause_purviews, effect_purviews) + + return CauseEffectStructure(engine.run(parallel or + config.PARALLEL_CONCEPT_EVALUATION)) + + +def conceptual_info(subsystem): + """Return the conceptual information for a |Subsystem|. + + This is the distance from the subsystem's |CauseEffectStructure| to the + null concept. + """ + ci = ces_distance(ces(subsystem), ()) + return round(ci, config.PRECISION) + + +def evaluate_cut(uncut_subsystem, cut, unpartitioned_ces): + """Compute the system irreducibility for a given cut. + + Args: + uncut_subsystem (Subsystem): The subsystem without the cut applied. + cut (Cut): The cut to evaluate. 
+ unpartitioned_ces (CauseEffectStructure): The cause-effect structure of + the uncut subsystem. + + Returns: + SystemIrreducibilityAnalysis: The |SystemIrreducibilityAnalysis| for + that cut. + """ + log.debug('Evaluating %s...', cut) + + cut_subsystem = uncut_subsystem.apply_cut(cut) + + if config.ASSUME_CUTS_CANNOT_CREATE_NEW_CONCEPTS: + mechanisms = unpartitioned_ces.mechanisms + else: + # Mechanisms can only produce concepts if they were concepts in the + # original system, or the cut divides the mechanism. + mechanisms = set( + unpartitioned_ces.mechanisms + + list(cut_subsystem.cut_mechanisms)) + + partitioned_ces = ces(cut_subsystem, mechanisms) + + log.debug('Finished evaluating %s.', cut) + + phi = ces_distance(unpartitioned_ces, + partitioned_ces) + + return SystemIrreducibilityAnalysis( + phi=phi, + ces=unpartitioned_ces, + partitioned_ces=partitioned_ces, + subsystem=uncut_subsystem, + cut_subsystem=cut_subsystem) + + +class ComputeSystemIrreducibility(MapReduce): + """Computation engine for system-level irreducibility.""" + # pylint: disable=unused-argument,arguments-differ + + description = 'Evaluating {} cuts'.format(fmt.BIG_PHI) + + def empty_result(self, subsystem, ces): + """Begin with a |SIA| with infinite |big_phi|; all actual SIAs will + have less. + """ + return _null_sia(subsystem, phi=float('inf')) + + @staticmethod + def compute(cut, subsystem, ces): + """Evaluate a cut.""" + return evaluate_cut(subsystem, cut, ces) + + def process_result(self, new_sia, min_sia): + """Check if the new SIA has smaller |big_phi| than the standing + result. + """ + if new_sia.phi == 0: + self.done = True # Short-circuit + return new_sia + + elif new_sia < min_sia: + return new_sia + + return min_sia + + +def sia_bipartitions(nodes): + """Return all |big_phi| cuts for the given nodes. + + This value changes based on :const:`config.CUT_ONE_APPROXIMATION`. + + Args: + nodes (tuple[int]): The node indices to partition. 
+ Returns: + list[Cut]: All unidirectional partitions. + """ + if config.CUT_ONE_APPROXIMATION: + bipartitions = directed_bipartition_of_one(nodes) + else: + # Don't consider trivial partitions where one part is empty + bipartitions = directed_bipartition(nodes, nontrivial=True) + + return [Cut(bipartition[0], bipartition[1]) + for bipartition in bipartitions] + + +def _ces(subsystem): + """Parallelize the unpartitioned |CauseEffectStructure| if parallelizing + cuts, since we have free processors because we're not computing any cuts + yet. + """ + return ces(subsystem, parallel=config.PARALLEL_CUT_EVALUATION) + + +@memory.cache(ignore=["subsystem"]) +def _sia(cache_key, subsystem): + """Return the minimal information partition of a subsystem. + + Args: + subsystem (Subsystem): The candidate set of nodes. + + Returns: + SystemIrreducibilityAnalysis: A nested structure containing all the + data from the intermediate calculations. The top level contains the + basic irreducibility information for the given subsystem. + """ + log.info('Calculating big-phi data for %s...', subsystem) + start = time() + + def time_annotated(bm, small_phi_time=0.0): + """Annote a |SystemIrreducibilityAnalysis| with the total elapsed + calculation time. + + Optionally add the time taken to calculate the unpartitioned + |CauseEffectStructure|. + """ + bm.time = round(time() - start, config.PRECISION) + bm.small_phi_time = round(small_phi_time, config.PRECISION) + return bm + + # Check for degenerate cases + # ========================================================================= + # Phi is necessarily zero if the subsystem is: + # - not strongly connected; + # - empty; + # - an elementary micro mechanism (i.e. no nontrivial bipartitions). + # So in those cases we immediately return a null SIA. 
+ if not subsystem: + log.info('Subsystem %s is empty; returning null SIA ' + 'immediately.', subsystem) + return time_annotated(_null_sia(subsystem)) + + if not connectivity.is_strong(subsystem.cm, subsystem.node_indices): + log.info('%s is not strongly connected; returning null SIA ' + 'immediately.', subsystem) + return time_annotated(_null_sia(subsystem)) + + # Handle elementary micro mechanism cases. + # Single macro element systems have nontrivial bipartitions because their + # bipartitions are over their micro elements. + if len(subsystem.cut_indices) == 1: + # If the node lacks a self-loop, phi is trivially zero. + if not subsystem.cm[subsystem.node_indices][subsystem.node_indices]: + log.info('Single micro nodes %s without selfloops cannot have ' + 'phi; returning null SIA immediately.', subsystem) + return time_annotated(_null_sia(subsystem)) + # Even if the node has a self-loop, we may still define phi to be zero. + elif not config.SINGLE_MICRO_NODES_WITH_SELFLOOPS_HAVE_PHI: + log.info('Single micro nodes %s with selfloops cannot have ' + 'phi; returning null SIA immediately.', subsystem) + return time_annotated(_null_sia(subsystem)) + # ========================================================================= + + log.debug('Finding unpartitioned CauseEffectStructure...') + small_phi_start = time() + unpartitioned_ces = _ces(subsystem) + small_phi_time = round(time() - small_phi_start, config.PRECISION) + + if not unpartitioned_ces: + log.info('Empty unpartitioned CauseEffectStructure; returning null ' + 'SIA immediately.') + # Short-circuit if there are no concepts in the unpartitioned CES. 
+ return time_annotated(_null_sia(subsystem)) + + log.debug('Found unpartitioned CauseEffectStructure.') + if len(subsystem.cut_indices) == 1: + cuts = [Cut(subsystem.cut_indices, subsystem.cut_indices)] + else: + cuts = sia_bipartitions(subsystem.cut_indices) + engine = ComputeSystemIrreducibility( + cuts, subsystem, unpartitioned_ces) + min_sia = engine.run(config.PARALLEL_CUT_EVALUATION) + result = time_annotated(min_sia, small_phi_time) + + if config.CLEAR_SUBSYSTEM_CACHES_AFTER_COMPUTING_SIA: + log.debug('Clearing subsystem caches.') + subsystem.clear_caches() + + log.info('Finished calculating big-phi data for %s.', subsystem) + + return result + + +# TODO(maintainance): don't forget to add any new configuration options here if +# they can change big-phi values +def _sia_cache_key(subsystem): + """The cache key of the subsystem. + + This includes the native hash of the subsystem and all configuration values + which change the results of ``sia``. + """ + return ( + hash(subsystem), + config.ASSUME_CUTS_CANNOT_CREATE_NEW_CONCEPTS, + config.CUT_ONE_APPROXIMATION, + config.MEASURE, + config.PRECISION, + config.VALIDATE_SUBSYSTEM_STATES, + config.SINGLE_MICRO_NODES_WITH_SELFLOOPS_HAVE_PHI, + config.PARTITION_TYPE, + ) + + +# Wrapper to ensure that the cache key is the native hash of the subsystem, so +# joblib doesn't mistakenly recompute things when the subsystem's MICE cache is +# changed. The cache is also keyed on configuration values which affect the +# value of the computation. +@functools.wraps(_sia) +def sia(subsystem): # pylint: disable=missing-docstring + if config.SYSTEM_CUTS == 'CONCEPT_STYLE': + return sia_concept_style(subsystem) + + return _sia(_sia_cache_key(subsystem), subsystem) + + +def phi(subsystem): + """Return the |big_phi| value of a subsystem.""" + return sia(subsystem).phi + + +class ConceptStyleSystem: + """A functional replacement for ``Subsystem`` implementing concept-style + system cuts. 
+ """ + + def __init__(self, subsystem, direction, cut=None): + self.subsystem = subsystem + self.direction = direction + self.cut = cut + self.cut_system = subsystem.apply_cut(cut) + + def apply_cut(self, cut): + return ConceptStyleSystem(self.subsystem, self.direction, cut) + + def __getattr__(self, name): + """Pass attribute access through to the basic subsystem.""" + # Unpickling calls `__getattr__` before the object's dict is populated; + # check that `subsystem` exists to avoid a recursion error. + # See https://bugs.python.org/issue5370. + if 'subsystem' in self.__dict__: + return getattr(self.subsystem, name) + raise AttributeError(name) + + def __len__(self): + return len(self.subsystem) + + @property + def cause_system(self): + return { + Direction.CAUSE: self.cut_system, + Direction.EFFECT: self.subsystem + }[self.direction] + + @property + def effect_system(self): + return { + Direction.CAUSE: self.subsystem, + Direction.EFFECT: self.cut_system + }[self.direction] + + def concept(self, mechanism, purviews=False, cause_purviews=False, + effect_purviews=False): + """Compute a concept, using the appropriate system for each side of the + cut. + """ + cause = self.cause_system.mic( + mechanism, purviews=(cause_purviews or purviews)) + + effect = self.effect_system.mie( + mechanism, purviews=(effect_purviews or purviews)) + + return Concept(mechanism=mechanism, cause=cause, effect=effect, + subsystem=self) + + def __str__(self): + return 'ConceptStyleSystem{}'.format(self.node_indices) + + +def concept_cuts(direction, node_indices): + """Generator over all concept-syle cuts for these nodes.""" + for partition in mip_partitions(node_indices, node_indices): + yield KCut(direction, partition) + + +def directional_sia(subsystem, direction, ces=None): + """Calculate a concept-style SystemIrreducibilityAnalysisCause or + SystemIrreducibilityAnalysisEffect. 
+ """ + unpartitioned_ces = ces + + if unpartitioned_ces is None: + unpartitioned_ces = _ces(subsystem) + + c_system = ConceptStyleSystem(subsystem, direction) + cuts = concept_cuts(direction, c_system.cut_indices) + + # Run the default SIA engine + # TODO: verify that short-cutting works correctly? + engine = ComputeSystemIrreducibility( + cuts, c_system, unpartitioned_ces) + return engine.run(config.PARALLEL_CUT_EVALUATION) + + +# TODO: only return the minimal SIA, instead of both +class SystemIrreducibilityAnalysisConceptStyle(cmp.Orderable): + """Represents a |SIA| computed using concept-style system cuts.""" + + def __init__(self, sia_cause, sia_effect): + self.sia_cause = sia_cause + self.sia_effect = sia_effect + + @property + def min_sia(self): + return min(self.sia_cause, self.sia_effect, key=lambda m: m.phi) + + def __getattr__(self, name): + """Pass attribute access through to the minimal SIA.""" + if ('sia_cause' in self.__dict__ and 'sia_effect' in self.__dict__): + return getattr(self.min_sia, name) + raise AttributeError(name) + + def __eq__(self, other): + return cmp.general_eq(self, other, ['phi']) + + unorderable_unless_eq = ['network'] + + def order_by(self): + return [self.phi, len(self.subsystem)] + + def __repr__(self): + return repr(self.min_sia) + + def __str__(self): + return str(self.min_sia) + + +# TODO: cache +def sia_concept_style(subsystem): + """Compute a concept-style SystemIrreducibilityAnalysis""" + unpartitioned_ces = _ces(subsystem) + + sia_cause = directional_sia(subsystem, Direction.CAUSE, + unpartitioned_ces) + sia_effect = directional_sia(subsystem, Direction.EFFECT, + unpartitioned_ces) + + return SystemIrreducibilityAnalysisConceptStyle(sia_cause, sia_effect) diff --git a/pyphi/conf.py b/pyphi/conf.py index 8ce13a19c..3ec235863 100644 --- a/pyphi/conf.py +++ b/pyphi/conf.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # conf.py -''' +""" Loading a configuration ~~~~~~~~~~~~~~~~~~~~~~~ @@ -28,7 +28,7 @@ >>> print(pyphi.config) # 
doctest: +SKIP { 'ASSUME_CUTS_CANNOT_CREATE_NEW_CONCEPTS': False, - 'CACHE_BIGMIPS': False, + 'CACHE_SIAS': False, 'CACHE_POTENTIAL_PURVIEWS': True, 'CACHING_BACKEND': 'fs', ... @@ -56,9 +56,11 @@ - :attr:`~pyphi.conf.PyphiConfig.MEASURE` - :attr:`~pyphi.conf.PyphiConfig.PARTITION_TYPE` - :attr:`~pyphi.conf.PyphiConfig.PICK_SMALLEST_PURVIEW` -- :attr:`~pyphi.conf.PyphiConfig.USE_SMALL_PHI_DIFFERENCE_FOR_CONSTELLATION_DISTANCE` +- :attr:`~pyphi.conf.PyphiConfig.USE_SMALL_PHI_DIFFERENCE_FOR_CES_DISTANCE` - :attr:`~pyphi.conf.PyphiConfig.SYSTEM_CUTS` - :attr:`~pyphi.conf.PyphiConfig.SINGLE_MICRO_NODES_WITH_SELFLOOPS_HAVE_PHI` +- :attr:`~pyphi.conf.PyphiConfig.VALIDATE_SUBSYSTEM_STATES` +- :attr:`~pyphi.conf.PyphiConfig.VALIDATE_CONDITIONAL_INDEPENDENCE` Parallelization and system resources @@ -79,8 +81,9 @@ and ``PARALLEL_COMPLEX_EVALUATION`` can be set to ``True`` at a time. For maximal efficiency, you should parallelize the highest level computations possible, *e.g.*, parallelize complex evaluation instead of cut evaluation, - but only if you are actually computing complexes. You should only - parallelize concept evaluation if you are just computing constellations. + but only if you are actually computing a complex. You should only + parallelize concept evaluation if you are just computing a + |CauseEffectStructure|. - :attr:`~pyphi.conf.PyphiConfig.NUMBER_OF_CORES` - :attr:`~pyphi.conf.PyphiConfig.MAXIMUM_CACHE_MEMORY_PERCENTAGE` @@ -91,8 +94,10 @@ PyPhi provides a number of ways to cache intermediate results. 
-- :attr:`~pyphi.conf.PyphiConfig.CACHE_BIGMIPS` +- :attr:`~pyphi.conf.PyphiConfig.CACHE_SIAS` +- :attr:`~pyphi.conf.PyphiConfig.CACHE_REPERTOIRES` - :attr:`~pyphi.conf.PyphiConfig.CACHE_POTENTIAL_PURVIEWS` +- :attr:`~pyphi.conf.PyphiConfig.CLEAR_SUBSYSTEM_CACHES_AFTER_COMPUTING_SIA` - :attr:`~pyphi.conf.PyphiConfig.CACHING_BACKEND` - :attr:`~pyphi.conf.PyphiConfig.FS_CACHE_VERBOSITY` - :attr:`~pyphi.conf.PyphiConfig.FS_CACHE_DIRECTORY` @@ -125,16 +130,9 @@ - :attr:`~pyphi.conf.PyphiConfig.PRECISION` -Miscellaneous -~~~~~~~~~~~~~ - -- :attr:`~pyphi.conf.PyphiConfig.VALIDATE_SUBSYSTEM_STATES` -- :attr:`~pyphi.conf.PyphiConfig.VALIDATE_CONDITIONAL_INDEPENDENCE` - - The ``config`` API ~~~~~~~~~~~~~~~~~~ -''' +""" # pylint: disable=too-few-public-methods,protected-access @@ -153,7 +151,7 @@ class Option: - '''A descriptor implementing PyPhi configuration options. + """A descriptor implementing PyPhi configuration options. Args: default: The default value of this ``Option``. @@ -166,7 +164,8 @@ class Option: of the option is changed. The ``Config`` instance is passed as the only argument to the callback. doc (str): Optional docstring for the option. - ''' + """ + def __init__(self, default, values=None, on_change=None, doc=None): self.default = default self.values = values @@ -200,19 +199,19 @@ def __set__(self, obj, value): self._callback(obj) def _validate(self, value): - '''Validate the new value.''' + """Validate the new value.""" if self.values and value not in self.values: raise ValueError( '{} is not a valid value for {}'.format(value, self.name)) def _callback(self, obj): - '''Trigger any callbacks.''' + """Trigger any callbacks.""" if self.on_change is not None: self.on_change(obj) class ConfigMeta(type): - '''Metaclass for ``Config``. + """Metaclass for ``Config``. 
Responsible for setting the name of each ``Option`` when a subclass of ``Config`` is created; because ``Option`` objects are defined on the class, @@ -221,7 +220,8 @@ class ConfigMeta(type): Python 3.6 handles this exact need with the special descriptor method ``__set_name__`` (see PEP 487). We should use that once we drop support for 3.4 & 3.5. - ''' + """ + def __init__(cls, cls_name, bases, namespace): super().__init__(cls_name, bases, namespace) for name, opt in cls.options().items(): @@ -229,10 +229,11 @@ def __init__(cls, cls_name, bases, namespace): class Config(metaclass=ConfigMeta): - '''Base configuration object. + """Base configuration object. See ``PyphiConfig`` for usage. - ''' + """ + def __init__(self): self._values = {} self._loaded_files = [] @@ -259,20 +260,20 @@ def __setattr__(self, name, value): @classmethod def options(cls): - '''Return a dictionary the ``Option`` objects for this config''' + """Return a dictionary the ``Option`` objects for this config""" return {k: v for k, v in cls.__dict__.items() if isinstance(v, Option)} def defaults(self): - '''Return the default values of this configuration.''' + """Return the default values of this configuration.""" return {k: v.default for k, v in self.options().items()} def load_config_dict(self, dct): - '''Load a dictionary of configuration values.''' + """Load a dictionary of configuration values.""" for k, v in dct.items(): setattr(self, k, v) def load_config_file(self, filename): - '''Load config from a YAML file.''' + """Load config from a YAML file.""" filename = os.path.abspath(filename) with open(filename) as f: @@ -281,11 +282,11 @@ def load_config_file(self, filename): self._loaded_files.append(filename) def snapshot(self): - '''Return a snapshot of the current values of this configuration.''' + """Return a snapshot of the current values of this configuration.""" return copy(self._values) def override(self, **new_values): - '''Decorator and context manager to override configuration values. 
+ """Decorator and context manager to override configuration values. The initial configuration values are reset after the decorated function returns or the context manager completes it block, even if the function @@ -302,12 +303,12 @@ def override(self, **new_values): >>> with config.override(PRECISION=100): ... assert config.PRECISION == 100 ... - ''' + """ return _override(self, **new_values) class _override(contextlib.ContextDecorator): - '''See ``Config.override`` for usage.''' + """See ``Config.override`` for usage.""" def __init__(self, conf, **new_values): self.conf = conf @@ -315,17 +316,17 @@ def __init__(self, conf, **new_values): self.initial_values = conf.snapshot() def __enter__(self): - '''Save original config values; override with new ones.''' + """Save original config values; override with new ones.""" self.conf.load_config_dict(self.new_values) def __exit__(self, *exc): - '''Reset config to initial values; reraise any exceptions.''' + """Reset config to initial values; reraise any exceptions.""" self.conf.load_config_dict(self.initial_values) return False def configure_logging(conf): - '''Reconfigure PyPhi logging based on the current configuration.''' + """Reconfigure PyPhi logging based on the current configuration.""" logging.config.dictConfig({ 'version': 1, 'disable_existing_loggers': False, @@ -357,7 +358,7 @@ def configure_logging(conf): class PyphiConfig(Config): - '''``pyphi.config`` is an instance of this class.''' + """``pyphi.config`` is an instance of this class.""" ASSUME_CUTS_CANNOT_CREATE_NEW_CONCEPTS = Option(False, doc=""" In certain cases, making a cut can actually cause a previously reducible @@ -384,12 +385,12 @@ class PyphiConfig(Config): PARALLEL_CONCEPT_EVALUATION = Option(False, doc=""" Controls whether concepts are evaluated in parallel when computing - constellations.""") + cause-effect structures.""") PARALLEL_CUT_EVALUATION = Option(True, doc=""" Controls whether system cuts are evaluated in parallel, which is faster 
but - requires more memory. If cuts are evaluated sequentially, only two |BigMip| - instances need to be in memory at once.""") + requires more memory. If cuts are evaluated sequentially, only two + |SystemIrreducibilityAnalysis| instances need to be in memory at once.""") PARALLEL_COMPLEX_EVALUATION = Option(False, doc=""" Controls whether systems are evaluated in parallel when computing @@ -406,19 +407,32 @@ class PyphiConfig(Config): of them; to avoid thrashing, this setting limits the percentage of a system's RAM that the caches can collectively use.""") - CACHE_BIGMIPS = Option(False, doc=""" - PyPhi is equipped with a transparent caching system for |BigMip| objects which - stores them as they are computed to avoid having to recompute them later. This - makes it easy to play around interactively with the program, or to accumulate - results with minimal effort. For larger projects, however, it is recommended - that you manage the results explicitly, rather than relying on the cache. For - this reason it is disabled by default.""") + CACHE_SIAS = Option(False, doc=""" + PyPhi is equipped with a transparent caching system for + |SystemIrreducibilityAnalysis| objects which stores them as they are + computed to avoid having to recompute them later. This makes it easy to + play around interactively with the program, or to accumulate results with + minimal effort. For larger projects, however, it is recommended that you + manage the results explicitly, rather than relying on the cache. For this + reason it is disabled by default.""") + + CACHE_REPERTOIRES = Option(True, doc=""" + PyPhi caches cause and effect repertoires. This greatly improves speed, but + can consume a significant amount of memory. If you are experiencing memory + issues, try disabling this.""") CACHE_POTENTIAL_PURVIEWS = Option(True, doc=""" Controls whether the potential purviews of mechanisms of a network are cached. 
Caching speeds up computations by not recomputing expensive reducibility checks, but uses additional memory.""") + CLEAR_SUBSYSTEM_CACHES_AFTER_COMPUTING_SIA = Option(False, doc=""" + Controls whether a |Subsystem|'s repertoire and MICE caches are cleared + with |Subsystem.clear_caches()| after computing the + |SystemIrreducibilityAnalysis|. If you don't need to do any more + computations after running |compute.sia()|, then enabling this may help + conserve memory.""") + CACHING_BACKEND = Option('fs', doc=""" Controls whether precomputed results are stored and read from a local filesystem-based cache in the current directory or from a database. Set @@ -444,7 +458,7 @@ class PyphiConfig(Config): effect if ``CACHING_BACKEND`` is ``'db'``).""") REDIS_CACHE = Option(False, doc=""" - Specifies whether to use Redis to cache |Mice|.""") + Specifies whether to use Redis to cache |MICE|.""") REDIS_CONFIG = Option({ 'host': 'localhost', @@ -482,8 +496,8 @@ class PyphiConfig(Config): Controls whether to show progress bars on the console. .. tip:: - If you are iterating over many systems rather than doing one long-running - calculation, consider disabling this for speed.""") + If you are iterating over many systems rather than doing one + long-running calculation, consider disabling this for speed.""") PRECISION = Option(6, doc=""" If ``MEASURE`` is ``EMD``, then the Earth Mover's Distance is calculated @@ -497,7 +511,7 @@ class PyphiConfig(Config): VALIDATE_SUBSYSTEM_STATES = Option(True, doc=""" Controls whether PyPhi checks if the subsystems's state is possible - (reachable with nonzero probability from some past state), given the + (reachable with nonzero probability from some previous state), given the subsystem's TPM (**which is conditioned on background conditions**). 
If this is turned off, then **calculated** |big_phi| **values may not be valid**, since they may be associated with a subsystem that could never be @@ -508,10 +522,11 @@ class PyphiConfig(Config): independent.""") SINGLE_MICRO_NODES_WITH_SELFLOOPS_HAVE_PHI = Option(False, doc=""" - If set to ``True``, the Phi value of single micro-node subsystems is the - difference between their unpartitioned constellation (a single concept) and - the null concept. If set to False, their Phi is defined to be zero. Single - macro-node subsystems may always be cut, regardless of circumstances.""") + If set to ``True``, the |big_phi| value of single micro-node subsystems is + the difference between their unpartitioned |CauseEffectStructure| (a single + concept) and the null concept. If set to False, their |big_phi| is defined + to be zero. Single macro-node subsystems may always be cut, regardless of + circumstances.""") REPR_VERBOSITY = Option(2, values=[0, 1, 2], doc=""" Controls the verbosity of ``__repr__`` methods on PyPhi objects. Can be set @@ -565,30 +580,32 @@ class PyphiConfig(Config): where the mechanism in the third part is always empty. - In addition, in the case of a |small_phi|-tie when computing MICE, The - ``'TRIPARTITION'`` setting choses the MIP with smallest purview instead of - the largest (which is the default). + In addition, in the case of a |small_phi|-tie when computing a |MIC| or + |MIE|, The ``'TRIPARTITION'`` setting choses the MIP with smallest purview + instead of the largest (which is the default). Finally, if set to ``'ALL'``, all possible partitions will be tested.""") PICK_SMALLEST_PURVIEW = Option(False, doc=""" - When computing MICE, it is possible for several MIPs to have the same - |small_phi| value. If this setting is set to ``True`` the MIP with the - smallest purview is chosen; otherwise, the one with largest purview is + When computing a |MIC| or |MIE|, it is possible for several MIPs to have + the same |small_phi| value. 
If this setting is set to ``True`` the MIP with + the smallest purview is chosen; otherwise, the one with largest purview is chosen.""") - USE_SMALL_PHI_DIFFERENCE_FOR_CONSTELLATION_DISTANCE = Option(False, doc=""" - If set to ``True``, the distance between constellations - (when computing a |BigMip|) is calculated using the difference between the - sum of |small_phi| in the constellations instead of the extended EMD.""") + USE_SMALL_PHI_DIFFERENCE_FOR_CES_DISTANCE = Option(False, doc=""" + If set to ``True``, the distance between cause-effect structures (when + computing a |SystemIrreducibilityAnalysis|) is calculated using the + difference between the sum of |small_phi| in the cause-effect structures + instead of the extended EMD.""") - SYSTEM_CUTS = Option('3.0_STYLE', values=['3.0_STYLE', 'CONCEPT_STYLE'], doc=""" + SYSTEM_CUTS = Option('3.0_STYLE', values=['3.0_STYLE', 'CONCEPT_STYLE'], + doc=""" If set to ``'3.0_STYLE'``, then traditional IIT 3.0 cuts will be used when computing |big_phi|. If set to ``'CONCEPT_STYLE'``, then experimental concept-style system cuts will be used instead.""") def log(self): - '''Log current settings.''' + """Log current settings.""" log.info('PyPhi v%s', __about__.__version__) if self._loaded_files: log.info('Loaded configuration from %s', self._loaded_files) diff --git a/pyphi/connectivity.py b/pyphi/connectivity.py index 185bb9995..6bfc43520 100644 --- a/pyphi/connectivity.py +++ b/pyphi/connectivity.py @@ -1,17 +1,17 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# utils/connectivity.py +# connectivity.py -''' +""" Functions for determining network connectivity properties. 
-''' +""" import numpy as np from scipy.sparse.csgraph import connected_components def apply_boundary_conditions_to_cm(external_indices, cm): - '''Remove connections to or from external nodes.''' + """Remove connections to or from external nodes.""" cm = cm.copy() cm[external_indices, :] = 0 # Zero-out row cm[:, external_indices] = 0 # Zero-out columnt @@ -19,17 +19,17 @@ def apply_boundary_conditions_to_cm(external_indices, cm): def get_inputs_from_cm(index, cm): - '''Return indices of inputs to the node with the given index.''' + """Return indices of inputs to the node with the given index.""" return tuple(i for i in range(cm.shape[0]) if cm[i][index]) def get_outputs_from_cm(index, cm): - '''Return indices of the outputs of node with the given index.''' + """Return indices of the outputs of node with the given index.""" return tuple(i for i in range(cm.shape[0]) if cm[index][i]) def causally_significant_nodes(cm): - '''Return indices of nodes that have both inputs and outputs.''' + """Return indices of nodes that have both inputs and outputs.""" inputs = cm.sum(0) outputs = cm.sum(1) nodes_with_inputs_and_outputs = np.logical_and(inputs > 0, outputs > 0) @@ -38,7 +38,7 @@ def causally_significant_nodes(cm): # TODO: better name? def relevant_connections(n, _from, to): - '''Construct a connectivity matrix. + """Construct a connectivity matrix. Args: n (int): The dimensions of the matrix @@ -48,7 +48,7 @@ def relevant_connections(n, _from, to): Returns: np.ndarray: An |n x n| connectivity matrix with the |i,jth| entry is ``1`` if |i| is in ``_from`` and |j| is in ``to``, and 0 otherwise. - ''' + """ cm = np.zeros((n, n)) # Don't try and index with empty arrays. Older versions of NumPy @@ -61,7 +61,7 @@ def relevant_connections(n, _from, to): def block_cm(cm): - '''Return whether ``cm`` can be arranged as a block connectivity matrix. + """Return whether ``cm`` can be arranged as a block connectivity matrix. 
If so, the corresponding mechanism/purview is trivially reducible. Technically, only square matrices are "block diagonal", but the notion of @@ -90,7 +90,7 @@ def block_cm(cm): D,E F,G does not change the structure of the graph. - ''' + """ if np.any(cm.sum(1) == 0): return True if np.all(cm.sum(1) == 1): @@ -133,13 +133,13 @@ def inputs_to(nodes): # TODO: simplify the conditional validation here and in block_cm # TODO: combine with fully_connected def block_reducible(cm, nodes1, nodes2): - '''Return whether connections from ``nodes1`` to ``nodes2`` are reducible. + """Return whether connections from ``nodes1`` to ``nodes2`` are reducible. Args: cm (np.ndarray): The network's connectivity matrix. nodes1 (tuple[int]): Source nodes nodes2 (tuple[int]): Sink nodes - ''' + """ # Trivial case if not nodes1 or not nodes2: return True @@ -155,7 +155,7 @@ def block_reducible(cm, nodes1, nodes2): def _connected(cm, nodes, connection): - '''Test connectivity for the connectivity matrix.''' + """Test connectivity for the connectivity matrix.""" if nodes is not None: cm = cm[np.ix_(nodes, nodes)] @@ -164,7 +164,7 @@ def _connected(cm, nodes, connection): def is_strong(cm, nodes=None): - '''Return whether the connectivity matrix is strongly connected. + """Return whether the connectivity matrix is strongly connected. Remember that a singleton graph is strongly connected. @@ -173,24 +173,24 @@ def is_strong(cm, nodes=None): Keyword Args: nodes (tuple[int]): A subset of nodes to consider. - ''' + """ return _connected(cm, nodes, 'strong') def is_weak(cm, nodes=None): - '''Return whether the connectivity matrix is weakly connected. + """Return whether the connectivity matrix is weakly connected. Args: cm (np.ndarray): A square connectivity matrix. Keyword Args: nodes (tuple[int]): A subset of nodes to consider. - ''' + """ return _connected(cm, nodes, 'weak') def is_full(cm, nodes1, nodes2): - '''Test connectivity of one set of nodes to another. 
+ """Test connectivity of one set of nodes to another. Args: cm (``np.ndarrray``): The connectivity matrix @@ -204,7 +204,7 @@ def is_full(cm, nodes1, nodes2): ``nodes2`` and all elements in ``nodes2`` have an input from some element in ``nodes1``, or if either set of nodes is empty; ``False`` otherwise. - ''' + """ if not nodes1 or not nodes2: return True diff --git a/pyphi/constants.py b/pyphi/constants.py index da081e809..17c2675fd 100644 --- a/pyphi/constants.py +++ b/pyphi/constants.py @@ -2,16 +2,16 @@ # -*- coding: utf-8 -*- # constants.py -''' +""" Package-wide constants. -''' +""" import pickle import joblib # Import Direction for backwards compatibility -from . import config, Direction +from . import Direction, config #: The threshold below which we consider differences in phi values to be zero. EPSILON = 10 ** - config.PRECISION diff --git a/pyphi/convert.py b/pyphi/convert.py index 259d74d44..d7876ec87 100644 --- a/pyphi/convert.py +++ b/pyphi/convert.py @@ -2,12 +2,12 @@ # -*- coding: utf-8 -*- # convert.py -''' +""" Conversion functions. See the documentation on PyPhi :ref:`tpm-conventions` for information on the different representations that these functions convert between. -''' +""" import logging from math import log2 @@ -19,7 +19,7 @@ def reverse_bits(i, n): - '''Reverse the bits of the ``n``-bit decimal number ``i``. + """Reverse the bits of the ``n``-bit decimal number ``i``. 
Examples: >>> reverse_bits(12, 7) @@ -28,31 +28,33 @@ def reverse_bits(i, n): 0 >>> reverse_bits(1, 2) 2 - ''' + """ return int(bin(i)[2:].zfill(n)[::-1], 2) def nodes2indices(nodes): - '''Convert nodes to a tuple of their indices.''' + """Convert nodes to a tuple of their indices.""" return tuple(n.index for n in nodes) if nodes else () def nodes2state(nodes): - '''Convert nodes to a tuple of their states.''' + """Convert nodes to a tuple of their states.""" return tuple(n.state for n in nodes) if nodes else () -def holi2loli(i, n): - '''Convert between HOLI and LOLI for indices in ``range(n)``.''' +def be2le(i, n): + """Convert between big-endian and little-endian for indices in + ``range(n)``. + """ return reverse_bits(i, n) -loli2holi = holi2loli +le2be = be2le -def state2holi_index(state): - '''Convert a PyPhi state-tuple to a decimal index according to the HOLI - convention. +def state2be_index(state): + """Convert a PyPhi state-tuple to a decimal index according to the + big-endian convention. Args: state (tuple[int]): A state-tuple where the |ith| element of the tuple @@ -60,20 +62,20 @@ def state2holi_index(state): Returns: int: A decimal integer corresponding to a network state under the - HOLI convention. + big-endian convention. Examples: - >>> state2holi_index((1, 0, 0, 0, 0)) + >>> state2be_index((1, 0, 0, 0, 0)) 16 - >>> state2holi_index((1, 1, 1, 0, 0, 0, 0, 0)) + >>> state2be_index((1, 1, 1, 0, 0, 0, 0, 0)) 224 - ''' + """ return int(''.join(str(int(n)) for n in state), 2) -def state2loli_index(state): - '''Convert a PyPhi state-tuple to a decimal index according to the LOLI - convention. +def state2le_index(state): + """Convert a PyPhi state-tuple to a decimal index according to the + little-endian convention. Args: state (tuple[int]): A state-tuple where the |ith| element of the tuple @@ -81,26 +83,26 @@ def state2loli_index(state): Returns: int: A decimal integer corresponding to a network state under the - LOLI convention. 
+ little-endian convention. Examples: - >>> state2loli_index((1, 0, 0, 0, 0)) + >>> state2le_index((1, 0, 0, 0, 0)) 1 - >>> state2loli_index((1, 1, 1, 0, 0, 0, 0, 0)) + >>> state2le_index((1, 1, 1, 0, 0, 0, 0, 0)) 7 - ''' + """ return int(''.join(str(int(n)) for n in state[::-1]), 2) -def loli_index2state(i, number_of_nodes): - '''Convert a decimal integer to a PyPhi state tuple with the LOLI +def le_index2state(i, number_of_nodes): + """Convert a decimal integer to a PyPhi state tuple with the little-endian convention. - The output is the reverse of |holi_index2state|. + The output is the reverse of |be_index2state()|. Args: i (int): A decimal integer corresponding to a network state under the - LOLI convention. + little-endian convention. Returns: tuple[int]: A state-tuple where the |ith| element of the tuple gives @@ -108,24 +110,24 @@ def loli_index2state(i, number_of_nodes): Examples: >>> number_of_nodes = 5 - >>> loli_index2state(1, number_of_nodes) + >>> le_index2state(1, number_of_nodes) (1, 0, 0, 0, 0) >>> number_of_nodes = 8 - >>> loli_index2state(7, number_of_nodes) + >>> le_index2state(7, number_of_nodes) (1, 1, 1, 0, 0, 0, 0, 0) - ''' + """ return tuple((i >> n) & 1 for n in range(number_of_nodes)) -def holi_index2state(i, number_of_nodes): - '''Convert a decimal integer to a PyPhi state tuple using the HOLI - convention that high-order bits correspond to low-index nodes. +def be_index2state(i, number_of_nodes): + """Convert a decimal integer to a PyPhi state tuple using the big-endian + convention that the most-significant bits correspond to low-index nodes. - The output is the reverse of |loli_index2state|. + The output is the reverse of |le_index2state()|. Args: i (int): A decimal integer corresponding to a network state under the - HOLI convention. + big-endian convention. 
Returns: tuple[int]: A state-tuple where the |ith| element of the tuple gives @@ -133,17 +135,18 @@ def holi_index2state(i, number_of_nodes): Examples: >>> number_of_nodes = 5 - >>> holi_index2state(1, number_of_nodes) + >>> be_index2state(1, number_of_nodes) (0, 0, 0, 0, 1) >>> number_of_nodes = 8 - >>> holi_index2state(7, number_of_nodes) + >>> be_index2state(7, number_of_nodes) (0, 0, 0, 0, 0, 1, 1, 1) - ''' - return loli_index2state(i, number_of_nodes)[::-1] + """ + return le_index2state(i, number_of_nodes)[::-1] -def holi2loli_state_by_state(tpm): - '''Convert a state-by-state TPM from HOLI to LOLI or vice versa. +def be2le_state_by_state(tpm): + """Convert a state-by-state TPM from big-endian to little-endian or vice + versa. Args: tpm (np.ndarray): A state-by-state TPM. @@ -153,46 +156,46 @@ def holi2loli_state_by_state(tpm): Example: >>> tpm = np.arange(16).reshape([4, 4]) - >>> holi2loli_state_by_state(tpm) + >>> be2le_state_by_state(tpm) array([[ 0., 1., 2., 3.], [ 8., 9., 10., 11.], [ 4., 5., 6., 7.], [ 12., 13., 14., 15.]]) - ''' - loli = np.empty(tpm.shape) + """ + le = np.empty(tpm.shape) N = tpm.shape[0] n = int(log2(N)) for i in range(N): - loli[i, :] = tpm[holi2loli(i, n), :] - return loli + le[i, :] = tpm[be2le(i, n), :] + return le -loli2holi_state_by_state = holi2loli_state_by_state +le2be_state_by_state = be2le_state_by_state -def to_n_dimensional(tpm): - '''Reshape a state-by-node TPM to the n-dimensional form. +def to_multidimensional(tpm): + """Reshape a state-by-node TPM to the multidimensional form. See documentation for the |Network| object for more information on TPM formats. - ''' + """ # Cast to np.array. tpm = np.array(tpm) # Get the number of nodes. N = tpm.shape[-1] - # Reshape. We use Fortran ordering here so that the rows use the LOLI - # convention (low-order bits correspond to low-index nodes). Note that this - # does not change the actual memory layout (C- or Fortran-contiguous), so - # there is no performance loss. 
+ # Reshape. We use Fortran ordering here so that the rows use the + # little-endian convention (least-significant bits correspond to low-index + # nodes). Note that this does not change the actual memory layout (C- or + # Fortran-contiguous), so there is no performance loss. return tpm.reshape([2] * N + [N], order="F").astype(float) -def to_2_dimensional(tpm): - '''Reshape a state-by-node TPM to the 2-dimensional form. +def to_2dimensional(tpm): + """Reshape a state-by-node TPM to the 2-dimensional form. - See documentation for the |Network| object for more information on TPM - formats. - ''' + See :ref:`tpm-conventions` and documentation for the |Network| object for + more information on TPM representations. + """ # Cast to np.array. tpm = np.array(tpm) # Get the number of nodes. @@ -202,7 +205,7 @@ def state_by_state2state_by_node(tpm): - '''Convert a state-by-state TPM to a state-by-node TPM. + """Convert a state-by-state TPM to a state-by-node TPM. .. danger:: Many nondeterministic state-by-state TPMs can be represented by a @@ -214,17 +217,18 @@ def state_by_state2state_by_node(tpm): .. note:: The indices of the rows and columns of the state-by-state TPM are - assumed to follow the LOLI convention. The indices of the rows of the - resulting state-by-node TPM also follow the LOLI convention. See the - documentation on PyPhi the :ref:`tpm-conventions` more information. + assumed to follow the little-endian convention. The indices of the rows + of the resulting state-by-node TPM also follow the little-endian + convention. See the documentation on PyPhi :ref:`tpm-conventions` for + more information. Args: tpm (list[list] or np.ndarray): A square state-by-state TPM with row - and column indices following the LOLI convention. + and column indices following the little-endian convention. Returns: np.ndarray: A state-by-node TPM, with row indices following the - LOLI convention. + little-endian convention.
Example: >>> tpm = np.array([[0.5, 0.5, 0.0, 0.0], @@ -237,7 +241,7 @@ def state_by_state2state_by_node(tpm): [[ 1. , 0. ], [ 0.3, 0.7]]]) - ''' + """ # Cast to np.array. tpm = np.array(tpm) # Get the number of states from the length of one side of the TPM. @@ -246,14 +250,14 @@ def state_by_state2state_by_node(tpm): N = int(log2(S)) # Initialize the new state-by node TPM. sbn_tpm = np.zeros(([2] * N + [N])) - # Map indices to state-tuples with the LOLI convention. - states = {i: loli_index2state(i, N) for i in range(S)} + # Map indices to state-tuples with the little-endian convention. + states = {i: le_index2state(i, N) for i in range(S)} # Get an array for each node with 1 in positions that correspond to that # node being on in the next state, and a 0 otherwise. node_on = np.array([[states[i][n] for i in range(S)] for n in range(N)]) on_probabilities = [tpm * node_on[n] for n in range(N)] for i, state in states.items(): - # Get the probability of each node being on given the past state i, + # Get the probability of each node being on given the previous state i, # i.e., a row of the state-by-node TPM. # Assign that row to the ith state in the state-by-node TPM. sbn_tpm[state] = [np.sum(on_probabilities[n][i]) for n in range(N)] @@ -264,7 +268,7 @@ def state_by_state2state_by_node(tpm): # TODO add documentation on TPM representation and conditional independence and # reference it here def state_by_node2state_by_state(tpm): - '''Convert a state-by-node TPM to a state-by-state TPM. + """Convert a state-by-node TPM to a state-by-state TPM. .. important:: A nondeterministic state-by-node TPM can have more than one @@ -275,19 +279,19 @@ def state_by_node2state_by_state(tpm): .. note:: The indices of the rows of the state-by-node TPM are assumed to follow - the LOLI convention, while the indices of the columns follow the HOLI - convention. The indices of the rows and columns of the resulting - state-by-state TPM both follow the HOLI convention. 
See the - documentation on PyPhi :ref:`tpm-conventions` for more info. + the little-endian convention, while the indices of the columns follow + the big-endian convention. The indices of the rows and columns of the + resulting state-by-state TPM both follow the big-endian convention. See + the documentation on PyPhi :ref:`tpm-conventions` for more info. Args: tpm (list[list] or np.ndarray): A state-by-node TPM with row indices - following the LOLI convention and column indices following the HOLI - convention. + following the little-endian convention and column indices following + the big-endian convention. Returns: np.ndarray: A state-by-state TPM, with both row and column indices - following the HOLI convention. + following the big-endian convention. >>> tpm = np.array([[1, 1, 0], ... [0, 0, 1], @@ -306,11 +310,11 @@ def state_by_node2state_by_state(tpm): [ 0., 1., 0., 0., 0., 0., 0., 0.], [ 0., 0., 0., 0., 0., 0., 0., 1.], [ 0., 0., 0., 0., 0., 1., 0., 0.]]) - ''' + """ # Cast to np.array. tpm = np.array(tpm) - # Convert to n-dimensional form. - tpm = to_n_dimensional(tpm) + # Convert to multidimensional form. + tpm = to_multidimensional(tpm) # Get the number of nodes from the last dimension of the TPM. N = tpm.shape[-1] # Get the number of states. @@ -319,21 +323,23 @@ def state_by_node2state_by_state(tpm): sbs_tpm = np.zeros((S, S)) if not np.any(np.logical_and(tpm < 1, tpm > 0)): # TPM is deterministic. - for past_state_index in range(S): - # Use the LOLI convention to get the row and column indices. - past_state = loli_index2state(past_state_index, N) - current_state_index = state2loli_index(tpm[past_state]) - sbs_tpm[past_state_index, current_state_index] = 1 + for previous_state_index in range(S): + # Use the little-endian convention to get the row and column + # indices. 
+ previous_state = le_index2state(previous_state_index, N) + current_state_index = state2le_index(tpm[previous_state]) + sbs_tpm[previous_state_index, current_state_index] = 1 else: # TPM is nondeterministic. - for past_state_index in range(S): - # Use the LOLI convention to get the row and column indices. - past_state = loli_index2state(past_state_index, N) - marginal_tpm = tpm[past_state] + for previous_state_index in range(S): + # Use the little-endian convention to get the row and column + # indices. + previous_state = le_index2state(previous_state_index, N) + marginal_tpm = tpm[previous_state] for current_state_index in range(S): current_state = np.array( - [i for i in loli_index2state(current_state_index, N)]) - sbs_tpm[past_state_index, current_state_index] = ( + [i for i in le_index2state(current_state_index, N)]) + sbs_tpm[previous_state_index, current_state_index] = ( np.prod(marginal_tpm[current_state == 1]) * np.prod(1 - marginal_tpm[current_state == 0])) return sbs_tpm @@ -341,15 +347,15 @@ def state_by_node2state_by_state(tpm): # Short aliases -h2l = holi2loli -l2h = loli2holi -l2s = loli_index2state -h2s = holi_index2state -s2l = state2loli_index -s2h = state2holi_index -h2l_sbs = holi2loli_state_by_state -l2h_sbs = loli2holi_state_by_state -to_n_d = to_n_dimensional -to_2_d = to_2_dimensional +b2l = be2le +l2b = le2be +l2s = le_index2state +b2s = be_index2state +s2l = state2le_index +s2b = state2be_index +b2l_sbs = be2le_state_by_state +l2b_sbs = le2be_state_by_state +to_md = to_multidimensional +to_2d = to_2dimensional sbn2sbs = state_by_node2state_by_state sbs2sbn = state_by_state2state_by_node diff --git a/pyphi/db.py b/pyphi/db.py index b7bca2e07..f1df5d150 100644 --- a/pyphi/db.py +++ b/pyphi/db.py @@ -2,9 +2,9 @@ # -*- coding: utf-8 -*- # db.py -''' +""" Interface to MongoDB that exposes it as a key-value store. 
-''' +""" import pickle from collections import Iterable @@ -32,10 +32,10 @@ def find(key): - '''Return the value associated with a key. + """Return the value associated with a key. If there is no value with the given key, returns ``None``. - ''' + """ docs = list(collection.find({KEY_FIELD: key})) # Return None if we didn't find anything. if not docs: @@ -46,9 +46,10 @@ def find(key): def insert(key, value): - '''Store a value with a key. + """Store a value with a key. - If the key is already present in the database, this does nothing.''' + If the key is already present in the database, this does nothing. + """ # Pickle the value. value = pickle.dumps(value, protocol=constants.PICKLE_PROTOCOL) # Store the value as binary data in a document. @@ -66,12 +67,13 @@ def insert(key, value): # TODO: check this singleton tuple business def generate_key(filtered_args): - '''Get a key from some input. + """Get a key from some input. This function should be used whenever a key is needed, to keep keys - consistent.''' + consistent. + """ # Convert the value to a (potentially singleton) tuple to be consistent # with joblib.filtered_args. if isinstance(filtered_args, Iterable): return hash(tuple(filtered_args)) - return hash((filtered_args, )) + return hash((filtered_args,)) diff --git a/pyphi/direction.py b/pyphi/direction.py index 9a709dacc..d3d11f549 100644 --- a/pyphi/direction.py +++ b/pyphi/direction.py @@ -2,27 +2,28 @@ # -*- coding: utf-8 -*- # direction.py -''' +""" Causal directions. -''' +""" from enum import Enum class Direction(Enum): - '''Constant that parametrizes cause and effect methods. + """Constant that parametrizes cause and effect methods. - Accessed using ``Direction.PAST`` and ``Direction.FUTURE``, etc. - ''' - PAST = 0 - FUTURE = 1 + Accessed using ``Direction.CAUSE`` and ``Direction.EFFECT``, etc. 
+ """ + + CAUSE = 0 + EFFECT = 1 BIDIRECTIONAL = 2 def __str__(self): - if self is Direction.PAST: - return 'PAST' - elif self is Direction.FUTURE: - return 'FUTURE' + if self is Direction.CAUSE: + return 'CAUSE' + elif self is Direction.EFFECT: + return 'EFFECT' elif self is Direction.BIDIRECTIONAL: return 'BIDIRECTIONAL' @@ -34,16 +35,16 @@ def from_json(cls, dct): return cls(dct['direction']) def order(self, mechanism, purview): - '''Order the mechanism and purview in time. + """Order the mechanism and purview in time. - If the direction is ``PAST``, then the ``purview`` is at |t-1| and the - ``mechanism`` is at time |t|. If the direction is ``FUTURE``, then - the ``mechanism`` is at time |t| and the purview is at |t+1|. - ''' - if self is Direction.PAST: + If the direction is ``CAUSE``, then the purview is at |t-1| and the + mechanism is at time |t|. If the direction is ``EFFECT``, then the + mechanism is at time |t| and the purview is at |t+1|. + """ + if self is Direction.CAUSE: return purview, mechanism - elif self is Direction.FUTURE: + elif self is Direction.EFFECT: return mechanism, purview from . import validate - validate.direction(self) + return validate.direction(self) diff --git a/pyphi/distance.py b/pyphi/distance.py index 524e8ec38..7a0d1baf1 100644 --- a/pyphi/distance.py +++ b/pyphi/distance.py @@ -1,10 +1,11 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# utils/distance.py +# distance.py -''' +""" Functions for measuring distances. -''' +""" + from collections.abc import Mapping from contextlib import ContextDecorator @@ -23,7 +24,7 @@ class MeasureRegistry(Mapping): - '''Storage for measures registered with PyPhi. + """Storage for measures registered with PyPhi. Users can define custom measures: @@ -33,20 +34,21 @@ class MeasureRegistry(Mapping): ... return 0 And use them by setting ``config.MEASURE = 'ALWAYS_ZERO'``. 
- ''' + """ + def __init__(self): self.store = {} self._asymmetric = [] def register(self, name, asymmetric=False): - '''Decorator for registering a measure with PyPhi. + """Decorator for registering a measure with PyPhi. Args: name (string): The name of the measure. Keyword Args: asymmetric (boolean): ``True`` if the measure is asymmetric. - ''' + """ def register_func(func): if asymmetric: self._asymmetric.append(name) @@ -55,11 +57,11 @@ def register_func(func): return register_func def asymmetric(self): - '''Return a list of asymmetric measures.''' + """Return a list of asymmetric measures.""" return self._asymmetric def all(self): - '''Return a list of all registered measures.''' + """Return a list of all registered measures.""" return list(self) def __iter__(self): @@ -81,21 +83,22 @@ def __getitem__(self, name): class np_suppress(np.errstate, ContextDecorator): - '''Decorator to suppress NumPy warnings about divide-by-zero and + """Decorator to suppress NumPy warnings about divide-by-zero and multiplication of ``NaN``. .. note:: This should only be used in cases where you are *sure* that these warnings are not indicative of deeper issues in your code. - ''' + """ # pylint: disable=too-few-public-methods + def __init__(self): super().__init__(divide='ignore', invalid='ignore') # TODO extend to nonbinary nodes def _hamming_matrix(N): - '''Return a matrix of Hamming distances for the possible states of |N| + """Return a matrix of Hamming distances for the possible states of |N| binary nodes. Args: @@ -111,7 +114,7 @@ def _hamming_matrix(N): [ 1., 0., 2., 1.], [ 1., 2., 0., 1.], [ 2., 1., 1., 0.]]) - ''' + """ if N < _NUM_PRECOMPUTED_HAMMING_MATRICES: return _hamming_matrices[N] return _compute_hamming_matrix(N) @@ -119,7 +122,7 @@ def _hamming_matrix(N): @constants.joblib_memory.cache def _compute_hamming_matrix(N): - '''Compute and store a Hamming matrix for |N| nodes. + """Compute and store a Hamming matrix for |N| nodes. 
Hamming matrices have the following sizes:: @@ -139,7 +142,7 @@ def _compute_hamming_matrix(N): This function is only called when |N| > ``_NUM_PRECOMPUTED_HAMMING_MATRICES``. Don't call this function directly; use |_hamming_matrix| instead. - ''' + """ possible_states = np.array(list(utils.all_states((N)))) return cdist(possible_states, possible_states, 'hamming') * N @@ -147,24 +150,24 @@ def _compute_hamming_matrix(N): # TODO extend to binary nodes @measures.register('EMD') def hamming_emd(d1, d2): - '''Return the Earth Mover's Distance between two distributions (indexed + """Return the Earth Mover's Distance between two distributions (indexed by state, one dimension per node) using the Hamming distance between states as the transportation cost function. Singleton dimensions are sqeezed out. - ''' + """ N = d1.squeeze().ndim d1, d2 = flatten(d1), flatten(d2) return emd(d1, d2, _hamming_matrix(N)) def effect_emd(d1, d2): - '''Compute the EMD between two effect repertoires. + """Compute the EMD between two effect repertoires. Because the nodes are independent, the EMD between effect repertoires is equal to the sum of the EMDs between the marginal distributions of each node, and the EMD between marginal distribution for a node is the absolute - difference in the probabilities that the node is off. + difference in the probabilities that the node is OFF. Args: d1 (np.ndarray): The first repertoire. @@ -172,14 +175,14 @@ def effect_emd(d1, d2): Returns: float: The EMD between ``d1`` and ``d2``. - ''' + """ return sum(abs(marginal_zero(d1, i) - marginal_zero(d2, i)) for i in range(d1.ndim)) @measures.register('L1') def l1(d1, d2): - '''Return the L1 distance between two distributions. + """Return the L1 distance between two distributions. Args: d1 (np.ndarray): The first distribution. @@ -187,13 +190,13 @@ def l1(d1, d2): Returns: float: The sum of absolute differences of ``d1`` and ``d2``. 
- ''' + """ return np.absolute(d1 - d2).sum() @measures.register('KLD', asymmetric=True) def kld(d1, d2): - '''Return the Kullback-Leibler Divergence (KLD) between two distributions. + """Return the Kullback-Leibler Divergence (KLD) between two distributions. Args: d1 (np.ndarray): The first distribution. @@ -201,14 +204,14 @@ def kld(d1, d2): Returns: float: The KLD of ``d1`` from ``d2``. - ''' + """ d1, d2 = flatten(d1), flatten(d2) return entropy(d1, d2, 2.0) @measures.register('ENTROPY_DIFFERENCE') def entropy_difference(d1, d2): - '''Return the difference in entropy between two distributions.''' + """Return the difference in entropy between two distributions.""" d1, d2 = flatten(d1), flatten(d2) return abs(entropy(d1, base=2.0) - entropy(d2, base=2.0)) @@ -216,12 +219,12 @@ def entropy_difference(d1, d2): @measures.register('PSQ2') @np_suppress() def psq2(d1, d2): - '''Compute the PSQ2 measure. + """Compute the PSQ2 measure. Args: d1 (np.ndarray): The first distribution. d2 (np.ndarray): The second distribution. - ''' + """ d1, d2 = flatten(d1), flatten(d2) def f(p): @@ -233,12 +236,12 @@ def f(p): @measures.register('MP2Q', asymmetric=True) @np_suppress() def mp2q(p, q): - '''Compute the MP2Q measure. + """Compute the MP2Q measure. Args: p (np.ndarray): The unpartitioned repertoire q (np.ndarray): The partitioned repertoire - ''' + """ p, q = flatten(p), flatten(q) entropy_dist = 1 / len(p) return sum(entropy_dist * np.nan_to_num((p ** 2) / q * np.log(p / q))) @@ -247,19 +250,19 @@ def mp2q(p, q): @measures.register('BLD', asymmetric=True) @np_suppress() def bld(p, q): - '''Compute the Buzz Lightyear (Billy-Leo) Divergence.''' + """Compute the Buzz Lightyear (Billy-Leo) Divergence.""" p, q = flatten(p), flatten(q) return max(abs(p * np.nan_to_num(np.log(p / q)))) def directional_emd(direction, d1, d2): - '''Compute the EMD between two repertoires for a given direction. + """Compute the EMD between two repertoires for a given direction. 
The full EMD computation is used for cause repertoires. A fast analytic solution is used for effect repertoires. Args: - direction (Direction): |PAST| or |FUTURE|. + direction (Direction): |CAUSE| or |EFFECT|. d1 (np.ndarray): The first repertoire. d2 (np.ndarray): The second repertoire. @@ -268,10 +271,10 @@ def directional_emd(direction, d1, d2): Raises: ValueError: If ``direction`` is invalid. - ''' - if direction == Direction.PAST: + """ + if direction == Direction.CAUSE: func = hamming_emd - elif direction == Direction.FUTURE: + elif direction == Direction.EFFECT: func = effect_emd else: # TODO: test that ValueError is raised @@ -280,27 +283,27 @@ def directional_emd(direction, d1, d2): return round(func(d1, d2), config.PRECISION) -def small_phi_measure(direction, d1, d2): - '''Compute the distance between two repertoires for the given direction. +def repertoire_distance(direction, r1, r2): + """Compute the distance between two repertoires for the given direction. Args: - direction (Direction): |PAST| or |FUTURE|. - d1 (np.ndarray): The first repertoire. - d2 (np.ndarray): The second repertoire. + direction (Direction): |CAUSE| or |EFFECT|. + r1 (np.ndarray): The first repertoire. + r2 (np.ndarray): The second repertoire. Returns: float: The distance between ``d1`` and ``d2``, rounded to |PRECISION|. - ''' + """ if config.MEASURE == 'EMD': - dist = directional_emd(direction, d1, d2) + dist = directional_emd(direction, r1, r2) else: - dist = measures[config.MEASURE](d1, d2) + dist = measures[config.MEASURE](r1, r2) return round(dist, config.PRECISION) -def big_phi_measure(r1, r2): - '''Compute the distance between two repertoires. +def system_repertoire_distance(r1, r2): + """Compute the distance between two repertoires of a system. Args: r1 (np.ndarray): The first repertoire. @@ -308,9 +311,10 @@ def big_phi_measure(r1, r2): Returns: float: The distance between ``r1`` and ``r2``. 
- ''' + """ if config.MEASURE in measures.asymmetric(): - raise ValueError('{} is asymmetric and cannot be used as a big-phi ' - 'measure.'.format(config.MEASURE)) + raise ValueError( + '{} is asymmetric and cannot be used as a system-level ' + 'irreducibility measure.'.format(config.MEASURE)) return measures[config.MEASURE](r1, r2) diff --git a/pyphi/distribution.py b/pyphi/distribution.py index 2ff898027..31b8b4b97 100644 --- a/pyphi/distribution.py +++ b/pyphi/distribution.py @@ -2,9 +2,9 @@ # -*- coding: utf-8 -*- # distribution.py -''' +""" Functions for manipulating probability distributions. -''' +""" import numpy as np @@ -12,14 +12,14 @@ def normalize(a): - '''Normalize a distribution. + """Normalize a distribution. Args: a (np.ndarray): The array to normalize. Returns: np.ndarray: ``a`` normalized so that the sum of its entries is 1. - ''' + """ sum_a = a.sum() if sum_a == 0: return a @@ -28,7 +28,7 @@ def normalize(a): # TODO? remove this? doesn't seem to be used anywhere def uniform_distribution(number_of_nodes): - ''' + """ Return the uniform distribution for a set of binary nodes, indexed by state (so there is one dimension per node, the size of which is the number of possible states for that node). @@ -38,7 +38,7 @@ def uniform_distribution(number_of_nodes): Returns: np.ndarray: The uniform distribution over the set of nodes. - ''' + """ # The size of the state space for binary nodes is 2^(number of nodes). 
number_of_states = 2 ** number_of_nodes # Generate the maximum entropy distribution @@ -48,7 +48,7 @@ def uniform_distribution(number_of_nodes): def marginal_zero(repertoire, node_index): - '''Return the marginal probability that the node is off.''' + """Return the marginal probability that the node is OFF.""" index = [slice(None)] * repertoire.ndim index[node_index] = 0 @@ -56,14 +56,14 @@ def marginal_zero(repertoire, node_index): def marginal(repertoire, node_index): - '''Get the marginal distribution for a node.''' + """Get the marginal distribution for a node.""" index = tuple(i for i in range(repertoire.ndim) if i != node_index) return repertoire.sum(index, keepdims=True) def independent(repertoire): - '''Check whether the repertoire is independent.''' + """Check whether the repertoire is independent.""" marginals = [marginal(repertoire, i) for i in range(repertoire.ndim)] # TODO: is there a way to do without an explicit iteration? @@ -79,14 +79,14 @@ def independent(repertoire): def purview(repertoire): - '''The purview of the repertoire. + """The purview of the repertoire. Args: repertoire (np.ndarray): A repertoire Returns: tuple[int]: The purview that the repertoire was computed over. - ''' + """ if repertoire is None: return None @@ -94,19 +94,19 @@ def purview(repertoire): def purview_size(repertoire): - '''Return the size of the purview of the repertoire. + """Return the size of the purview of the repertoire. Args: repertoire (np.ndarray): A repertoire Returns: int: The size of purview that the repertoire was computed over. - ''' + """ return len(purview(repertoire)) def repertoire_shape(purview, N): # pylint: disable=redefined-outer-name - '''Return the shape a repertoire. + """Return the shape a repertoire. 
Args: purview (tuple[int]): The purview over which the repertoire is @@ -122,29 +122,30 @@ def repertoire_shape(purview, N): # pylint: disable=redefined-outer-name >>> N = 3 >>> repertoire_shape(purview, N) [2, 1, 2] - ''' + """ # TODO: extend to non-binary nodes return [2 if i in purview else 1 for i in range(N)] -def flatten(repertoire, holi=False): - '''Flatten a repertoire, removing empty dimensions. +def flatten(repertoire, big_endian=False): + """Flatten a repertoire, removing empty dimensions. - By default, the flattened repertoire is returned in LOLI order. + By default, the flattened repertoire is returned in little-endian order. Args: repertoire (np.ndarray or None): A repertoire. Keyword Args: - holi (boolean): If ``True``, flatten the repertoire in HOLI order. + big_endian (boolean): If ``True``, flatten the repertoire in big-endian + order. Returns: np.ndarray: The flattened repertoire. - ''' + """ if repertoire is None: return None - order = 'C' if holi else 'F' + order = 'C' if big_endian else 'F' # For efficiency, use `ravel` (which returns a view of the array) instead # of `np.flatten` (which copies the whole array). return repertoire.squeeze().ravel(order=order) @@ -152,7 +153,7 @@ def flatten(repertoire, holi=False): @cache(cache={}, maxmem=None) def max_entropy_distribution(node_indices, number_of_nodes): - '''Return the maximum entropy distribution over a set of nodes. + """Return the maximum entropy distribution over a set of nodes. This is different from the network's uniform distribution because nodes outside ``node_indices`` are fixed and treated as if they have only 1 @@ -165,7 +166,7 @@ def max_entropy_distribution(node_indices, number_of_nodes): Returns: np.ndarray: The maximum entropy distribution over the set of nodes. 
- ''' + """ distribution = np.ones(repertoire_shape(node_indices, number_of_nodes)) return distribution / distribution.size diff --git a/pyphi/examples.py b/pyphi/examples.py index 20677c7a8..9ef7082dc 100644 --- a/pyphi/examples.py +++ b/pyphi/examples.py @@ -2,9 +2,9 @@ # -*- coding: utf-8 -*- # examples.py -''' +""" Example networks and subsystems to go along with the documentation. -''' +""" # pylint: disable=too-many-lines # flake8: noqa @@ -22,7 +22,7 @@ def basic_network(cm=False): - '''A 3-node network of logic gates. + """A 3-node network of logic gates. Diagram:: @@ -39,27 +39,27 @@ def basic_network(cm=False): TPM: - +--------------+---------------+ - | Past state | Current state | - +--------------+---------------+ - | A, B, C | A, B, C | - +==============+===============+ - | 0, 0, 0 | 0, 0, 0 | - +--------------+---------------+ - | 1, 0, 0 | 0, 0, 1 | - +--------------+---------------+ - | 0, 1, 0 | 1, 0, 1 | - +--------------+---------------+ - | 1, 1, 0 | 1, 0, 0 | - +--------------+---------------+ - | 0, 0, 1 | 1, 1, 0 | - +--------------+---------------+ - | 1, 0, 1 | 1, 1, 1 | - +--------------+---------------+ - | 0, 1, 1 | 1, 1, 1 | - +--------------+---------------+ - | 1, 1, 1 | 1, 1, 0 | - +--------------+---------------+ + +----------------+---------------+ + | Previous state | Current state | + +----------------+---------------+ + | A, B, C | A, B, C | + +================+===============+ + | 0, 0, 0 | 0, 0, 0 | + +----------------+---------------+ + | 1, 0, 0 | 0, 0, 1 | + +----------------+---------------+ + | 0, 1, 0 | 1, 0, 1 | + +----------------+---------------+ + | 1, 1, 0 | 1, 0, 0 | + +----------------+---------------+ + | 0, 0, 1 | 1, 1, 0 | + +----------------+---------------+ + | 1, 0, 1 | 1, 1, 1 | + +----------------+---------------+ + | 0, 1, 1 | 1, 1, 1 | + +----------------+---------------+ + | 1, 1, 1 | 1, 1, 0 | + +----------------+---------------+ Connectivity matrix: @@ -77,7 +77,7 @@ def basic_network(cm=False): 
|CM[i][j] = 1| means that there is a directed edge |(i,j)| from node |i| to node |j| and |CM[i][j] = 0| means there is no edge from |i| to |j|. - ''' + """ tpm = np.array([ [0, 0, 0], [0, 0, 1], @@ -96,24 +96,25 @@ def basic_network(cm=False): ]) else: cm = None - return Network(tpm, connectivity_matrix=cm, node_labels=LABELS[:tpm.shape[1]]) + return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]]) def basic_state(): - '''The state of nodes in :func:`~pyphi.examples.basic_network`.''' + """The state of nodes in :func:`~pyphi.examples.basic_network`.""" return (1, 0, 0) def basic_subsystem(): - '''A subsystem containing all the nodes of the - :func:`~pyphi.examples.basic_network`.''' + """A subsystem containing all the nodes of the + :func:`~pyphi.examples.basic_network`. + """ net = basic_network() state = basic_state() return Subsystem(net, state, range(net.size)) def basic_noisy_selfloop_network(): - '''Based on the basic_network, but with added selfloops and noisy edges. + """Based on the basic_network, but with added selfloops and noisy edges. Nodes perform deterministic functions of their inputs, but those inputs may be flipped (i.e. what should be a 0 becomes a 1, and vice versa) with @@ -136,7 +137,7 @@ def basic_noisy_selfloop_network(): | | | | +~~~+ +~~~+ - ''' + """ tpm = np.array([ [0.271, 0.19, 0.244], [0.919, 0.19, 0.756], @@ -154,21 +155,22 @@ def basic_noisy_selfloop_network(): [1, 1, 1] ]) - return Network(tpm, connectivity_matrix=cm) + return Network(tpm, cm=cm) def basic_noisy_selfloop_subsystem(): - '''A subsystem containing all the nodes of the - :func:`~pyphi.examples.basic_noisy_selfloop_network`.''' + """A subsystem containing all the nodes of the + :func:`~pyphi.examples.basic_noisy_selfloop_network`. + """ net = basic_noisy_selfloop_network() state = basic_state() return Subsystem(net, state, range(net.size)) def residue_network(): - '''The network for the residue example. + """The network for the residue example. 
- Current and past state are all nodes off. + Current and previous state are all nodes OFF. Diagram:: @@ -200,7 +202,7 @@ def residue_network(): +---+---+---+---+---+---+ | E | 0 | 1 | 0 | 0 | 0 | +---+---+---+---+---+---+ - ''' + """ tpm = np.array([ [int(s) for s in bin(x)[2:].zfill(5)[::-1]] for x in range(32) ]) @@ -213,12 +215,13 @@ def residue_network(): cm[2:4, 0] = 1 cm[3:, 1] = 1 - return Network(tpm, connectivity_matrix=cm, node_labels=LABELS[:tpm.shape[1]]) + return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]]) def residue_subsystem(): - '''The subsystem containing all the nodes of the - :func:`~pyphi.examples.residue_network`.''' + """The subsystem containing all the nodes of the + :func:`~pyphi.examples.residue_network`. + """ net = residue_network() state = (0, 0, 0, 0, 0) @@ -226,7 +229,7 @@ def residue_subsystem(): def xor_network(): - '''A fully connected system of three XOR gates. In the state ``(0, 0, 0)``, + """A fully connected system of three XOR gates. In the state ``(0, 0, 0)``, none of the elementary mechanisms exist. Diagram:: @@ -252,7 +255,7 @@ def xor_network(): +---+---+---+---+ | C | 1 | 1 | 0 | +---+---+---+---+ - ''' + """ tpm = np.array([ [0, 0, 0], [0, 1, 1], @@ -268,19 +271,20 @@ def xor_network(): [1, 0, 1], [1, 1, 0] ]) - return Network(tpm, connectivity_matrix=cm, node_labels=LABELS[:tpm.shape[1]]) + return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]]) def xor_subsystem(): - '''The subsystem containing all the nodes of the - :func:`~pyphi.examples.xor_network`.''' + """The subsystem containing all the nodes of the + :func:`~pyphi.examples.xor_network`. + """ net = xor_network() state = (0, 0, 0) return Subsystem(net, state, range(net.size)) def cond_depend_tpm(): - '''A system of two general logic gates A and B such if they are in the same + """A system of two general logic gates A and B such if they are in the same state they stay the same, but if they are in different states, they flip with probability 50%. 
@@ -314,7 +318,7 @@ def cond_depend_tpm(): +---+---+---+ | B | 1 | 0 | +---+---+---+ - ''' + """ tpm = np.array([ [1.0, 0.0, 0.0, 0.0], [0.0, 0.5, 0.5, 0.0], @@ -325,10 +329,10 @@ def cond_depend_tpm(): def cond_independ_tpm(): - '''A system of three general logic gates A, B and C such that: if A and B + """A system of three general logic gates A, B and C such that: if A and B are in the same state then they stay the same; if they are in different - states, they flip if C is **ON** and stay the same if C is **OFF**; and C - is **ON** 50% of the time, independent of the previous state. + states, they flip if C is ON and stay the same if C is OFF; and C is ON 50% + of the time, independent of the previous state. Diagram:: @@ -375,7 +379,7 @@ def cond_independ_tpm(): +---+---+---+---+ | C | 1 | 1 | 0 | +---+---+---+---+ - ''' + """ tpm = np.array([ [0.5, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0], [0.0, 0.5, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0], @@ -390,14 +394,13 @@ def cond_independ_tpm(): def propagation_delay_network(): - '''A version of the primary example from the IIT 3.0 paper with + """A version of the primary example from the IIT 3.0 paper with deterministic COPY gates on each connection. These copy gates essentially function as propagation delays on the signal between OR, AND and XOR gates from the original system. - The current and past states of the network are also selected to mimic the - corresponding states from the IIT 3.0 paper. - + The current and previous states of the network are also selected to mimic + the corresponding states from the IIT 3.0 paper. Diagram:: @@ -447,38 +450,39 @@ def propagation_delay_network(): States: - In the IIT 3.0 paper example, the past state of the system has only the XOR - gate on. For the propagation delay network, this corresponds to a state of + In the IIT 3.0 paper example, the previous state of the system has only the + XOR gate ON. 
For the propagation delay network, this corresponds to a state + of ``(0, 0, 0, 1, 0, 0, 0, 0, 0)``. - The current state of the IIT 3.0 example has only the OR gate on. By + The current state of the IIT 3.0 example has only the OR gate ON. By advancing the propagation delay system two time steps, the current state - ``(1, 0, 0, 0, 0, 0, 0, 0, 0)`` is achieved, with corresponding past state - ``(0, 0, 1, 0, 1, 0, 0, 0, 0)``. - ''' + ``(1, 0, 0, 0, 0, 0, 0, 0, 0)`` is achieved, with corresponding previous + state ``(0, 0, 1, 0, 1, 0, 0, 0, 0)``. + """ num_nodes = 9 num_states = 2 ** num_nodes tpm = np.zeros((num_states, num_nodes)) - for past_state_index, past_state in enumerate(all_states(num_nodes)): + for previous_state_index, previous in enumerate(all_states(num_nodes)): current_state = [0 for i in range(num_nodes)] - if past_state[2] == 1 or past_state[7] == 1: + if previous[2] == 1 or previous[7] == 1: current_state[0] = 1 - if past_state[0] == 1: + if previous[0] == 1: current_state[1] = 1 current_state[8] = 1 - if past_state[3] == 1: + if previous[3] == 1: current_state[2] = 1 current_state[4] = 1 - if past_state[1] == 1 ^ past_state[5] == 1: + if previous[1] == 1 ^ previous[5] == 1: current_state[3] = 1 - if past_state[4] == 1 and past_state[8] == 1: + if previous[4] == 1 and previous[8] == 1: current_state[6] = 1 - if past_state[6] == 1: + if previous[6] == 1: current_state[5] = 1 current_state[7] = 1 - tpm[past_state_index, :] = current_state + tpm[previous_state_index, :] = current_state cm = np.array([[0, 1, 0, 0, 0, 0, 0, 0, 1], [0, 0, 0, 1, 0, 0, 0, 0, 0], @@ -490,13 +494,13 @@ def propagation_delay_network(): [1, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1, 0, 0]]) - return Network(tpm, connectivity_matrix=cm, node_labels=LABELS[:tpm.shape[1]]) + return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]]) def macro_network(): - '''A network of micro elements which has greater integrated information + """A network of micro elements which has 
greater integrated information after coarse graining to a macro scale. - ''' + """ tpm = np.array([[0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], @@ -517,15 +521,16 @@ def macro_network(): def macro_subsystem(): - '''A subsystem containing all the nodes of - :func:`~pyphi.examples.macro_network`.''' + """A subsystem containing all the nodes of + :func:`~pyphi.examples.macro_network`. + """ net = macro_network() state = (0, 0, 0, 0) return Subsystem(net, state, range(net.size)) def blackbox_network(): - '''A micro-network to demonstrate blackboxing. + """A micro-network to demonstrate blackboxing. Diagram:: @@ -568,22 +573,22 @@ def blackbox_network(): In the documentation example, the state is (0, 0, 0, 0, 0, 0). - ''' + """ num_nodes = 6 num_states = 2 ** num_nodes tpm = np.zeros((num_states, num_nodes)) - for index, past_state in enumerate(all_states(num_nodes)): + for index, previous_state in enumerate(all_states(num_nodes)): current_state = [0 for i in range(num_nodes)] - if past_state[5] == 1: + if previous_state[5] == 1: current_state[0] = 1 current_state[1] = 1 - if past_state[0] == 1 and past_state[1]: + if previous_state[0] == 1 and previous_state[1]: current_state[2] = 1 - if past_state[2] == 1: + if previous_state[2] == 1: current_state[3] = 1 current_state[4] = 1 - if past_state[3] == 1 and past_state[4] == 1: + if previous_state[3] == 1 and previous_state[4] == 1: current_state[5] = 1 tpm[index, :] = current_state @@ -600,9 +605,9 @@ def blackbox_network(): def rule110_network(): - '''A network of three elements which follows the logic of - the Rule 110 cellular automaton with current and past - state (0, 0, 0). ''' + """A network of three elements which follows the logic of the Rule 110 + cellular automaton with current and previous state (0, 0, 0). 
+ """ tpm = np.array([[0, 0, 0], [1, 0, 1], [1, 1, 0], @@ -615,8 +620,9 @@ def rule110_network(): def rule154_network(): - '''A network of three elements which follows the logic of the Rule 154 - cellular automaton.''' + """A network of three elements which follows the logic of the Rule 154 + cellular automaton. + """ tpm = np.array([ [0, 0, 0, 0, 0], [0, 1, 0, 0, 1], @@ -658,11 +664,11 @@ def rule154_network(): [0, 0, 1, 1, 1], [1, 0, 0, 1, 1] ]) - return Network(tpm, connectivity_matrix=cm, node_labels=LABELS[:tpm.shape[1]]) + return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]]) def fig1a(): - '''The network shown in Figure 1A of the 2014 IIT 3.0 paper.''' + """The network shown in Figure 1A of the 2014 IIT 3.0 paper.""" tpm = np.array([ [0, 0, 0, 0, 0, 0], [0, 0, 1, 0, 0, 0], @@ -737,11 +743,11 @@ def fig1a(): [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0] ]) - return Network(tpm, connectivity_matrix=cm, node_labels=LABELS[:tpm.shape[1]]) + return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]]) def fig3a(): - '''The network shown in Figure 3A of the 2014 IIT 3.0 paper.''' + """The network shown in Figure 3A of the 2014 IIT 3.0 paper.""" tpm = np.array([ [0.5, 0, 0, 0], [0.5, 0, 0, 0], @@ -766,11 +772,11 @@ def fig3a(): [1, 0, 0, 0], [1, 0, 0, 0] ]) - return Network(tpm, connectivity_matrix=cm, node_labels=LABELS[:tpm.shape[1]]) + return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]]) def fig3b(): - '''The network shown in Figure 3B of the 2014 IIT 3.0 paper.''' + """The network shown in Figure 3B of the 2014 IIT 3.0 paper.""" tpm = np.array([ [0, 0, 0, 0], [0, 0, 0, 0], @@ -795,11 +801,11 @@ def fig3b(): [1, 0, 0, 0], [1, 0, 0, 0] ]) - return Network(tpm, connectivity_matrix=cm, node_labels=LABELS[:tpm.shape[1]]) + return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]]) def fig4(): - '''The network shown in Figure 4 of the 2014 IIT 3.0 paper. + """The network shown in Figure 4 of the 2014 IIT 3.0 paper. 
Diagram:: @@ -814,7 +820,7 @@ def fig4(): | (AND) +~~~~~~>| (XOR) | +~~~~~~~+ +~~~~~~~+ - ''' + """ tpm = np.array([ [0, 0, 0], [0, 0, 1], @@ -830,11 +836,11 @@ def fig4(): [1, 0, 1], [1, 1, 0], ]) - return Network(tpm, connectivity_matrix=cm, node_labels=LABELS[:tpm.shape[1]]) + return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]]) def fig5a(): - '''The network shown in Figure 5A of the 2014 IIT 3.0 paper. + """The network shown in Figure 5A of the 2014 IIT 3.0 paper. Diagram:: @@ -848,7 +854,7 @@ def fig5a(): | (COPY) +~~~~~~>| (COPY) | +~~~~~~~~+ +~~~~~~~~+ - ''' + """ tpm = np.array([ [0, 0, 0], [0, 0, 0], @@ -864,11 +870,11 @@ def fig5a(): [1, 0, 1], [1, 1, 0] ]) - return Network(tpm, connectivity_matrix=cm, node_labels=LABELS[:tpm.shape[1]]) + return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]]) def fig5b(): - '''The network shown in Figure 5B of the 2014 IIT 3.0 paper. + """The network shown in Figure 5B of the 2014 IIT 3.0 paper. Diagram:: @@ -882,7 +888,7 @@ def fig5b(): | (COPY) +~~~~~~>| (COPY) | +~~~~~~~~+ +~~~~~~~~+ - ''' + """ tpm = np.array([ [1, 0, 0], [1, 1, 1], @@ -898,7 +904,7 @@ def fig5b(): [0, 0, 1], [0, 1, 0] ]) - return Network(tpm, connectivity_matrix=cm, node_labels=LABELS[:tpm.shape[1]]) + return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]]) # The networks in figures 4, 6 and 8 are the same. 
@@ -909,7 +915,7 @@ def fig5b(): def fig16(): - '''The network shown in Figure 5B of the 2014 IIT 3.0 paper.''' + """The network shown in Figure 5B of the 2014 IIT 3.0 paper.""" tpm = np.array([ [0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 0, 1, 0, 0], @@ -1049,20 +1055,16 @@ def fig16(): [0, 0, 0, 0, 0, 1, 1], [0, 0, 0, 0, 0, 1, 1] ]) - return Network(tpm, connectivity_matrix=cm, node_labels=LABELS[:tpm.shape[1]]) - + return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]]) -################################################################### -# # -# Actual Causation # -# # -################################################################### +# Actual Causation +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def actual_causation(): - '''The actual causation example network, consisting of an ``OR`` and + """The actual causation example network, consisting of an ``OR`` and ``AND`` gate with self-loops. - ''' + """ tpm = np.array([ [1, 0, 0, 0], [0, 1, 0, 0], @@ -1077,11 +1079,11 @@ def actual_causation(): def disjunction_conjunction_network(): - '''The disjunction-conjunction example from Actual Causation Figure 7. + """The disjunction-conjunction example from Actual Causation Figure 7. A network of four elements, one output ``D`` with three inputs ``A B C``. The output turns ON if ``A`` AND ``B`` are ON or if ``C`` is ON. - ''' + """ tpm = np.array([ [0, 0, 0, 0], [0, 0, 0, 0], @@ -1110,9 +1112,9 @@ def disjunction_conjunction_network(): def prevention(): - '''The |Transition| for the prevention example from Actual Causation + """The |Transition| for the prevention example from Actual Causation Figure 5D. 
- ''' + """ tpm = np.array([ [0.5, 0.5, 1], [0.5, 0.5, 0], diff --git a/pyphi/exceptions.py b/pyphi/exceptions.py index 94a296fa1..cc0e456ec 100644 --- a/pyphi/exceptions.py +++ b/pyphi/exceptions.py @@ -2,11 +2,11 @@ # -*- coding: utf-8 -*- # exceptions.py -'''PyPhi exceptions.''' +"""PyPhi exceptions.""" class StateUnreachableError(ValueError): - '''The current state cannot be reached from any past state.''' + """The current state cannot be reached from any previous state.""" def __init__(self, state): self.state = state @@ -15,8 +15,12 @@ def __init__(self, state): class ConditionallyDependentError(ValueError): - '''The TPM is conditionally dependent.''' + """The TPM is conditionally dependent.""" class JSONVersionError(ValueError): - '''JSON was serialized with a different version of PyPhi.''' + """JSON was serialized with a different version of PyPhi.""" + + +class WrongDirectionError(ValueError): + """The wrong direction was provided.""" diff --git a/pyphi/jsonify.py b/pyphi/jsonify.py index 3bf33d447..213175e82 100644 --- a/pyphi/jsonify.py +++ b/pyphi/jsonify.py @@ -6,7 +6,7 @@ # TODO: resolve schema issues with `vphi` and other external consumers # TODO: somehow check schema instead of version? -''' +""" PyPhi- and NumPy-aware JSON serialization. To be properly serialized and deserialized, PyPhi objects must implement a @@ -34,7 +34,7 @@ def from_json(cls, json): the JSON stream. The JSON decoder uses this metadata to recursively deserialize the stream to a nested PyPhi object structure. The decoder will raise an exception if current PyPhi version doesn't match the version in the JSON data. -''' +""" import json @@ -49,11 +49,11 @@ def from_json(cls, json): def _loadable_models(): - '''A dictionary of loadable PyPhi models. + """A dictionary of loadable PyPhi models. These are stored in this function (instead of module scope) to resolve circular import issues. 
- ''' + """ classes = [ Direction, pyphi.Network, @@ -65,17 +65,19 @@ def _loadable_models(): pyphi.models.Bipartition, pyphi.models.KPartition, pyphi.models.Tripartition, - pyphi.models.Mip, - pyphi.models.Mice, + pyphi.models.RepertoireIrreducibilityAnalysis, + pyphi.models.MaximallyIrreducibleCauseOrEffect, + pyphi.models.MaximallyIrreducibleCause, + pyphi.models.MaximallyIrreducibleEffect, pyphi.models.Concept, - pyphi.models.Constellation, - pyphi.models.BigMip, + pyphi.models.CauseEffectStructure, + pyphi.models.SystemIrreducibilityAnalysis, pyphi.Transition, pyphi.models.ActualCut, - pyphi.models.AcMip, + pyphi.models.AcRepertoireIrreducibilityAnalysis, pyphi.models.CausalLink, pyphi.models.Account, - pyphi.models.AcBigMip + pyphi.models.AcSystemIrreducibilityAnalysis ] return {cls.__name__: cls for cls in classes} @@ -102,10 +104,10 @@ def _pop_metadata(dct): def jsonify(obj): # pylint: disable=too-many-return-statements - '''Return a JSON-encodable representation of an object, recursively using + """Return a JSON-encodable representation of an object, recursively using any available ``to_json`` methods, converting NumPy arrays and datatypes to - native lists and types along the way.''' - + native lists and types along the way. + """ # Call the `to_json` method if available and add metadata. 
if hasattr(obj, 'to_json'): d = obj.to_json() @@ -139,19 +141,19 @@ def jsonify(obj): # pylint: disable=too-many-return-statements class PyPhiJSONEncoder(json.JSONEncoder): - '''JSONEncoder that allows serializing PyPhi objects with ``jsonify``.''' + """JSONEncoder that allows serializing PyPhi objects with ``jsonify``.""" def encode(self, obj): # pylint: disable=arguments-differ - '''Encode the output of ``jsonify`` with the default encoder.''' + """Encode the output of ``jsonify`` with the default encoder.""" return super().encode(jsonify(obj)) def iterencode(self, obj, **kwargs): # pylint: disable=arguments-differ - '''Analog to `encode` used by json.dump.''' + """Analog to `encode` used by json.dump.""" return super().iterencode(jsonify(obj), **kwargs) def _encoder_kwargs(user_kwargs): - '''Update kwargs for `dump` and `dumps` to use the PyPhi encoder.''' + """Update kwargs for `dump` and `dumps` to use the PyPhi encoder.""" kwargs = {'separators': (',', ':'), 'cls': PyPhiJSONEncoder} kwargs.update(user_kwargs) @@ -159,18 +161,19 @@ def _encoder_kwargs(user_kwargs): def dumps(obj, **user_kwargs): - '''Serialize ``obj`` as JSON-formatted stream.''' + """Serialize ``obj`` as JSON-formatted stream.""" return json.dumps(obj, **_encoder_kwargs(user_kwargs)) def dump(obj, fp, **user_kwargs): - '''Serialize ``obj`` as a JSON-formatted stream and write to ``fp`` (a - ``.write()``-supporting file-like object.''' + """Serialize ``obj`` as a JSON-formatted stream and write to ``fp`` (a + ``.write()``-supporting file-like object. + """ return json.dump(obj, fp, **_encoder_kwargs(user_kwargs)) def _check_version(version): - '''Check whether the JSON version matches the PyPhi version.''' + """Check whether the JSON version matches the PyPhi version.""" if version != pyphi.__version__: raise pyphi.exceptions.JSONVersionError( 'Cannot load JSON from a different version of PyPhi. 
' @@ -179,20 +182,22 @@ def _check_version(version): def _is_model(dct): - '''Check if ``dct`` is a PyPhi model serialization.''' + """Check if ``dct`` is a PyPhi model serialization.""" return CLASS_KEY in dct class _ObjectCache(cache.DictCache): - '''Cache mapping ids to loaded objects, keyed by the id of the object.''' + """Cache mapping ids to loaded objects, keyed by the id of the object.""" + def key(self, dct, **kwargs): # pylint: disable=arguments-differ return _get_metadata(dct) class PyPhiJSONDecoder(json.JSONDecoder): - '''Extension of the default encoder which automatically deserializes + """Extension of the default encoder which automatically deserializes PyPhi JSON to the appropriate model classes. - ''' + """ + def __init__(self, *args, **kwargs): kwargs['object_hook'] = self._load_object super().__init__(*args, **kwargs) @@ -204,14 +209,14 @@ def __init__(self, *args, **kwargs): self._object_cache = _ObjectCache() def _load_object(self, obj): - '''Recursively load a PyPhi object. + """Recursively load a PyPhi object. PyPhi models are recursively loaded, using the model metadata to recreate the original object relations. Lists are cast to tuples because most objects in PyPhi which are serialized to lists (eg. mechanisms and purviews) are ultimately tuples. Other lists (tpms, repertoires) should be cast to the correct type in init methods. - ''' + """ if isinstance(obj, dict): obj = {k: self._load_object(v) for k, v in obj.items()} # Load a serialized PyPhi model @@ -225,10 +230,10 @@ def _load_object(self, obj): @cache.method('_object_cache') def _load_model(self, dct): - '''Load a serialized PyPhi model. + """Load a serialized PyPhi model. The object is memoized for reuse elsewhere in the object graph. 
- ''' + """ classname, version, _ = _pop_metadata(dct) _check_version(version) @@ -243,10 +248,10 @@ def _load_model(self, dct): def loads(string): - '''Deserialize a JSON string to a Python object.''' + """Deserialize a JSON string to a Python object.""" return json.loads(string, cls=PyPhiJSONDecoder) def load(fp): - '''Deserialize a JSON stream to a Python object.''' + """Deserialize a JSON stream to a Python object.""" return json.load(fp, cls=PyPhiJSONDecoder) diff --git a/pyphi/log.py b/pyphi/log.py index ac24e8f0f..c705dd2fa 100644 --- a/pyphi/log.py +++ b/pyphi/log.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# logging.py +# log.py -'''Utilities for logging and progress bars.''' +"""Utilities for logging and progress bars.""" import logging import threading @@ -12,7 +12,7 @@ # pylint: disable=arguments-differ class ProgressBar(tqdm.tqdm): - '''Thread safe progress-bar wrapper around ``tqdm``.''' + """Thread safe progress-bar wrapper around ``tqdm``.""" _lock = threading.RLock() @@ -39,9 +39,10 @@ def close(self): class ProgressBarHandler(logging.StreamHandler): - '''Logging handler that writes through ``tqdm`` in order to not break + """Logging handler that writes through ``tqdm`` in order to not break progress bars. - ''' + """ + def emit(self, record): try: msg = self.format(record) diff --git a/pyphi/macro.py b/pyphi/macro.py index 9ebe1b713..01fea52d4 100644 --- a/pyphi/macro.py +++ b/pyphi/macro.py @@ -2,9 +2,9 @@ # -*- coding: utf-8 -*- # macro.py -''' +""" Methods for coarse-graining systems to different levels of spatial analysis. 
-''' +""" # pylint: disable=too-few-public-methods,too-many-arguments @@ -33,18 +33,17 @@ def reindex(indices): - '''Generate a new set of node indices, the size of indices.''' + """Generate a new set of node indices, the size of indices.""" return tuple(range(len(indices))) def rebuild_system_tpm(node_tpms): - '''Reconstruct the network TPM from a collection of node TPMs.''' - expanded_tpms = np.array([expand_node_tpm(tpm) for tpm in node_tpms]) - return np.moveaxis(expanded_tpms, 0, -1) + """Reconstruct the network TPM from a collection of node TPMs.""" + return np.stack([expand_node_tpm(tpm) for tpm in node_tpms], axis=-1) def remove_singleton_dimensions(tpm): - '''Remove singleton dimensions from the TPM. + """Remove singleton dimensions from the TPM. Singleton dimensions are created by conditioning on a set of elements. This removes those elements from the TPM, leaving a TPM that only @@ -52,7 +51,7 @@ def remove_singleton_dimensions(tpm): Note that indices used in the original TPM must be reindexed for the smaller TPM. - ''' + """ # Don't squeeze out the final dimension (which contains the probability) # for networks with one element. if tpm.ndim <= 2: @@ -62,15 +61,16 @@ def remove_singleton_dimensions(tpm): def node_labels(indices): - '''Labels for macro nodes.''' + """Return the labels for macro nodes.""" return tuple("m{}".format(i) for i in indices) def run_tpm(system, steps, blackbox): - '''Iterate the TPM for the given number of timesteps. + """Iterate the TPM for the given number of timesteps. - Returns tpm * (noise_tpm^(t-1)) - ''' + Returns: + np.ndarray: tpm * (noise_tpm^(t-1)) + """ # Generate noised TPM # Noise the connections from every output element to elements in other # boxes. @@ -97,11 +97,12 @@ def run_tpm(system, steps, blackbox): class SystemAttrs(namedtuple('SystemAttrs', ['tpm', 'cm', 'node_indices', 'state'])): - '''An immutable container that holds all the attributes of a subsystem. 
+ """An immutable container that holds all the attributes of a subsystem. Versions of this object are passed down the steps of the micro-to-macro pipeline. - ''' + """ + @property def nodes(self): labels = node_labels(self.node_indices) @@ -122,7 +123,7 @@ def apply(self, system): class MacroSubsystem(Subsystem): - '''A subclass of |Subsystem| implementing macro computations. + """A subclass of |Subsystem| implementing macro computations. This subsystem performs blackboxing and coarse-graining of elements. @@ -136,7 +137,8 @@ class MacroSubsystem(Subsystem): After each macro update (temporal blackboxing, spatial blackboxing, and spatial coarse-graining) the TPM, CM, nodes, and state are updated so that they correctly represent the updated system. - ''' + """ + # TODO refactor the _blackbox_space, _coarsegrain_space methods to methods # on their respective Blackbox and CoarseGrain objects? This would nicely # abstract the logic into a discrete, disconnected transformation. @@ -195,11 +197,11 @@ def __init__(self, network, state, nodes, cut=None, mice_cache=None, @staticmethod def _squeeze(system): - '''Squeeze out all singleton dimensions in the Subsystem. + """Squeeze out all singleton dimensions in the Subsystem. Reindexes the subsystem so that the nodes are ``0..n`` where ``n`` is the number of internal indices in the system. 
- ''' + """ assert system.node_indices == tpm_indices(system.tpm) internal_indices = tpm_indices(system.tpm) @@ -224,8 +226,7 @@ def _squeeze(system): @staticmethod def _blackbox_partial_noise(blackbox, system): - '''Noise connections from hidden elements to other boxes.''' - + """Noise connections from hidden elements to other boxes.""" # Noise inputs from non-output elements hidden in other boxes node_tpms = [] for node in system.nodes: @@ -242,7 +243,7 @@ def _blackbox_partial_noise(blackbox, system): @staticmethod def _blackbox_time(time_scale, blackbox, system): - '''Black box the CM and TPM over the given time_scale.''' + """Black box the CM and TPM over the given time_scale.""" blackbox = blackbox.reindex() tpm = run_tpm(system, time_scale, blackbox) @@ -254,7 +255,7 @@ def _blackbox_time(time_scale, blackbox, system): return SystemAttrs(tpm, cm, system.node_indices, system.state) def _blackbox_space(self, blackbox, system): - '''Blackbox the TPM and CM in space. + """Blackbox the TPM and CM in space. Conditions the TPM on the current value of the hidden nodes. The CM is set to universal connectivity. @@ -264,7 +265,7 @@ def _blackbox_space(self, blackbox, system): This shrinks the size of the TPM by the number of hidden indices; now there is only `len(output_indices)` dimensions in the TPM and in the state of the subsystem. - ''' + """ tpm = marginalize_out(blackbox.hidden_indices, system.tpm) assert blackbox.output_indices == tpm_indices(tpm) @@ -286,8 +287,7 @@ def _blackbox_space(self, blackbox, system): @staticmethod def _coarsegrain_space(coarse_grain, is_cut, system): - '''Spatially coarse-grain the TPM and CM.''' - + """Spatially coarse-grain the TPM and CM.""" tpm = coarse_grain.macro_tpm( system.tpm, check_independence=(not is_cut)) @@ -302,20 +302,20 @@ def _coarsegrain_space(coarse_grain, is_cut, system): @property def cut_indices(self): - '''The indices of this system to be cut for |big_phi| computations. 
+ """The indices of this system to be cut for |big_phi| computations. For macro computations the cut is applied to the underlying micro-system. - ''' + """ return self._node_indices @property def cut_mechanisms(self): - '''The mechanisms of this system that are currently cut. + """The mechanisms of this system that are currently cut. Note that although ``cut_indices`` returns micro indices, this returns macro mechanisms. - ''' + """ mechanisms = [] for mechanism in utils.powerset(self.node_indices, nonempty=True): micro_mechanism = self.macro2micro(mechanism) @@ -325,14 +325,14 @@ def cut_mechanisms(self): return tuple(mechanisms) def apply_cut(self, cut): - '''Return a cut version of this |MacroSubsystem|. + """Return a cut version of this |MacroSubsystem|. Args: cut (Cut): The cut to apply to this |MacroSubsystem|. Returns: MacroSubsystem: The cut version of this |MacroSubsystem|. - ''' + """ return MacroSubsystem( self.network, self._network_state, @@ -345,13 +345,15 @@ def apply_cut(self, cut): # mice_cache=self._mice_cache) def potential_purviews(self, direction, mechanism, purviews=False): - '''Override Subsystem implementation using Network-level indices.''' + """Override Subsystem implementation using Network-level indices.""" all_purviews = utils.powerset(self.node_indices) - return irreducible_purviews(self.cm, direction, mechanism, all_purviews) + return irreducible_purviews( + self.cm, direction, mechanism, all_purviews) def macro2micro(self, macro_indices): - '''Returns all micro indices which compose the elements specified by - ``macro_indices``.''' + """Return all micro indices which compose the elements specified by + ``macro_indices``. 
+ """ def from_partition(partition, macro_indices): micro_indices = itertools.chain.from_iterable( partition[i] for i in macro_indices) @@ -369,9 +371,9 @@ def from_partition(partition, macro_indices): return macro_indices def macro2blackbox_outputs(self, macro_indices): - '''Given a set of macro elements, return the blackbox output elements + """Given a set of macro elements, return the blackbox output elements which compose these elements. - ''' + """ if not self._blackbox: raise ValueError('System is not blackboxed') @@ -386,9 +388,9 @@ def __str__(self): return repr(self) def __eq__(self, other): - '''Two macro systems are equal if each underlying |Subsystem| is equal + """Two macro systems are equal if each underlying |Subsystem| is equal and all macro attributes are equal. - ''' + """ if type(self) != type(other): # pylint: disable=unidiomatic-typecheck return False @@ -408,32 +410,32 @@ def __hash__(self): class CoarseGrain(namedtuple('CoarseGrain', ['partition', 'grouping'])): - '''Represents a coarse graining of a collection of nodes. + """Represents a coarse graining of a collection of nodes. Attributes: partition (tuple[tuple]): The partition of micro-elements into macro-elements. grouping (tuple[tuple[tuple]]): The grouping of micro-states into macro-states. - ''' + """ # TODO: validate? Currently implemented in validate.coarse_grain, but # should be moved here if this ever has an __init__ method @property def micro_indices(self): - '''Indices of micro elements represented in this coarse-graining.''' + """Indices of micro elements represented in this coarse-graining.""" return tuple(sorted(idx for part in self.partition for idx in part)) @property def macro_indices(self): - '''Indices of macro elements of this coarse-graining.''' + """Indices of macro elements of this coarse-graining.""" return tuple(range(len(self.partition))) def __len__(self): return len(self.partition) def reindex(self): - '''Re-index this coarse graining to use squeezed indices. 
+ """Re-index this coarse graining to use squeezed indices. The output grouping is translated to use indices ``0..n``, where ``n`` is the number of micro indices in the coarse-graining. Re-indexing does @@ -448,7 +450,7 @@ def reindex(self): >>> coarse_grain = CoarseGrain(partition, grouping) >>> coarse_grain.reindex() CoarseGrain(partition=((0, 1),), grouping=(((0,), (1, 2)),)) - ''' + """ _map = dict(zip(self.micro_indices, reindex(self.micro_indices))) partition = tuple( tuple(_map[index] for index in group) @@ -457,7 +459,7 @@ def reindex(self): return CoarseGrain(partition, self.grouping) def macro_state(self, micro_state): - '''Translate a micro state to a macro state + """Translate a micro state to a macro state Args: micro_state (tuple[int]): The state of the micro nodes in this @@ -475,7 +477,7 @@ def macro_state(self, micro_state): (1,) >>> coarse_grain.macro_state((1, 1)) (1,) - ''' + """ assert len(micro_state) == len(self.micro_indices) # TODO: only reindex if this coarse grain is not already from 0..n? @@ -489,32 +491,32 @@ def macro_state(self, micro_state): for i in self.macro_indices) def make_mapping(self): - '''Return a mapping from micro-state to the macro-states based on the + """Return a mapping from micro-state to the macro-states based on the partition and state grouping of this coarse-grain. Return: (nd.ndarray): A mapping from micro-states to macro-states. The |ith| entry in the mapping is the macro-state corresponding to the |ith| micro-state. - ''' + """ micro_states = utils.all_states(len(self.micro_indices)) # Find the corresponding macro-state for each micro-state. # The i-th entry in the mapping is the macro-state corresponding to the # i-th micro-state. 
- mapping = [convert.state2loli_index(self.macro_state(micro_state)) + mapping = [convert.state2le_index(self.macro_state(micro_state)) for micro_state in micro_states] return np.array(mapping) def macro_tpm_sbs(self, state_by_state_micro_tpm): - '''Create a state-by-state coarse-grained macro TPM. + """Create a state-by-state coarse-grained macro TPM. Args: micro_tpm (nd.array): The state-by-state TPM of the micro-system. Returns: np.ndarray: The state-by-state TPM of the macro-system. - ''' + """ validate.tpm(state_by_state_micro_tpm, check_independence=False) mapping = self.make_mapping() @@ -525,18 +527,18 @@ def macro_tpm_sbs(self, state_by_state_micro_tpm): micro_states = range(2 ** len(self.micro_indices)) micro_state_transitions = itertools.product(micro_states, repeat=2) - # For every possible micro-state transition, get the corresponding past - # and current macro-state using the mapping and add that probability to - # the state-by-state macro TPM. - for past_state, current_state in micro_state_transitions: - macro_tpm[mapping[past_state], mapping[current_state]] += ( - state_by_state_micro_tpm[past_state, current_state]) + # For every possible micro-state transition, get the corresponding + # previous and next macro-state using the mapping and add that + # probability to the state-by-state macro TPM. + for previous_state, current_state in micro_state_transitions: + macro_tpm[mapping[previous_state], mapping[current_state]] += ( + state_by_state_micro_tpm[previous_state, current_state]) # Re-normalize each row because we're going from larger to smaller TPM return np.array([distribution.normalize(row) for row in macro_tpm]) def macro_tpm(self, micro_tpm, check_independence=True): - '''Create a coarse-grained macro TPM. + """Create a coarse-grained macro TPM. Args: micro_tpm (nd.array): The TPM of the micro-system. @@ -549,7 +551,7 @@ def macro_tpm(self, micro_tpm, check_independence=True): Returns: np.ndarray: The state-by-node TPM of the macro-system. 
- ''' + """ if not is_state_by_state(micro_tpm): micro_tpm = convert.state_by_node2state_by_state(micro_tpm) @@ -562,46 +564,46 @@ def macro_tpm(self, micro_tpm, check_independence=True): class Blackbox(namedtuple('Blackbox', ['partition', 'output_indices'])): - '''Class representing a blackboxing of a system. + """Class representing a blackboxing of a system. Attributes: partition (tuple[tuple[int]]): The partition of nodes into boxes. output_indices (tuple[int]): Outputs of the blackboxes. - ''' + """ # TODO: validate! # TODO: validate that output indices are ordered? @property def hidden_indices(self): - '''All elements hidden inside the blackboxes.''' + """All elements hidden inside the blackboxes.""" return tuple(sorted(set(self.micro_indices) - set(self.output_indices))) @property def micro_indices(self): - '''Indices of micro-elements in this blackboxing.''' + """Indices of micro-elements in this blackboxing.""" return tuple(sorted(idx for part in self.partition for idx in part)) @property def macro_indices(self): - '''Fresh indices of macro-elements of the blackboxing.''' + """Fresh indices of macro-elements of the blackboxing.""" return reindex(self.output_indices) def __len__(self): return len(self.partition) def outputs_of(self, partition_index): - '''The outputs of the partition at ``partition_index``. + """The outputs of the partition at ``partition_index``. Note that this returns a tuple of element indices, since coarse- grained blackboxes may have multiple outputs. - ''' + """ partition = self.partition[partition_index] outputs = set(partition).intersection(self.output_indices) return tuple(sorted(outputs)) def reindex(self): - '''Squeeze the indices of this blackboxing to ``0..n``. + """Squeeze the indices of this blackboxing to ``0..n``. Returns: Blackbox: a new, reindexed |Blackbox|. 
@@ -612,7 +614,7 @@ def reindex(self): >>> blackbox = Blackbox(partition, output_indices) >>> blackbox.reindex() Blackbox(partition=((1,), (0, 2)), output_indices=(0, 1)) - ''' + """ _map = dict(zip(self.micro_indices, reindex(self.micro_indices))) partition = tuple( tuple(_map[index] for index in group) @@ -623,7 +625,7 @@ def reindex(self): return Blackbox(partition, output_indices) def macro_state(self, micro_state): - '''Compute the macro-state of this blackbox. + """Compute the macro-state of this blackbox. This is just the state of the blackbox's output indices. @@ -633,14 +635,14 @@ def macro_state(self, micro_state): Returns: tuple[int]: The state of the output indices. - ''' + """ assert len(micro_state) == len(self.micro_indices) reindexed = self.reindex() return utils.state_of(reindexed.output_indices, micro_state) def in_same_box(self, a, b): - '''Returns ``True`` if nodes ``a`` and ``b``` are in the same box.''' + """Return ``True`` if nodes ``a`` and ``b``` are in the same box.""" assert a in self.micro_indices assert b in self.micro_indices @@ -651,12 +653,12 @@ def in_same_box(self, a, b): return False def hidden_from(self, a, b): - '''Returns True if ``a`` is hidden in a different box than ``b``.''' + """Return True if ``a`` is hidden in a different box than ``b``.""" return a in self.hidden_indices and not self.in_same_box(a, b) def _partitions_list(N): - '''Return a list of partitions of the |N| binary nodes. + """Return a list of partitions of the |N| binary nodes. Args: N (int): The number of nodes under consideration. @@ -668,7 +670,7 @@ def _partitions_list(N): Example: >>> _partitions_list(3) [[[0, 1], [2]], [[0, 2], [1]], [[0], [1, 2]], [[0], [1], [2]]] - ''' + """ if N < (_NUM_PRECOMPUTED_PARTITION_LISTS): return list(_partition_lists[N]) else: @@ -678,7 +680,7 @@ def _partitions_list(N): def all_partitions(indices): - '''Return a list of all possible coarse grains of a network. 
+ """Return a list of all possible coarse grains of a network. Args: indices (tuple[int]): The micro indices to partition. @@ -686,7 +688,7 @@ def all_partitions(indices): Yields: tuple[tuple]: A possible partition. Each element of the tuple is a tuple of micro-elements which correspond to macro-elements. - ''' + """ n = len(indices) partitions = _partitions_list(n) if n > 0: @@ -698,7 +700,7 @@ def all_partitions(indices): def all_groupings(partition): - '''Return all possible groupings of states for a particular coarse graining + """Return all possible groupings of states for a particular coarse graining (partition) of a network. Args: @@ -710,7 +712,7 @@ def all_groupings(partition): system. TODO: document exactly how to interpret the grouping. - ''' + """ if not all(partition): raise ValueError('Each part of the partition must have at least one ' 'element.') @@ -725,25 +727,25 @@ def all_groupings(partition): def all_coarse_grains(indices): - '''Generator over all possible |CoarseGrains| of these indices. + """Generator over all possible |CoarseGrains| of these indices. Args: indices (tuple[int]): Node indices to coarse grain. Yields: CoarseGrain: The next |CoarseGrain| for ``indices``. - ''' + """ for partition in all_partitions(indices): for grouping in all_groupings(partition): yield CoarseGrain(partition, grouping) def all_coarse_grains_for_blackbox(blackbox): - '''Generator over all |CoarseGrains| for the given blackbox. + """Generator over all |CoarseGrains| for the given blackbox. If a box has multiple outputs, those outputs are partitioned into the same coarse-grain macro-element. - ''' + """ for partition in all_partitions(blackbox.output_indices): for grouping in all_groupings(partition): coarse_grain = CoarseGrain(partition, grouping) @@ -755,14 +757,14 @@ def all_coarse_grains_for_blackbox(blackbox): def all_blackboxes(indices): - '''Generator over all possible blackboxings of these indices. 
+ """Generator over all possible blackboxings of these indices. Args: indices (tuple[int]): Nodes to blackbox. Yields: Blackbox: The next |Blackbox| of ``indices``. - ''' + """ for partition in all_partitions(indices): # TODO? don't consider the empty set here # (pass `nonempty=True` to `powerset`) @@ -776,17 +778,17 @@ def all_blackboxes(indices): class MacroNetwork: - '''A coarse-grained network of nodes. + """A coarse-grained network of nodes. See the :ref:`macro-micro` example in the documentation for more information. Attributes: network (Network): The network object of the macro-system. - phi (float): The |big_phi| of the network's main complex. + phi (float): The |big_phi| of the network's major complex. micro_network (Network): The network object of the corresponding micro system. - micro_phi (float): The |big_phi| of the main complex of the + micro_phi (float): The |big_phi| of the major complex of the corresponding micro-system. coarse_grain (CoarseGrain): The coarse-graining of micro-elements into macro-elements. @@ -794,7 +796,8 @@ class MacroNetwork: blackbox (Blackbox): The blackboxing of micro elements in the network. emergence (float): The difference between the |big_phi| of the macro- and the micro-system. - ''' + """ + def __init__(self, network, system, macro_phi, micro_phi, coarse_grain, time_scale=1, blackbox=None): @@ -812,12 +815,12 @@ def __str__(self): @property def emergence(self): - '''Difference between the |big_phi| of the macro and micro systems''' + """Difference between the |big_phi| of the macro and micro systems""" return round(self.phi - self.micro_phi, config.PRECISION) def coarse_grain(network, state, internal_indices): - '''Find the maximal coarse-graining of a micro-system. + """Find the maximal coarse-graining of a micro-system. Args: network (Network): The network in question. @@ -826,7 +829,7 @@ def coarse_grain(network, state, internal_indices): Returns: tuple[int, CoarseGrain]: The phi-value of the maximal |CoarseGrain|. 
- ''' + """ max_phi = float('-inf') max_coarse_grain = CoarseGrain((), ()) @@ -837,7 +840,7 @@ def coarse_grain(network, state, internal_indices): except ConditionallyDependentError: continue - phi = compute.big_phi(subsystem) + phi = compute.phi(subsystem) if (phi - max_phi) > constants.EPSILON: max_phi = phi max_coarse_grain = coarse_grain @@ -847,8 +850,7 @@ def coarse_grain(network, state, internal_indices): # TODO: refactor this def all_macro_systems(network, state, blackbox, coarse_grain, time_scales): - '''Generator over all possible macro-systems for the network.''' - + """Generator over all possible macro-systems for the network.""" if time_scales is None: time_scales = [1] @@ -886,7 +888,7 @@ def coarse_grains(blackbox, system): def emergence(network, state, blackbox=False, coarse_grain=True, time_scales=None): - '''Check for the emergence of a micro-system into a macro-system. + """Check for the emergence of a micro-system into a macro-system. Checks all possible blackboxings and coarse-grainings of a system to find the spatial scale with maximum integrated information. @@ -908,8 +910,8 @@ def emergence(network, state, blackbox=False, coarse_grain=True, Returns: MacroNetwork: The maximal macro-system generated from the micro-system. 
- ''' - micro_phi = compute.main_complex(network, state).phi + """ + micro_phi = compute.major_complex(network, state).phi max_phi = float('-inf') max_network = None @@ -917,7 +919,7 @@ def emergence(network, state, blackbox=False, coarse_grain=True, for subsystem in all_macro_systems(network, state, blackbox=blackbox, coarse_grain=coarse_grain, time_scales=time_scales): - phi = compute.big_phi(subsystem) + phi = compute.phi(subsystem) if (phi - max_phi) > constants.EPSILON: max_phi = phi @@ -939,7 +941,7 @@ def phi_by_grain(network, state): systems = utils.powerset(network.node_indices, nonempty=True) for system in systems: micro_subsystem = Subsystem(network, state, system) - phi = compute.big_phi(micro_subsystem) + phi = compute.phi(micro_subsystem) list_of_phi.append([len(micro_subsystem), phi, system, None]) for coarse_grain in all_coarse_grains(system): @@ -949,7 +951,7 @@ def phi_by_grain(network, state): except ConditionallyDependentError: continue - phi = compute.big_phi(subsystem) + phi = compute.phi(subsystem) list_of_phi.append([len(subsystem), phi, system, coarse_grain]) return list_of_phi @@ -958,7 +960,7 @@ def phi_by_grain(network, state): # TODO? give example of doing it for a bunch of coarse-grains in docstring # (make all groupings and partitions, make_network for each of them, etc.) def effective_info(network): - '''Return the effective information of the given network. + """Return the effective information of the given network. .. note:: For details, see: @@ -970,7 +972,7 @@ def effective_info(network): Available online: `doi: 10.1073/pnas.1314922110 `_. - ''' + """ validate.is_network(network) sbs_tpm = convert.state_by_node2state_by_state(network.tpm) diff --git a/pyphi/memory.py b/pyphi/memory.py index 8b49f39f3..bb737ddaf 100644 --- a/pyphi/memory.py +++ b/pyphi/memory.py @@ -2,9 +2,9 @@ # -*- coding: utf-8 -*- # memory.py -''' +""" Decorators and objects for memoization. 
-''' +""" # pylint: disable=missing-docstring @@ -16,16 +16,17 @@ def cache(ignore=None): - '''Decorator for memoizing a function using either the filesystem or a - database.''' + """Decorator for memoizing a function using either the filesystem or a + database. + """ def joblib_decorator(func): - if func.__name__ == '_big_mip' and not config.CACHE_BIGMIPS: + if func.__name__ == '_sia' and not config.CACHE_SIAS: return func return constants.joblib_memory.cache(func, ignore=ignore) def db_decorator(func): - if func.__name__ == '_big_mip' and not config.CACHE_BIGMIPS: + if func.__name__ == '_sia' and not config.CACHE_SIAS: return func return DbMemoizedFunc(func, ignore) @@ -38,8 +39,7 @@ def db_decorator(func): class DbMemoizedFunc: - - '''A memoized function, with a databse backing the cache.''' + """A memoized function, with a databse backing the cache.""" def __init__(self, func, ignore): # Store a reference to the raw function, without any memoization. @@ -70,8 +70,9 @@ def __call__(self, *args, **kwargs): # TODO make this easier to use def get_output_key(self, args, kwargs): - '''Return the key that the output should be cached with, given - arguments, keyword arguments, and a list of arguments to ignore.''' + """Return the key that the output should be cached with, given + arguments, keyword arguments, and a list of arguments to ignore. + """ # Get a dictionary mapping argument names to argument values where # ignored arguments are omitted. 
filtered_args = joblib.func_inspect.filter_args( @@ -82,5 +83,5 @@ def get_output_key(self, args, kwargs): return db.generate_key(filtered_args) def load_output(self, args, kwargs): - '''Return cached output.''' + """Return cached output.""" return db.find(self.get_output_key(args, kwargs)) diff --git a/pyphi/models/__init__.py b/pyphi/models/__init__.py index 67f83a1e6..46d5530eb 100644 --- a/pyphi/models/__init__.py +++ b/pyphi/models/__init__.py @@ -2,28 +2,49 @@ # -*- coding: utf-8 -*- # models/__init__.py -'''See |models.big_phi|, |models.concept|, and |models.cuts| for documentation. +""" +See |models.subsystem|, |models.mechanism|, and |models.cuts| for +documentation. Attributes: - BigMip: Alias for :class:`big_phi.BigMip` - Mip: Alias for :class:`concept.Mip` - Mice: Alias for :class:`concept.Mice` - Concept: Alias for :class:`concept.Concept` - Constellation: Alias for :class:`concept.Constellation` - Cut: Alias for :class:`cuts.Cut` - Part: Alias for :class:`cuts.Part` - Bipartition: Alias for :class:`cuts.Bipartition` - ActualCut: Alias for :class:`cuts.ActualCut` - AcMip: Alias for :class:`actual_causation.AcMip` - CausalLink: Alias for :class:`actual_causation.CausalLink` - AcBigMip: Alias for :class:`actual_causation.AcBigMip` - Account: Alias for :class:`actual_causation.Account` - DirectedAccount: Alias for :class:`actual_causation.DirectedAccount` -''' + Account: Alias for :class:`pyphi.models.actual_causation.Account`. + AcRepertoireIrreducibilityAnalysis: Alias for. + :class:`pyphi.models.actual_causation.AcRepertoireIrreducibilityAnalysis`. + AcSystemIrreducibilityAnalysis: Alias for + :class:`pyphi.models.actual_causation.AcSystemIrreducibilityAnalysis`. + ActualCut: Alias for :class:`pyphi.models.cuts.ActualCut`. + Bipartition: Alias for :class:`pyphi.models.cuts.Bipartition`. + CausalLink: Alias for :class:`pyphi.models.actual_causation.CausalLink`. + CauseEffectStructure: Alias for + :class:`pyphi.models.subsystem.CauseEffectStructure`. 
+ Concept: Alias for :class:`pyphi.models.mechanism.Concept`. + Cut: Alias for :class:`pyphi.models.cuts.Cut`. + DirectedAccount: Alias for + :class:`pyphi.models.actual_causation.DirectedAccount`. + MaximallyIrreducibleCause: Alias for + :class:`pyphi.models.mechanism.MaximallyIrreducibleCause`. + MaximallyIrreducibleEffect: Alias for + :class:`pyphi.models.mechanism.MaximallyIrreducibleEffect`. + MaximallyIrreducibleCauseOrEffect: Alias for + :class:`pyphi.models.mechanism.MaximallyIrreducibleCauseOrEffect`. + Part: Alias for :class:`pyphi.models.cuts.Part`. + RepertoireIrreducibilityAnalysis: Alias for + :class:`pyphi.models.mechanism.RepertoireIrreducibilityAnalysis`. + SystemIrreducibilityAnalysis: Alias for + :class:`pyphi.models.subsystem.SystemIrreducibilityAnalysis`. +""" -from .actual_causation import (AcBigMip, CausalLink, AcMip, _null_ac_mip, Event, - _null_ac_bigmip, DirectedAccount, Account) -from .big_phi import BigMip, _null_bigmip -from .concept import (Mip, _null_mip, Mice, Concept, Constellation, - normalize_constellation) -from .cuts import ActualCut, Cut, Part, Bipartition, NullCut, Tripartition, KPartition, KCut +# pylint: disable=unused-import + +from .actual_causation import (AcSystemIrreducibilityAnalysis, CausalLink, + AcRepertoireIrreducibilityAnalysis, + _null_ac_ria, Event, _null_ac_sia, + DirectedAccount, Account) +from .subsystem import (SystemIrreducibilityAnalysis, _null_sia, + CauseEffectStructure) +from .mechanism import (RepertoireIrreducibilityAnalysis, _null_ria, + MaximallyIrreducibleCauseOrEffect, + MaximallyIrreducibleCause, MaximallyIrreducibleEffect, + Concept) +from .cuts import (ActualCut, Cut, Part, Bipartition, NullCut, Tripartition, + KPartition, KCut) diff --git a/pyphi/models/actual_causation.py b/pyphi/models/actual_causation.py index 0f726d6f5..a590c8876 100644 --- a/pyphi/models/actual_causation.py +++ b/pyphi/models/actual_causation.py @@ -2,9 +2,9 @@ # -*- coding: utf-8 -*- # models/actual_causation.py -''' 
+""" Objects that represent structures used in actual causation. -''' +""" # pylint: disable=too-many-arguments @@ -13,21 +13,25 @@ from . import cmp, fmt from .. import Direction, config, utils -# TODO(slipperyhank): Why do we even need this? # TODO(slipperyhank): add second state -_acmip_attributes = ['alpha', 'state', 'direction', 'mechanism', 'purview', +_acria_attributes = ['alpha', 'state', 'direction', 'mechanism', 'purview', 'partition', 'probability', 'partitioned_probability'] -_acmip_attributes_for_eq = ['alpha', 'state', 'direction', 'mechanism', +_acria_attributes_for_eq = ['alpha', 'state', 'direction', 'mechanism', 'purview', 'probability'] + def greater_than_zero(alpha): - '''Return ``True`` if alpha is greater than zero, accounting for - numerical errors.''' + """Return ``True`` if alpha is greater than zero, accounting for + numerical errors. + """ return alpha > 0 and not utils.eq(alpha, 0) -class AcMip(cmp.Orderable, namedtuple('AcMip', _acmip_attributes)): - '''A minimum information partition for ac_coef calculation. +class AcRepertoireIrreducibilityAnalysis( + cmp.Orderable, namedtuple('AcRepertoireIrreducibilityAnalysis', + _acria_attributes)): + """A minimum information partition for ac_coef calculation. + These can be compared with the built-in Python comparison operators (``<``, ``>``, etc.). First, |alpha| values are compared. Then, if these are equal @@ -38,12 +42,12 @@ class AcMip(cmp.Orderable, namedtuple('AcMip', _acmip_attributes)): This is the difference between the mechanism's unpartitioned and partitioned actual probability. state (tuple[int]): - state of system in specified direction (past, future) + state of system in specified direction (cause, effect) direction (str): - The temporal direction specifiying whether this AcMIP should be + The temporal direction specifiying whether this analysis should be calculated with cause or effect repertoires. mechanism (tuple[int]): - The mechanism over which to evaluate the AcMIP. 
+ The mechanism to analyze. purview (tuple[int]): The purview over which the unpartitioned actual probability differs the least from the actual probability of the partition. @@ -51,10 +55,11 @@ class AcMip(cmp.Orderable, namedtuple('AcMip', _acmip_attributes)): The partition that makes the least difference to the mechanism's repertoire. probability (float): - The probability of the state in the past/future. + The probability of the state in the previous/next timestep. partitioned_probability (float): The probability of the state in the partitioned repertoire. - ''' + """ + __slots__ = () unorderable_unless_eq = ['direction'] @@ -67,142 +72,153 @@ def order_by(self): def __eq__(self, other): # TODO(slipperyhank): include 2nd state here? - return cmp.general_eq(self, other, _acmip_attributes_for_eq) + return cmp.general_eq(self, other, _acria_attributes_for_eq) def __bool__(self): - '''An |AcMip| is ``True`` if it has |alpha > 0|.''' + """An |AcRepertoireIrreducibilityAnalysis| is ``True`` if it has + |alpha > 0|. 
+ """ return greater_than_zero(self.alpha) @property def phi(self): - '''Alias for |alpha| for PyPhi utility functions.''' + """Alias for |alpha| for PyPhi utility functions.""" return self.alpha def __hash__(self): - attrs = tuple(getattr(self, attr) for attr in _acmip_attributes_for_eq) + attrs = tuple(getattr(self, attr) for attr in _acria_attributes_for_eq) return hash(attrs) def to_json(self): - '''Return a JSON-serializable representation.''' - return {attr: getattr(self, attr) for attr in _acmip_attributes} + """Return a JSON-serializable representation.""" + return {attr: getattr(self, attr) for attr in _acria_attributes} def __repr__(self): - return fmt.make_repr(self, _acmip_attributes) + return fmt.make_repr(self, _acria_attributes) def __str__(self): - return "Mip\n" + fmt.indent(fmt.fmt_ac_mip(self)) + return ("RepertoireIrreducibilityAnalysis\n" + + fmt.indent(fmt.fmt_ac_sia(self))) -def _null_ac_mip(state, direction, mechanism, purview): - return AcMip(state=state, - direction=direction, - mechanism=mechanism, - purview=purview, - partition=None, - probability=None, - partitioned_probability=None, - alpha=0.0) +def _null_ac_ria(state, direction, mechanism, purview): + return AcRepertoireIrreducibilityAnalysis( + state=state, + direction=direction, + mechanism=mechanism, + purview=purview, + partition=None, + probability=None, + partitioned_probability=None, + alpha=0.0 + ) class CausalLink(cmp.Orderable): - '''A maximally irreducible actual cause or effect. + """A maximally irreducible actual cause or effect. These can be compared with the built-in Python comparison operators (``<``, ``>``, etc.). First, |alpha| values are compared. Then, if these are equal up to |PRECISION|, the size of the mechanism is compared. 
- ''' + """ - def __init__(self, mip): - self._mip = mip + def __init__(self, ria): + self._ria = ria @property def alpha(self): - '''float: The difference between the mechanism's unpartitioned and + """float: The difference between the mechanism's unpartitioned and partitioned actual probabilities. - ''' - return self._mip.alpha + """ + return self._ria.alpha @property def phi(self): - '''Alias for |alpha| for PyPhi utility functions.''' + """Alias for |alpha| for PyPhi utility functions.""" return self.alpha @property def direction(self): - '''Direction: Either |PAST| or |FUTURE|.''' - return self._mip.direction + """Direction: Either |CAUSE| or |EFFECT|.""" + return self._ria.direction @property def mechanism(self): - '''list[int]: The mechanism for which the action is evaluated.''' - return self._mip.mechanism + """list[int]: The mechanism for which the action is evaluated.""" + return self._ria.mechanism @property def purview(self): - '''list[int]: The purview over which this mechanism's |alpha| is + """list[int]: The purview over which this mechanism's |alpha| is maximal. - ''' - return self._mip.purview + """ + return self._ria.purview @property - def mip(self): - '''AcMip: The minimum information partition for this mechanism.''' - return self._mip + def ria(self): + """AcRepertoireIrreducibilityAnalysis: The irreducibility analysis for + this mechanism. 
+ """ + return self._ria def __repr__(self): - return fmt.make_repr(self, ['mip']) + return fmt.make_repr(self, ['ria']) def __str__(self): - return "CausalLink\n" + fmt.indent(fmt.fmt_ac_mip(self.mip)) + return "CausalLink\n" + fmt.indent(fmt.fmt_ac_ria(self.ria)) - unorderable_unless_eq = AcMip.unorderable_unless_eq + unorderable_unless_eq = \ + AcRepertoireIrreducibilityAnalysis.unorderable_unless_eq def order_by(self): - return self.mip.order_by() + return self.ria.order_by() def __eq__(self, other): - return self.mip == other.mip + return self.ria == other.ria def __hash__(self): - return hash(('CausalLink', self._mip)) + return hash(('CausalLink', self._ria)) def __bool__(self): - '''An |CausalLink| is ``True`` if |alpha > 0|.''' + """An |CausalLink| is ``True`` if |alpha > 0|.""" return greater_than_zero(self.alpha) def to_json(self): - '''Return a JSON-serializable representation.''' - return {'mip': self.mip} + """Return a JSON-serializable representation.""" + return {'ria': self.ria} class Event(namedtuple('Event', ['actual_cause', 'actual_effect'])): - '''A mechanism which has both an actual cause and an actual effect. + """A mechanism which has both an actual cause and an actual effect. Attributes: actual_cause (CausalLink): The actual cause of the mechanism. actual_effect (CausalLink): The actual effect of the mechanism. - ''' + """ + @property def mechanism(self): - '''The mechanism of the event.''' + """The mechanism of the event.""" assert self.actual_cause.mechanism == self.actual_effect.mechanism return self.actual_cause.mechanism class Account(tuple): - '''The set of |CausalLinks| with |alpha > 0|. This includes both actual - causes and actual effects.''' + """The set of |CausalLinks| with |alpha > 0|. This includes both actual + causes and actual effects. 
+ """ @property def irreducible_causes(self): - '''The set of irreducible causes in this |Account|.''' - return tuple(link for link in self if link.direction is Direction.PAST) + """The set of irreducible causes in this |Account|.""" + return tuple(link for link in self + if link.direction is Direction.CAUSE) @property def irreducible_effects(self): - '''The set of irreducible effects in this |Account|.''' + """The set of irreducible effects in this |Account|.""" return tuple(link for link in self - if link.direction is Direction.FUTURE) + if link.direction is Direction.EFFECT) def __repr__(self): if config.REPR_VERBOSITY > 0: @@ -222,54 +238,60 @@ def from_json(cls, dct): class DirectedAccount(Account): - '''The set of |CausalLinks| with |alpha > 0| for one direction of a - transition.''' + """The set of |CausalLinks| with |alpha > 0| for one direction of a + transition. + """ + pass -_acbigmip_attributes = ['alpha', 'direction', 'unpartitioned_account', - 'partitioned_account', 'transition', 'cut'] +_ac_sia_attributes = ['alpha', 'direction', 'account', 'partitioned_account', + 'transition', 'cut'] # TODO(slipperyhank): Check if we do the same, i.e. take the bigger system, or # take the smaller? -class AcBigMip(cmp.Orderable): - '''A minimum information partition for |big_alpha| calculation. +class AcSystemIrreducibilityAnalysis(cmp.Orderable): + """An analysis of transition-level irreducibility (|big_alpha|). + + Contains the |big_alpha| value of the |Transition|, the causal account, and + all the intermediate results obtained in the course of computing them. Attributes: alpha (float): The |big_alpha| value for the transition when taken - against this MIP, *i.e.* the difference between the unpartitioned - account and this MIP's partitioned account. - unpartitioned_account (Account): The account of the whole transition. + against this analysis, *i.e.* the difference between the + unpartitioned account and this analysis's partitioned account. 
+ account (Account): The account of the whole transition. partitioned_account (Account): The account of the partitioned transition. - transition (Transition): The transition this MIP was calculated for. + transition (Transition): The transition this analysis was calculated + for. cut (ActualCut): The minimal partition. - ''' + """ - def __init__(self, alpha=None, direction=None, unpartitioned_account=None, + def __init__(self, alpha=None, direction=None, account=None, partitioned_account=None, transition=None, cut=None): self.alpha = alpha self.direction = direction - self.unpartitioned_account = unpartitioned_account + self.account = account self.partitioned_account = partitioned_account self.transition = transition self.cut = cut def __repr__(self): - return fmt.make_repr(self, _acbigmip_attributes) + return fmt.make_repr(self, _ac_sia_attributes) def __str__(self): - return fmt.fmt_ac_big_mip(self) + return fmt.fmt_ac_sia(self) @property def before_state(self): - '''Return the actual past state of the |Transition|.''' + """Return the actual previous state of the |Transition|.""" return self.transition.before_state @property def after_state(self): - '''Return the actual current state of the |Transition|.''' + """Return the actual current state of the |Transition|.""" return self.transition.after_state unorderable_unless_eq = ['direction'] @@ -279,25 +301,31 @@ def order_by(self): return [self.alpha, len(self.transition)] def __eq__(self, other): - return cmp.general_eq(self, other, _acbigmip_attributes) + return cmp.general_eq(self, other, _ac_sia_attributes) def __bool__(self): - '''An |AcBigMip| is ``True`` if it has |big_alpha > 0|.''' + """An |AcSystemIrreducibilityAnalysis| is ``True`` if it has + |big_alpha > 0|. 
+ """ return greater_than_zero(self.alpha) def __hash__(self): - return hash((self.alpha, self.unpartitioned_account, + return hash((self.alpha, self.account, self.partitioned_account, self.transition, self.cut)) def to_json(self): - return {attr: getattr(self, attr) for attr in _acbigmip_attributes} - - -def _null_ac_bigmip(transition, direction, alpha=0.0): - '''Returns an |AcBigMip| with zero |big_alpha| and empty accounts.''' - return AcBigMip(transition=transition, - direction=direction, - alpha=alpha, - unpartitioned_account=(), - partitioned_account=()) + return {attr: getattr(self, attr) for attr in _ac_sia_attributes} + + +def _null_ac_sia(transition, direction, alpha=0.0): + """Return an |AcSystemIrreducibilityAnalysis| with zero |big_alpha| and + empty accounts. + """ + return AcSystemIrreducibilityAnalysis( + transition=transition, + direction=direction, + alpha=alpha, + account=(), + partitioned_account=() + ) diff --git a/pyphi/models/big_phi.py b/pyphi/models/big_phi.py deleted file mode 100644 index a326cc909..000000000 --- a/pyphi/models/big_phi.py +++ /dev/null @@ -1,105 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# models/big_phi.py - -'''Objects that represent cause-effect structures.''' - -from . import cmp, fmt -from .. import utils - -# pylint: disable=too-many-arguments - -_bigmip_attributes = ['phi', 'unpartitioned_constellation', - 'partitioned_constellation', 'subsystem', - 'cut_subsystem'] - - -class BigMip(cmp.Orderable): - '''A minimum information partition for |big_phi| calculation. - - These can be compared with the built-in Python comparison operators (``<``, - ``>``, etc.). First, |big_phi| values are compared. Then, if these are - equal up to |PRECISION|, the one with the larger subsystem is greater. - - Attributes: - phi (float): The |big_phi| value for the subsystem when taken against - this MIP, *i.e.* the difference between the unpartitioned - constellation and this MIP's partitioned constellation. 
- unpartitioned_constellation (Constellation): The constellation of the - whole subsystem. - partitioned_constellation (Constellation): The constellation when the - subsystem is cut. - subsystem (Subsystem): The subsystem this MIP was calculated for. - cut_subsystem (Subsystem): The subsystem with the minimal cut applied. - time (float): The number of seconds it took to calculate. - small_phi_time (float): The number of seconds it took to calculate the - unpartitioned constellation. - ''' - - def __init__(self, phi=None, unpartitioned_constellation=None, - partitioned_constellation=None, subsystem=None, - cut_subsystem=None, time=None, small_phi_time=None): - self.phi = phi - self.unpartitioned_constellation = unpartitioned_constellation - self.partitioned_constellation = partitioned_constellation - self.subsystem = subsystem - self.cut_subsystem = cut_subsystem - self.time = time - self.small_phi_time = small_phi_time - - def __repr__(self): - return fmt.make_repr(self, _bigmip_attributes) - - def __str__(self, constellations=True): - return fmt.fmt_big_mip(self, constellations=constellations) - - def print(self, constellations=True): - '''Print this ``BigMip``, optionally without constellations.''' - print(self.__str__(constellations=constellations)) - - @property - def cut(self): - '''The unidirectional cut that makes the least difference to the - subsystem. 
- ''' - return self.cut_subsystem.cut - - @property - def network(self): - '''The network this |BigMip| belongs to.''' - return self.subsystem.network - - unorderable_unless_eq = ['network'] - - def order_by(self): - return [self.phi, len(self.subsystem), self.subsystem.node_indices] - - def __eq__(self, other): - return cmp.general_eq(self, other, _bigmip_attributes) - - def __bool__(self): - '''A |BigMip| is ``True`` if it has |big_phi > 0|.''' - return not utils.eq(self.phi, 0) - - def __hash__(self): - return hash((self.phi, - self.unpartitioned_constellation, - self.partitioned_constellation, - self.subsystem, - self.cut_subsystem)) - - def to_json(self): - '''Return a JSON-serializable representation.''' - return { - attr: getattr(self, attr) - for attr in _bigmip_attributes + ['time', 'small_phi_time'] - } - - -def _null_bigmip(subsystem, phi=0.0): - '''Return a |BigMip| with zero |big_phi| and empty constellations. - - This is the MIP associated with a reducible subsystem. - ''' - return BigMip(subsystem=subsystem, cut_subsystem=subsystem, phi=phi, - unpartitioned_constellation=(), partitioned_constellation=()) diff --git a/pyphi/models/cmp.py b/pyphi/models/cmp.py index b67770a23..96253e5a4 100644 --- a/pyphi/models/cmp.py +++ b/pyphi/models/cmp.py @@ -2,9 +2,9 @@ # -*- coding: utf-8 -*- # models/cmp.py -''' +""" Utilities for comparing phi-objects. -''' +""" import functools from collections import Iterable @@ -13,17 +13,18 @@ from .. import utils + # Rich comparison (ordering) helpers # ============================================================================= def sametype(func): - '''Method decorator to return ``NotImplemented`` if the args of the wrapped + """Method decorator to return ``NotImplemented`` if the args of the wrapped method are of different types. When wrapping a rich model comparison method this will delegate (reflect) the comparison to the right-hand-side object, or fallback by passing it up the inheritance tree. 
- ''' + """ @functools.wraps(func) def wrapper(self, other): # pylint: disable=missing-docstring if type(other) is not type(self): @@ -33,7 +34,7 @@ def wrapper(self, other): # pylint: disable=missing-docstring class Orderable: - '''Base mixin for implementing rich object comparisons on phi-objects. + """Base mixin for implementing rich object comparisons on phi-objects. Both ``__eq__`` and `order_by`` need to be implemented on the subclass. The ``order_by`` method returns a list of attributes which are compared @@ -41,22 +42,23 @@ class Orderable: Subclasses can optionally set a value for `unorderable_unless_eq`. This attribute controls whether objects are orderable: if all attributes listed - in `unorderable_unless_eq` are not equal then the objects are not - orderable with respect to one another and a TypeError is raised. For - example: it doesn't make sense to compare ``Concepts`` unless they are - from the same ``Subsystem`` or compare ``Mips`` with different directions. - ''' + in `unorderable_unless_eq` are not equal then the objects are not orderable + with respect to one another and a TypeError is raised. For example: it + doesn't make sense to compare ``Concepts`` unless they are from the same + ``Subsystem`` or compare ``MechanismIrreducibilityAnalyses`` with different + directions. + """ # pylint: disable=too-few-public-methods # The object is not orderable unless these attributes are all equal unorderable_unless_eq = [] def order_by(self): - '''Return a list of values to compare for ordering. + """Return a list of values to compare for ordering. The first value in the list has the greatest priority; if the first objects are equal the second object is compared, etc. 
- ''' + """ raise NotImplementedError @sametype @@ -93,13 +95,13 @@ def __ne__(self, other): # TODO use builtin numpy methods here def numpy_aware_eq(a, b): - '''Return whether two objects are equal via recursion, using + """Return whether two objects are equal via recursion, using :func:`numpy.array_equal` for comparing numpy arays. - ''' + """ if isinstance(a, np.ndarray) or isinstance(b, np.ndarray): return np.array_equal(a, b) - if ((isinstance(a, Iterable) and isinstance(b, Iterable)) - and not isinstance(a, str) and not isinstance(b, str)): + if ((isinstance(a, Iterable) and isinstance(b, Iterable)) and + not isinstance(a, str) and not isinstance(b, str)): if len(a) != len(b): return False return all(numpy_aware_eq(x, y) for x, y in zip(a, b)) @@ -107,13 +109,13 @@ def numpy_aware_eq(a, b): def general_eq(a, b, attributes): - '''Return whether two objects are equal up to the given attributes. + """Return whether two objects are equal up to the given attributes. If an attribute is called ``'phi'``, it is compared up to |PRECISION|. If an attribute is called ``'mechanism'`` or ``'purview'``, it is compared using set equality. All other attributes are compared with :func:`numpy_aware_eq`. - ''' + """ try: for attr in attributes: _a, _b = getattr(a, attr), getattr(b, attr) diff --git a/pyphi/models/cuts.py b/pyphi/models/cuts.py index eba6e5d12..91f0cf7d1 100644 --- a/pyphi/models/cuts.py +++ b/pyphi/models/cuts.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # models/cuts.py -'''Objects that represent partitions of sets of nodes.''' +"""Objects that represent partitions of sets of nodes.""" from collections import namedtuple from itertools import chain @@ -14,18 +14,19 @@ class _CutBase: - '''Base class for all unidirectional system cuts. + """Base class for all unidirectional system cuts. Concrete cut classes must implement a ``cut_matrix`` method and an ``indices`` property. See ``Cut`` for a concrete example. 
- ''' + """ + @property def indices(self): - '''Return the indices of this cut.''' + """Indices of this cut.""" raise NotImplementedError def cut_matrix(self, n): - '''Return the cut matrix for this cut. + """Return the cut matrix for this cut. The cut matrix is a square matrix representing connections severed by the cut: if the connection from node `a` to node `b` is cut, @@ -33,40 +34,40 @@ def cut_matrix(self, n): Args: n (int): The size of the network. - ''' + """ raise NotImplementedError @property def is_null(self): - '''Is this cut a null cut? + """Is this cut a null cut? All concrete cuts should return ``False``. - ''' + """ return False def apply_cut(self, cm): - '''Return a modified connectivity matrix with all connections that are + """Return a modified connectivity matrix with all connections that are severed by this cut removed. Args: cm (np.ndarray): A connectivity matrix. - ''' + """ # Invert the cut matrix, creating a matrix of preserved connections inverse = np.logical_not(self.cut_matrix(cm.shape[0])).astype(int) return cm * inverse def cuts_connections(self, a, b): - '''Check if this cut severs any connections from ``a`` to ``b``. + """Check if this cut severs any connections from ``a`` to ``b``. Args: a (tuple[int]): A set of nodes. b (tuple[int]): A set of nodes. - ''' + """ n = max(self.indices) + 1 return self.cut_matrix(n)[np.ix_(a, b)].any() def splits_mechanism(self, mechanism): - '''Check if this cut splits a mechanism. + """Check if this cut splits a mechanism. Args: mechanism (tuple[int]): The mechanism in question. @@ -74,37 +75,37 @@ def splits_mechanism(self, mechanism): Returns: bool: ``True`` if `mechanism` has elements on both sides of the cut; ``False`` otherwise. - ''' + """ return self.cuts_connections(mechanism, mechanism) def all_cut_mechanisms(self): - '''Return all mechanisms with elements on both sides of this cut. + """Return all mechanisms with elements on both sides of this cut. 
Returns: tuple[tuple[int]] - ''' + """ all_mechanisms = utils.powerset(self.indices, nonempty=True) return tuple(m for m in all_mechanisms if self.splits_mechanism(m)) class NullCut(_CutBase): - '''The cut that does nothing.''' + """The cut that does nothing.""" def __init__(self, indices): self._indices = indices @property def is_null(self): - '''This is the only cut where ``is_null == True``.''' + """This is the only cut where ``is_null == True``.""" return True @property def indices(self): - '''Indices of the cut.''' + """Indices of the cut.""" return self._indices def cut_matrix(self, n): - '''Return a matrix of zeros.''' + """Return a matrix of zeros.""" return np.zeros((n, n)) def to_json(self): @@ -125,25 +126,26 @@ def __hash__(self): class Cut(namedtuple('Cut', ['from_nodes', 'to_nodes']), _CutBase): - '''Represents a unidirectional cut. + """Represents a unidirectional cut. Attributes: from_nodes (tuple[int]): Connections from this group of nodes to those in ``to_nodes`` are from_nodes. to_nodes (tuple[int]): Connections to this group of nodes from those in ``from_nodes`` are from_nodes. - ''' + """ # Don't construct an attribute dictionary; see # https://docs.python.org/3.3/reference/datamodel.html#notes-on-using-slots + __slots__ = () @property def indices(self): - '''Returns the indices of this cut.''' + """Indices of this cut.""" return tuple(sorted(set(self[0] + self[1]))) def cut_matrix(self, n): - '''Compute the cut matrix for this cut. + """Compute the cut matrix for this cut. The cut matrix is a square matrix which represents connections severed by the cut. 
@@ -157,7 +159,7 @@ def cut_matrix(self, n): array([[ 0., 0., 0.], [ 0., 0., 1.], [ 0., 0., 0.]]) - ''' + """ return connectivity.relevant_connections(n, self[0], self[1]) def __repr__(self): @@ -167,12 +169,12 @@ def __str__(self): return fmt.fmt_cut(self) def to_json(self): - '''Return a JSON-serializable representation.''' + """Return a JSON-serializable representation.""" return {'from_nodes': self.from_nodes, 'to_nodes': self.to_nodes} class KCut(_CutBase): - '''A cut that severs all connections between parts of a K-partition.''' + """A cut that severs all connections between parts of a K-partition.""" def __init__(self, direction, partition): self.direction = direction @@ -184,7 +186,7 @@ def indices(self): return self.partition.mechanism def cut_matrix(self, n): - '''The matrix of connections that are severed by this cut.''' + """The matrix of connections that are severed by this cut.""" cm = np.zeros((n, n)) for part in self.partition: @@ -215,7 +217,7 @@ def to_json(self): class ActualCut(KCut): - '''Represents an cut for a |Transition|.''' + """Represents an cut for a |Transition|.""" @property def indices(self): @@ -224,7 +226,7 @@ def indices(self): class Part(namedtuple('Part', ['mechanism', 'purview'])): - '''Represents one part of a |Bipartition|. + """Represents one part of a |Bipartition|. Attributes: mechanism (tuple[int]): The nodes in the mechanism for this part. @@ -239,41 +241,42 @@ class Part(namedtuple('Part', ['mechanism', 'purview'])): purview: B A,C This class represents one term in the above product. 
- ''' + """ __slots__ = () def to_json(self): - '''Return a JSON-serializable representation.''' + """Return a JSON-serializable representation.""" return {'mechanism': self.mechanism, 'purview': self.purview} class KPartition(tuple): - '''A partition with an arbitrary number of parts.''' + """A partition with an arbitrary number of parts.""" + __slots__ = () def __new__(cls, *args): - '''Construct the base tuple with multiple |Part| arguments.''' + """Construct the base tuple with multiple |Part| arguments.""" return super().__new__(cls, args) def __getnewargs__(self): - '''And support unpickling with this ``__new__`` signature.''' + """And support unpickling with this ``__new__`` signature.""" return tuple(self) @property def mechanism(self): - '''tuple[int]: The nodes of the mechanism in the partition.''' + """tuple[int]: The nodes of the mechanism in the partition.""" return tuple(sorted( chain.from_iterable(part.mechanism for part in self))) @property def purview(self): - '''tuple[int]: The nodes of the purview in the partition.''' + """tuple[int]: The nodes of the purview in the partition.""" return tuple(sorted( chain.from_iterable(part.purview for part in self))) def normalize(self): - '''Normalize the order of parts in the partition.''' + """Normalize the order of parts in the partition.""" return type(self)(*sorted(self)) def __str__(self): @@ -294,16 +297,17 @@ def from_json(cls, dct): class Bipartition(KPartition): - '''A bipartition of a mechanism and purview. + """A bipartition of a mechanism and purview. Attributes: part0 (Part): The first part of the partition. part1 (Part): The second part of the partition. 
- ''' + """ + __slots__ = () def to_json(self): - '''Return a JSON-serializable representation.''' + """Return a JSON-serializable representation.""" return {'part0': self[0], 'part1': self[1]} @classmethod @@ -312,5 +316,6 @@ def from_json(cls, dct): class Tripartition(KPartition): + """A partition with three parts.""" __slots__ = () diff --git a/pyphi/models/fmt.py b/pyphi/models/fmt.py index e4b4bb24d..a0abb645c 100644 --- a/pyphi/models/fmt.py +++ b/pyphi/models/fmt.py @@ -2,9 +2,9 @@ # -*- coding: utf-8 -*- # models/fmt.py -''' +""" Helper functions for formatting pretty representations of PyPhi models. -''' +""" from fractions import Fraction from itertools import chain @@ -45,7 +45,7 @@ def make_repr(self, attrs): - '''Construct a repr string. + """Construct a repr string. If `config.REPR_VERBOSITY` is ``1`` or ``2``, this function calls the object's __str__ method. Although this breaks the convention that __repr__ @@ -60,7 +60,7 @@ def make_repr(self, attrs): Returns: str: the ``repr``esentation of the object - ''' + """ # TODO: change this to a closure so we can do # __repr__ = make_repr(attrs) ??? @@ -73,9 +73,11 @@ def make_repr(self, attrs): ', '.join(attr + '=' + repr(getattr(self, attr)) for attr in attrs)) + raise ValueError('Invalid value for `config.REPR_VERBOSITY`') + def indent(lines, amount=2, char=' '): - '''Indent a string. + r"""Indent a string. Prepends whitespace to every line in the passed string. (Lines are separated by newline characters.) @@ -91,22 +93,22 @@ def indent(lines, amount=2, char=' '): str: The indented string. Example: - >>> print(indent('line1\\nline2', char='*')) + >>> print(indent('line1\nline2', char='*')) **line1 **line2 - ''' + """ lines = str(lines) padding = amount * char return padding + ('\n' + padding).join(lines.split('\n')) def margin(text): - '''Add a margin to both ends of each line in the string. + r"""Add a margin to both ends of each line in the string. 
Example: - >>> margin('line1\\nline2') - ' line1 \\n line2 ' - ''' + >>> margin('line1\nline2') + ' line1 \n line2 ' + """ lines = str(text).split('\n') return '\n'.join(' {} '.format(l) for l in lines) @@ -115,15 +117,15 @@ def margin(text): def box(text): - '''Wrap a chunk of text in a box. + r"""Wrap a chunk of text in a box. Example: - >>> print(box('line1\\nline2')) + >>> print(box('line1\nline2')) ┌───────┐ │ line1 │ │ line2 │ └───────┘ - ''' + """ lines = text.split('\n') width = max(len(l) for l in lines) @@ -138,18 +140,18 @@ def box(text): def side_by_side(left, right): - '''Put two boxes next to each other. + r"""Put two boxes next to each other. Assumes that all lines in the boxes are the same width. Example: - >>> left = 'A \\nC ' - >>> right = 'B\\nD' + >>> left = 'A \nC ' + >>> right = 'B\nD' >>> print(side_by_side(left, right)) A B C D - ''' + """ left_lines = list(left.split('\n')) right_lines = list(right.split('\n')) @@ -166,10 +168,10 @@ def side_by_side(left, right): def header(head, text, over_char=None, under_char=None, center=True): - '''Center a head over a block of text. + """Center a head over a block of text. The width of the text is the width of the longest line of the text. - ''' + """ lines = list(text.split('\n')) width = max(len(l) for l in lines) @@ -191,18 +193,18 @@ def header(head, text, over_char=None, under_char=None, center=True): def labels(indices, subsystem=None): - '''Get the labels for a tuple of mechanism indices.''' + """Get the labels for a tuple of mechanism indices.""" if subsystem is None: return tuple(map(str, indices)) return subsystem.indices2labels(indices) def fmt_number(p): - '''Format a number. + """Format a number. It will be printed as a fraction if the denominator isn't too big and as a decimal otherwise. 
- ''' + """ formatted = '{:n}'.format(p) if not config.PRINT_FRACTIONS: @@ -218,19 +220,19 @@ def fmt_number(p): def fmt_mechanism(indices, subsystem=None): - '''Format a mechanism or purview.''' + """Format a mechanism or purview.""" return '[' + ', '.join(labels(indices, subsystem)) + ']' def fmt_part(part, subsystem=None): - '''Format a |Part|. + """Format a |Part|. The returned string looks like:: 0,1 ─── ∅ - ''' + """ def nodes(x): # pylint: disable=missing-docstring return ','.join(labels(x, subsystem)) if x else EMPTY_SET @@ -248,7 +250,7 @@ def nodes(x): # pylint: disable=missing-docstring def fmt_bipartition(partition, subsystem=None): - '''Format a |Bipartition|. + """Format a |Bipartition|. The returned string looks like:: @@ -261,7 +263,7 @@ def fmt_bipartition(partition, subsystem=None): Returns: str: A human-readable string representation of the partition. - ''' + """ if not partition: return '' @@ -280,13 +282,13 @@ def fmt_bipartition(partition, subsystem=None): return ''.join(chain.from_iterable(zip(*elements))) -def fmt_constellation(c, title=None): - '''Format a constellation.''' +def fmt_ces(c, title=None): + """Format a |CauseEffectStructure|.""" if not c: return '()\n' if title is None: - title = 'Constellation' + title = 'Cause-effect structure' concepts = '\n'.join(margin(x) for x in c) + '\n' title = '{} ({} concept{})'.format( @@ -296,15 +298,17 @@ def fmt_constellation(c, title=None): def fmt_concept(concept): - '''Format a |Concept|.''' + """Format a |Concept|.""" def fmt_cause_or_effect(x): # pylint: disable=missing-docstring if not x: return '' - return box(indent(fmt_mip(x.mip, verbose=False), amount=1)) + return box(indent(fmt_ria(x.ria, verbose=False, mip=True), amount=1)) - cause = header('Cause', fmt_cause_or_effect(concept.cause)) - effect = header('Effect', fmt_cause_or_effect(concept.effect)) + cause = header('MIC', + fmt_cause_or_effect(concept.cause)) + effect = header('MIE', + fmt_cause_or_effect(concept.effect)) ce = 
side_by_side(cause, effect) mechanism = fmt_mechanism(concept.mechanism, concept.subsystem) @@ -317,29 +321,30 @@ def fmt_cause_or_effect(x): # pylint: disable=missing-docstring return header(title, ce, HEADER_BAR_2, HEADER_BAR_2, center=center) -def fmt_mip(mip, verbose=True): - '''Format a |Mip|.''' - if mip is False or mip is None: # mips can be Falsy +def fmt_ria(ria, verbose=True, mip=False): + """Format a |RepertoireIrreducibilityAnalysis|.""" + if ria is False or ria is None: # RIAs can be Falsy return '' if verbose: mechanism = 'Mechanism: {}\n'.format( - fmt_mechanism(mip.mechanism, mip.subsystem)) - direction = '\nDirection: {}\n'.format(mip.direction) + fmt_mechanism(ria.mechanism, ria.subsystem)) + direction = '\nDirection: {}'.format(ria.direction) else: mechanism = '' direction = '' if config.REPR_VERBOSITY is HIGH: - partition = '\nPartition:\n{}'.format( - indent(fmt_bipartition(mip.partition, mip.subsystem))) - unpartitioned_repertoire = '\nUnpartitioned Repertoire:\n{}'.format( - indent(fmt_repertoire(mip.unpartitioned_repertoire))) - partitioned_repertoire = '\nPartitioned Repertoire:\n{}'.format( - indent(fmt_repertoire(mip.partitioned_repertoire))) + partition = '\n{}:\n{}'.format( + ('MIP' if mip else 'Partition'), + indent(fmt_bipartition(ria.partition, ria.subsystem))) + repertoire = '\nRepertoire:\n{}'.format( + indent(fmt_repertoire(ria.repertoire))) + partitioned_repertoire = '\nPartitioned repertoire:\n{}'.format( + indent(fmt_repertoire(ria.partitioned_repertoire))) else: partition = '' - unpartitioned_repertoire = '' + repertoire = '' partitioned_repertoire = '' # TODO? 
print the two repertoires side-by-side @@ -347,22 +352,22 @@ def fmt_mip(mip, verbose=True): '{SMALL_PHI} = {phi}\n' '{mechanism}' 'Purview = {purview}' - '{partition}' '{direction}' - '{unpartitioned_repertoire}' + '{partition}' + '{repertoire}' '{partitioned_repertoire}').format( SMALL_PHI=SMALL_PHI, mechanism=mechanism, - purview=fmt_mechanism(mip.purview, mip.subsystem), + purview=fmt_mechanism(ria.purview, ria.subsystem), direction=direction, - phi=fmt_number(mip.phi), + phi=fmt_number(ria.phi), partition=partition, - unpartitioned_repertoire=unpartitioned_repertoire, + repertoire=repertoire, partitioned_repertoire=partitioned_repertoire) def fmt_cut(cut, subsystem=None): - '''Format a |Cut|.''' + """Format a |Cut|.""" # HACK HACK # TODO: fix this mess. from .cuts import KCut, NullCut @@ -389,39 +394,39 @@ def fmt_cut(cut, subsystem=None): def fmt_kcut(cut): - '''Format a |KCut|.''' + """Format a |KCut|.""" return 'KCut {}\n{}'.format(cut.direction, cut.partition) -def fmt_big_mip(big_mip, constellations=True): - '''Format a |BigMip|.''' - if constellations: +def fmt_sia(sia, ces=True): + """Format a |SystemIrreducibilityAnalysis|.""" + if ces: body = ( - '{unpartitioned_constellation}' - '{partitioned_constellation}'.format( - unpartitioned_constellation=fmt_constellation( - big_mip.unpartitioned_constellation, - 'Unpartitioned Constellation'), - partitioned_constellation=fmt_constellation( - big_mip.partitioned_constellation, - 'Partitioned Constellation'))) + '{ces}' + '{partitioned_ces}'.format( + ces=fmt_ces( + sia.ces, + 'Cause-effect structure'), + partitioned_ces=fmt_ces( + sia.partitioned_ces, + 'Partitioned cause-effect structure'))) center_header = True else: body = '' center_header = False - title = 'Big Mip: {BIG_PHI} = {phi}'.format( - BIG_PHI=BIG_PHI, phi=fmt_number(big_mip.phi)) + title = 'System irreducibility analysis: {BIG_PHI} = {phi}'.format( + BIG_PHI=BIG_PHI, phi=fmt_number(sia.phi)) - cut = fmt_cut(big_mip.cut, big_mip.subsystem) + cut 
= fmt_cut(sia.cut, sia.subsystem) - body = header(str(big_mip.subsystem), body, center=center_header) + body = header(str(sia.subsystem), body, center=center_header) body = header(cut, body, center=center_header) return box(header(title, body, center=center_header)) def fmt_repertoire(r): - '''Format a repertoire.''' + """Format a repertoire.""" # TODO: will this get unwieldy with large repertoires? if r is None: return '' @@ -448,27 +453,26 @@ def fmt_repertoire(r): return box('\n'.join(lines)) -def fmt_ac_mip(mip): - '''Format an AcMip.''' - if mip is None: +def fmt_ac_ria(ria): + """Format an AcRepertoireIrreducibilityAnalysis.""" + if ria is None: return '' causality = { # TODO: use node labels - Direction.PAST: (str(mip.purview), ARROW_LEFT, str(mip.mechanism)), - Direction.FUTURE: (str(mip.mechanism), ARROW_RIGHT, str(mip.purview)) - }[mip.direction] + Direction.CAUSE: (str(ria.purview), ARROW_LEFT, str(ria.mechanism)), + Direction.EFFECT: (str(ria.mechanism), ARROW_RIGHT, str(ria.purview)) + }[ria.direction] causality = ' '.join(causality) return '{ALPHA} = {alpha} {causality}'.format( ALPHA=ALPHA, - alpha=round(mip.alpha, 4), + alpha=round(ria.alpha, 4), causality=causality) def fmt_account(account, title=None): - '''Format an Account or a DirectedAccount.''' - + """Format an Account or a DirectedAccount.""" if title is None: title = account.__class__.__name__ # `Account` or `DirectedAccount` @@ -477,37 +481,39 @@ def fmt_account(account, title=None): body = '' body += 'Irreducible effects\n' - body += '\n'.join(fmt_ac_mip(m) for m in account.irreducible_effects) + body += '\n'.join(fmt_ac_ria(m) for m in account.irreducible_effects) body += '\nIrreducible causes\n' - body += '\n'.join(fmt_ac_mip(m) for m in account.irreducible_causes) + body += '\n'.join(fmt_ac_ria(m) for m in account.irreducible_causes) return '\n' + header(title, body, under_char='*') -def fmt_ac_big_mip(ac_big_mip): - '''Format a AcBigMip.''' +def fmt_ac_sia(ac_sia): + """Format a 
AcSystemIrreducibilityAnalysis.""" body = ( '{ALPHA} = {alpha}\n' - 'direction: {ac_big_mip.direction}\n' - 'transition: {ac_big_mip.transition}\n' - 'before state: {ac_big_mip.before_state}\n' - 'after state: {ac_big_mip.after_state}\n' - 'cut:\n{ac_big_mip.cut}\n' - '{unpartitioned_account}\n' + 'direction: {ac_sia.direction}\n' + 'transition: {ac_sia.transition}\n' + 'before state: {ac_sia.before_state}\n' + 'after state: {ac_sia.after_state}\n' + 'cut:\n{ac_sia.cut}\n' + '{account}\n' '{partitioned_account}'.format( ALPHA=ALPHA, - alpha=round(ac_big_mip.alpha, 4), - ac_big_mip=ac_big_mip, - unpartitioned_account=fmt_account( - ac_big_mip.unpartitioned_account, 'Unpartitioned Account'), + alpha=round(ac_sia.alpha, 4), + ac_sia=ac_sia, + account=fmt_account( + ac_sia.account, 'Account'), partitioned_account=fmt_account( - ac_big_mip.partitioned_account, 'Partitioned Account'))) + ac_sia.partitioned_account, 'Partitioned Account'))) - return box(header('AcBigMip', body, under_char=HORIZONTAL_BAR)) + return box(header('AcSystemIrreducibilityAnalysis', + body, + under_char=HORIZONTAL_BAR)) def fmt_transition(t): - '''Format a |Transition|.''' + """Format a |Transition|.""" return "Transition({} {} {})".format( fmt_mechanism(t.cause_indices, t.cause_system), ARROW_RIGHT, diff --git a/pyphi/models/concept.py b/pyphi/models/mechanism.py similarity index 58% rename from pyphi/models/concept.py rename to pyphi/models/mechanism.py index 7b44b4f9a..b130c1937 100644 --- a/pyphi/models/concept.py +++ b/pyphi/models/mechanism.py @@ -1,31 +1,34 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# models/concept.py +# models/mechanism.py -'''Objects that represent parts of cause-effect structures.''' +"""Mechanism-level objects.""" import numpy as np from . import cmp, fmt from .. 
import config, connectivity, distribution, utils +from ..direction import Direction +from ..exceptions import WrongDirectionError # pylint: disable=too-many-arguments,too-many-instance-attributes -_mip_attributes = ['phi', 'direction', 'mechanism', 'purview', 'partition', - 'unpartitioned_repertoire', 'partitioned_repertoire'] +_ria_attributes = ['phi', 'direction', 'mechanism', 'purview', 'partition', + 'repertoire', 'partitioned_repertoire'] -class Mip(cmp.Orderable): - '''A minimum information partition for |small_phi| calculation. +class RepertoireIrreducibilityAnalysis(cmp.Orderable): + """An analysis of the irreducibility (|small_phi|) of a mechanism over a + purview, for a given partition, in one temporal direction. These can be compared with the built-in Python comparison operators (``<``, ``>``, etc.). First, |small_phi| values are compared. Then, if these are equal up to |PRECISION|, the size of the mechanism is compared (see the |PICK_SMALLEST_PURVIEW| option in |config|.) - ''' + """ def __init__(self, phi, direction, mechanism, purview, partition, - unpartitioned_repertoire, partitioned_repertoire, + repertoire, partitioned_repertoire, subsystem=None): self._phi = phi self._direction = direction @@ -38,7 +41,7 @@ def _repertoire(repertoire): return None return np.array(repertoire) - self._unpartitioned_repertoire = _repertoire(unpartitioned_repertoire) + self._repertoire = _repertoire(repertoire) self._partitioned_repertoire = _repertoire(partitioned_repertoire) # Optional subsystem - only used to generate nice labeled reprs @@ -46,48 +49,51 @@ def _repertoire(repertoire): @property def phi(self): - '''float: This is the difference between the mechanism's unpartitioned + """float: This is the difference between the mechanism's unpartitioned and partitioned repertoires. 
- ''' + """ return self._phi @property def direction(self): - '''Direction: |PAST| or |FUTURE|.''' + """Direction: |CAUSE| or |EFFECT|.""" return self._direction @property def mechanism(self): - '''tuple[int]: The mechanism over which to evaluate the MIP.''' + """tuple[int]: The mechanism that was analyzed.""" return self._mechanism @property def purview(self): - '''tuple[int]: The purview over which the unpartitioned repertoire - differs the least from the partitioned repertoire.''' + """tuple[int]: The purview over which the mechanism was + analyzed. + """ return self._purview @property def partition(self): - '''KPartition: The partition that makes the least difference to the - mechanism's repertoire.''' + """KPartition: The partition of the mechanism-purview pair that was + analyzed. + """ return self._partition @property - def unpartitioned_repertoire(self): - '''np.ndarray: The unpartitioned repertoire of the mechanism.''' - return self._unpartitioned_repertoire + def repertoire(self): + """np.ndarray: The repertoire of the mechanism over the purview.""" + return self._repertoire @property def partitioned_repertoire(self): - '''np.ndarray: The partitioned repertoire of the mechanism. This is the - product of the repertoires of each part of the partition. - ''' + """np.ndarray: The partitioned repertoire of the mechanism over the + purview. This is the product of the repertoires of each part of the + partition. + """ return self._partitioned_repertoire @property def subsystem(self): - '''Subsystem: The |Subsystem| this MIP belongs to.''' + """Subsystem: The |Subsystem| the mechanism belongs to.""" return self._subsystem unorderable_unless_eq = ['direction'] @@ -100,13 +106,15 @@ def order_by(self): def __eq__(self, other): # We don't consider the partition and partitioned repertoire in - # checking for MIP equality. + # checking for RIA equality. 
attrs = ['phi', 'direction', 'mechanism', 'purview', - 'unpartitioned_repertoire'] + 'repertoire'] return cmp.general_eq(self, other, attrs) def __bool__(self): - '''A |Mip| is ``True`` if it has |small_phi > 0|.''' + """A |RepertoireIrreducibilityAnalysis| is ``True`` if it has + |small_phi > 0|. + """ return not utils.eq(self.phi, 0) def __hash__(self): @@ -114,137 +122,147 @@ def __hash__(self): self.direction, self.mechanism, self.purview, - utils.np_hash(self.unpartitioned_repertoire))) + utils.np_hash(self.repertoire))) def __repr__(self): - return fmt.make_repr(self, _mip_attributes) + return fmt.make_repr(self, _ria_attributes) def __str__(self): - return "MIP\n" + fmt.indent(fmt.fmt_mip(self)) + return ("Repertoire irreducibility analysis\n" + + fmt.indent(fmt.fmt_ria(self))) def to_json(self): - return {attr: getattr(self, attr) for attr in _mip_attributes} + return {attr: getattr(self, attr) for attr in _ria_attributes} -def _null_mip(direction, mechanism, purview, unpartitioned_repertoire=None): - '''The null MIP (of a reducible mechanism).''' +def _null_ria(direction, mechanism, purview, repertoire=None): + """The irreducibility analysis for a reducible mechanism.""" # TODO Use properties here to infer mechanism and purview from # partition yet access them with .mechanism and .partition - return Mip(direction=direction, - mechanism=mechanism, - purview=purview, - partition=None, - unpartitioned_repertoire=unpartitioned_repertoire, - partitioned_repertoire=None, - phi=0.0) + return RepertoireIrreducibilityAnalysis( + direction=direction, + mechanism=mechanism, + purview=purview, + partition=None, + repertoire=repertoire, + partitioned_repertoire=None, + phi=0.0 + ) # ============================================================================= -class Mice(cmp.Orderable): - '''A maximally irreducible cause or effect. +class MaximallyIrreducibleCauseOrEffect(cmp.Orderable): + """A maximally irreducible cause or effect (MICE). 
These can be compared with the built-in Python comparison operators (``<``, ``>``, etc.). First, |small_phi| values are compared. Then, if these are equal up to |PRECISION|, the size of the mechanism is compared (see the |PICK_SMALLEST_PURVIEW| option in |config|.) - ''' + """ - def __init__(self, mip): - self._mip = mip + def __init__(self, ria): + self._ria = ria @property def phi(self): - '''float: The difference between the mechanism's unpartitioned and + """float: The difference between the mechanism's unpartitioned and partitioned repertoires. - ''' - return self._mip.phi + """ + return self._ria.phi @property def direction(self): - '''Direction: |PAST| or |FUTURE|.''' - return self._mip.direction + """Direction: |CAUSE| or |EFFECT|.""" + return self._ria.direction @property def mechanism(self): - '''list[int]: The mechanism for which the MICE is evaluated.''' - return self._mip.mechanism + """list[int]: The mechanism for which the MICE is evaluated.""" + return self._ria.mechanism @property def purview(self): - '''list[int]: The purview over which this mechanism's |small_phi| is + """list[int]: The purview over which this mechanism's |small_phi| is maximal. - ''' - return self._mip.purview + """ + return self._ria.purview @property - def partition(self): - '''KPartition: The partition that makes the least difference to the - mechanism's repertoire.''' - return self._mip.partition + def mip(self): + """KPartition: The partition that makes the least difference to the + mechanism's repertoire. + """ + return self._ria.partition @property def repertoire(self): - '''np.ndarray: The unpartitioned repertoire of the mechanism over the + """np.ndarray: The unpartitioned repertoire of the mechanism over the purview. 
- ''' - return self._mip.unpartitioned_repertoire + """ + return self._ria.repertoire @property def partitioned_repertoire(self): - '''np.ndarray: The partitioned repertoire of the mechanism over the + """np.ndarray: The partitioned repertoire of the mechanism over the purview. - ''' - return self._mip.partitioned_repertoire + """ + return self._ria.partitioned_repertoire @property - def mip(self): - '''MIP: The minimum information partition for this mechanism.''' - return self._mip + def ria(self): + """RepertoireIrreducibilityAnalysis: The irreducibility analysis for + this mechanism. + """ + return self._ria def __repr__(self): - return fmt.make_repr(self, ['mip']) + return fmt.make_repr(self, ['ria']) def __str__(self): - return "Mice\n" + fmt.indent(fmt.fmt_mip(self.mip)) + return ( + "Maximally-irreducible {}\n".format(str(self.direction).lower()) + + fmt.indent(fmt.fmt_ria(self.ria, mip=True)) + ) - unorderable_unless_eq = Mip.unorderable_unless_eq + unorderable_unless_eq = \ + RepertoireIrreducibilityAnalysis.unorderable_unless_eq def order_by(self): - return self.mip.order_by() + return self.ria.order_by() def __eq__(self, other): - return self.mip == other.mip + return self.ria == other.ria def __hash__(self): - return hash(('Mice', self._mip)) + return hash(('MICE', self._ria)) def to_json(self): - return {'mip': self.mip} + return {'ria': self.ria} def _relevant_connections(self, subsystem): - '''Identify connections that “matter” to this concept. + """Identify connections that “matter” to this concept. - For a core cause, the important connections are those which connect the - purview to the mechanism; for a core effect they are the connections - from the mechanism to the purview. + For a |MIC|, the important connections are those which connect the + purview to the mechanism; for a |MIE| they are the connections from the + mechanism to the purview. 
Returns an |N x N| matrix, where `N` is the number of nodes in this corresponding subsystem, that identifies connections that “matter” to - this |Mice|: + this MICE: - ``direction == Direction.PAST``: + ``direction == Direction.CAUSE``: ``relevant_connections[i,j]`` is ``1`` if node ``i`` is in the cause purview and node ``j`` is in the mechanism (and ``0`` otherwise). - ``direction == Direction.FUTURE``: + ``direction == Direction.EFFECT``: ``relevant_connections[i,j]`` is ``1`` if node ``i`` is in the mechanism and node ``j`` is in the effect purview (and ``0`` otherwise). Args: - subsystem (Subsystem): The |Subsystem| of this |Mice|. + subsystem (Subsystem): The |Subsystem| of this MICE. Returns: np.ndarray: A |N x N| matrix of connections, where |N| is the size @@ -252,7 +270,7 @@ def _relevant_connections(self, subsystem): Raises: ValueError: If ``direction`` is invalid. - ''' + """ _from, to = self.direction.order(self.mechanism, self.purview) return connectivity.relevant_connections(subsystem.network.size, _from, to) @@ -260,16 +278,58 @@ def _relevant_connections(self, subsystem): # TODO: pass in `cut` instead? We can infer # subsystem indices from the cut itself, validate, and check. def damaged_by_cut(self, subsystem): - '''Return ``True`` if this |Mice| is affected by the subsystem's cut. + """Return ``True`` if this MICE is affected by the subsystem's cut. - The cut affects the |Mice| if it either splits the |Mice|'s mechanism + The cut affects the MICE if it either splits the MICE's mechanism or splits the connections between the purview and mechanism. - ''' + """ return (subsystem.cut.splits_mechanism(self.mechanism) or np.any(self._relevant_connections(subsystem) * subsystem.cut.cut_matrix(subsystem.network.size) == 1)) +class MaximallyIrreducibleCause(MaximallyIrreducibleCauseOrEffect): + """A maximally irreducible cause (MIC). + + These can be compared with the built-in Python comparison operators (``<``, + ``>``, etc.). 
First, |small_phi| values are compared. Then, if these are + equal up to |PRECISION|, the size of the mechanism is compared (see the + |PICK_SMALLEST_PURVIEW| option in |config|.) + """ + + def __init__(self, ria): + if ria.direction != Direction.CAUSE: + raise WrongDirectionError('A MIC must be initialized with a RIA ' + 'in the cause direction.') + super().__init__(ria) + + @property + def direction(self): + """Direction: |CAUSE|.""" + return self._ria.direction + + +class MaximallyIrreducibleEffect(MaximallyIrreducibleCauseOrEffect): + """A maximally irreducible effect (MIE). + + These can be compared with the built-in Python comparison operators (``<``, + ``>``, etc.). First, |small_phi| values are compared. Then, if these are + equal up to |PRECISION|, the size of the mechanism is compared (see the + |PICK_SMALLEST_PURVIEW| option in |config|.) + """ + + def __init__(self, ria): + if ria.direction != Direction.EFFECT: + raise WrongDirectionError('A MIE must be initialized with a RIA ' + 'in the effect direction.') + super().__init__(ria) + + @property + def direction(self): + """Direction: |EFFECT|.""" + return self._ria.direction + + # ============================================================================= _concept_attributes = ['phi', 'mechanism', 'cause', 'effect', 'subsystem'] @@ -278,7 +338,7 @@ def damaged_by_cut(self, subsystem): # TODO: make mechanism a property # TODO: make phi a property class Concept(cmp.Orderable): - '''The maximally irreducible cause and effect specified by a mechanism. + """The maximally irreducible cause and effect specified by a mechanism. These can be compared with the built-in Python comparison operators (``<``, ``>``, etc.). First, |small_phi| values are compared. Then, if these are @@ -286,11 +346,13 @@ class Concept(cmp.Orderable): Attributes: mechanism (tuple[int]): The mechanism that the concept consists of. - cause (Mice): The |Mice| representing the core cause of this concept. 
- effect (Mice): The |Mice| representing the core effect of this concept. + cause (MaximallyIrreducibleCause): The |MIC| representing the + maximally-irreducible cause of this concept. + effect (MaximallyIrreducibleEffect): The |MIE| representing the + maximally-irreducible effect of this concept. subsystem (Subsystem): This concept's parent subsystem. time (float): The number of seconds it took to calculate. - ''' + """ def __init__(self, mechanism=None, cause=None, effect=None, subsystem=None, time=None): @@ -308,31 +370,31 @@ def __str__(self): @property def phi(self): - '''float: The size of the concept. + """float: The size of the concept. - This is the minimum of the |small_phi| values of the concept's core - cause and core effect. - ''' + This is the minimum of the |small_phi| values of the concept's |MIC| + and |MIE|. + """ return min(self.cause.phi, self.effect.phi) @property def cause_purview(self): - '''tuple[int]: The cause purview.''' + """tuple[int]: The cause purview.""" return getattr(self.cause, 'purview', None) @property def effect_purview(self): - '''tuple[int]: The effect purview.''' + """tuple[int]: The effect purview.""" return getattr(self.effect, 'purview', None) @property def cause_repertoire(self): - '''np.ndarray: The cause repertoire.''' + """np.ndarray: The cause repertoire.""" return getattr(self.cause, 'repertoire', None) @property def effect_repertoire(self): - '''np.ndarray: The effect repertoire.''' + """np.ndarray: The effect repertoire.""" return getattr(self.effect, 'repertoire', None) unorderable_unless_eq = ['subsystem'] @@ -361,57 +423,57 @@ def __hash__(self): self.subsystem.network)) def __bool__(self): - '''A concept is ``True`` if |small_phi > 0|.''' + """A concept is ``True`` if |small_phi > 0|.""" return not utils.eq(self.phi, 0) def eq_repertoires(self, other): - '''Return whether this concept has the same repertoires as another. + """Return whether this concept has the same repertoires as another. .. 
warning:: This only checks if the cause and effect repertoires are equal as arrays; mechanisms, purviews, or even the nodes that the mechanism and purview indices refer to, might be different. - ''' + """ return ( np.array_equal(self.cause_repertoire, other.cause_repertoire) and np.array_equal(self.effect_repertoire, other.effect_repertoire)) def emd_eq(self, other): - '''Return whether this concept is equal to another in the context of + """Return whether this concept is equal to another in the context of an EMD calculation. - ''' + """ return (self.phi == other.phi and self.mechanism == other.mechanism and self.eq_repertoires(other)) # TODO Rename to expanded_cause_repertoire, etc def expand_cause_repertoire(self, new_purview=None): - '''See :meth:`~pyphi.subsystem.Subsystem.expand_repertoire`.''' + """See :meth:`~pyphi.subsystem.Subsystem.expand_repertoire`.""" return self.subsystem.expand_cause_repertoire( self.cause.repertoire, new_purview) def expand_effect_repertoire(self, new_purview=None): - '''See :meth:`~pyphi.subsystem.Subsystem.expand_repertoire`.''' + """See :meth:`~pyphi.subsystem.Subsystem.expand_repertoire`.""" return self.subsystem.expand_effect_repertoire( self.effect.repertoire, new_purview) def expand_partitioned_cause_repertoire(self): - '''See :meth:`~pyphi.subsystem.Subsystem.expand_repertoire`.''' + """See :meth:`~pyphi.subsystem.Subsystem.expand_repertoire`.""" return self.subsystem.expand_cause_repertoire( - self.cause.mip.partitioned_repertoire) + self.cause.ria.partitioned_repertoire) def expand_partitioned_effect_repertoire(self): - '''See :meth:`~pyphi.subsystem.Subsystem.expand_repertoire`.''' + """See :meth:`~pyphi.subsystem.Subsystem.expand_repertoire`.""" return self.subsystem.expand_effect_repertoire( - self.effect.mip.partitioned_repertoire) + self.effect.ria.partitioned_repertoire) def to_json(self): - '''Return a JSON-serializable representation.''' + """Return a JSON-serializable representation.""" dct = { attr: getattr(self, 
attr) for attr in _concept_attributes + ['time'] } - # These flattened, LOLI-order repertoires are passed to `vphi` via + # These flattened, little-endian repertoires are passed to `vphi` via # `phiserver`. dct.update({ 'expanded_cause_repertoire': distribution.flatten( @@ -435,73 +497,3 @@ def from_json(cls, dct): del dct['expanded_partitioned_effect_repertoire'] return cls(**dct) - - -class Constellation(tuple): - '''A constellation of concepts. - - This is a wrapper around a tuple to provide a nice string representation - and place to put constellation methods. Previously, constellations were - represented as a ``tuple[concept]``; this usage still works in all - functions. - ''' - # TODO: compare constellations using set equality - - def __new__(cls, concepts=()): - '''Normalize the order of concepts in the constellation.''' - return super().__new__(cls, sorted(concepts, key=_concept_sort_key)) - - def __repr__(self): - if config.REPR_VERBOSITY > 0: - return self.__str__() - - return "Constellation{}".format( - super().__repr__()) - - def __str__(self): - return fmt.fmt_constellation(self) - - def to_json(self): - return {'concepts': list(self)} - - @property - def mechanisms(self): - '''The mechanism of each concept.''' - return [concept.mechanism for concept in self] - - @property - def phis(self): - '''The |small_phi| values of each concept.''' - return [concept.phi for concept in self] - - @property - def labeled_mechanisms(self): - '''The labeled mechanism of each concept.''' - if not self: - return [] - label = self[0].subsystem.network.indices2labels - return [list(label(mechanism)) for mechanism in self.mechanisms] - - @classmethod - def from_json(cls, json): - return cls(json['concepts']) - - -def _concept_sort_key(concept): - return (len(concept.mechanism), concept.mechanism) - - -# Maintained for backwards compatibility; constellations are always -# ordered. -# TODO: remove this. 
-def normalize_constellation(constellation): - '''Deterministically reorder the concepts in a constellation. - - Args: - constellation (Constellation): The constellation in question. - - Returns: - Constellation: The constellation, ordered lexicographically by - mechanism. - ''' - return Constellation(constellation) diff --git a/pyphi/models/subsystem.py b/pyphi/models/subsystem.py new file mode 100644 index 000000000..8f00e3cf0 --- /dev/null +++ b/pyphi/models/subsystem.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# models/subsystem.py + +"""Subsystem-level objects.""" + +from . import cmp, fmt +from .. import config, utils + +# pylint: disable=too-many-arguments + +_sia_attributes = ['phi', 'ces', 'partitioned_ces', 'subsystem', + 'cut_subsystem'] + + +def _concept_sort_key(concept): + return (len(concept.mechanism), concept.mechanism) + + +class CauseEffectStructure(tuple): + """A collection of concepts.""" + # TODO: compare CESs using set equality + + def __new__(cls, concepts=()): + """Normalize the order of concepts in the |CauseEffectStructure|.""" + return super().__new__(cls, sorted(concepts, key=_concept_sort_key)) + + def __repr__(self): + if config.REPR_VERBOSITY > 0: + return self.__str__() + + return "CauseEffectStructure{}".format( + super().__repr__()) + + def __str__(self): + return fmt.fmt_ces(self) + + def to_json(self): + return {'concepts': list(self)} + + @property + def mechanisms(self): + """The mechanism of each concept.""" + return [concept.mechanism for concept in self] + + @property + def phis(self): + """The |small_phi| values of each concept.""" + return [concept.phi for concept in self] + + @property + def labeled_mechanisms(self): + """The labeled mechanism of each concept.""" + if not self: + return [] + label = self[0].subsystem.network.indices2labels + return tuple(list(label(mechanism)) for mechanism in self.mechanisms) + + @classmethod + def from_json(cls, json): + return cls(json['concepts']) + + 
+class SystemIrreducibilityAnalysis(cmp.Orderable): + """An analysis of system irreducibility (|big_phi|). + + Contains the |big_phi| value of the |Subsystem|, the cause-effect + structure, and all the intermediate results obtained in the course of + computing them. + + These can be compared with the built-in Python comparison operators (``<``, + ``>``, etc.). First, |big_phi| values are compared. Then, if these are + equal up to |PRECISION|, the one with the larger subsystem is greater. + + Attributes: + phi (float): The |big_phi| value for the subsystem when taken against + this analysis, *i.e.* the difference between the cause-effect + structure and the partitioned cause-effect structure for this + analysis. + ces (CauseEffectStructure): The cause-effect structure of + the whole subsystem. + partitioned_ces (CauseEffectStructure): The cause-effect structure when + the subsystem is cut. + subsystem (Subsystem): The subsystem this analysis was calculated for. + cut_subsystem (Subsystem): The subsystem with the minimal cut applied. + time (float): The number of seconds it took to calculate. + small_phi_time (float): The number of seconds it took to calculate the + cause-effect structure. + """ + + def __init__(self, phi=None, ces=None, + partitioned_ces=None, subsystem=None, + cut_subsystem=None, time=None, small_phi_time=None): + self.phi = phi + self.ces = ces + self.partitioned_ces = partitioned_ces + self.subsystem = subsystem + self.cut_subsystem = cut_subsystem + self.time = time + self.small_phi_time = small_phi_time + + def __repr__(self): + return fmt.make_repr(self, _sia_attributes) + + def __str__(self, ces=True): + return fmt.fmt_sia(self, ces=ces) + + def print(self, ces=True): + """Print this |SystemIrreducibilityAnalysis|, optionally without + cause-effect structures. + """ + print(self.__str__(ces=ces)) + + @property + def cut(self): + """The unidirectional cut that makes the least difference to the + subsystem. 
+ """ + return self.cut_subsystem.cut + + @property + def network(self): + """The network the subsystem belongs to.""" + return self.subsystem.network + + unorderable_unless_eq = ['network'] + + def order_by(self): + return [self.phi, len(self.subsystem), self.subsystem.node_indices] + + def __eq__(self, other): + return cmp.general_eq(self, other, _sia_attributes) + + def __bool__(self): + """A |SystemIrreducibilityAnalysis| is ``True`` if it has + |big_phi > 0|. + """ + return not utils.eq(self.phi, 0) + + def __hash__(self): + return hash((self.phi, + self.ces, + self.partitioned_ces, + self.subsystem, + self.cut_subsystem)) + + def to_json(self): + """Return a JSON-serializable representation.""" + return { + attr: getattr(self, attr) + for attr in _sia_attributes + ['time', 'small_phi_time'] + } + + +def _null_sia(subsystem, phi=0.0): + """Return a |SystemIrreducibilityAnalysis| with zero |big_phi| and empty + cause-effect structures. + + This is the analysis result for a reducible subsystem. + """ + return SystemIrreducibilityAnalysis(subsystem=subsystem, + cut_subsystem=subsystem, + phi=phi, + ces=(), + partitioned_ces=()) diff --git a/pyphi/network.py b/pyphi/network.py index e95c57f87..68976be45 100644 --- a/pyphi/network.py +++ b/pyphi/network.py @@ -2,10 +2,10 @@ # -*- coding: utf-8 -*- # network.py -''' +""" Represents the network of interest. This is the primary object of PyPhi and the context of all |small_phi| and |big_phi| computation. -''' +""" import json @@ -17,52 +17,51 @@ class Network: - '''A network of nodes. + """A network of nodes. - Represents the network we're analyzing and holds auxilary data about it. + Represents the network under analysis and holds auxilary data about it. Args: tpm (np.ndarray): The transition probability matrix of the network. - The TPM can be provided in either state-by-node (either - 2-dimensional or n-dimensional) or state-by-state form. 
In either - form, row indices must follow the LOLI convention (see - :ref:`tpm-conventions`). In state-by-state form column indices must - also follow the LOLI convention. + The TPM can be provided in any of three forms: **state-by-state**, + **state-by-node**, or **multidimensional state-by-node** form. + In the state-by-node forms, row indices must follow the + little-endian convention (see :ref:`little-endian-convention`). In + state-by-state form, column indices must also follow the + little-endian convention. - If given in state-by-node form, the TPM can be either + If the TPM is given in state-by-node form, it can be either 2-dimensional, so that ``tpm[i]`` gives the probabilities of each - node being on if the past state is encoded by |i| according to - LOLI, or in n-dimensional form, so that ``tpm[(0, 0, 1)]`` gives - the probabilities of each node being on if the past state is - |n0 = 0, n1 = 0, n2 = 1|. + node being ON if the previous state is encoded by |i| according to + the little-endian convention, or in multidimensional form, so that + ``tpm[(0, 0, 1)]`` gives the probabilities of each node being ON if + the previous state is |N_0 = 0, N_1 = 0, N_2 = 1|. The shape of the 2-dimensional form of a state-by-node TPM must be - ``(S, N)``, and the shape of the n-dimensional form of the TPM must - be ``[2] * N + [N]``, where ``S`` is the number of states and ``N`` - is the number of nodes in the network. + ``(s, n)``, and the shape of the multidimensional form of the TPM + must be ``[2] * n + [n]``, where ``s`` is the number of states and + ``n`` is the number of nodes in the network. Keyword Args: - connectivity_matrix (np.ndarray): A square binary adjacency matrix - indicating the connections between nodes in the network. - ``connectivity_matrix[i][j] == 1`` means that node |i| is connected - to node |j|. If no connectivity matrix is given, every node is - connected to every node **(including itself)**. 
+ cm (np.ndarray): A square binary adjacency matrix indicating the + connections between nodes in the network. ``cm[i][j] == 1`` means + that node |i| is connected to node |j| (see :ref:`cm-conventions`). + **If no connectivity matrix is given, PyPhi assumes that every node + is connected to every node (including itself)**. node_labels (tuple[str]): Human-readable labels for each node in the network. Example: - In a 3-node network, ``a_network.tpm[(0, 0, 1)]`` gives the transition - probabilities for each node at |t| given that state at |t-1| was - |n0 = 0, n1 = 0, n2 = 1|. - ''' + In a 3-node network, ``the_network.tpm[(0, 0, 1)]`` gives the + transition probabilities for each node at |t| given that state at |t-1| + was |N_0 = 0, N_1 = 0, N_2 = 1|. + """ # TODO make tpm also optional when implementing logical network definition - def __init__(self, tpm, connectivity_matrix=None, node_labels=None, - purview_cache=None): - + def __init__(self, tpm, cm=None, node_labels=None, purview_cache=None): self._tpm, self._tpm_hash = self._build_tpm(tpm) - self._cm, self._cm_hash = self._build_cm(connectivity_matrix) + self._cm, self._cm_hash = self._build_cm(cm) self._node_indices = tuple(range(self.size)) self._node_labels = node_labels or default_labels(self._node_indices) self.purview_cache = purview_cache or cache.PurviewCache() @@ -71,24 +70,25 @@ def __init__(self, tpm, connectivity_matrix=None, node_labels=None, @property def tpm(self): - '''np.ndarray: The network's transition probability matrix, in n-dimensional - form.''' + """np.ndarray: The network's transition probability matrix, in + multidimensional form. + """ return self._tpm @staticmethod def _build_tpm(tpm): - '''Validate the TPM passed by the user and convert to n-dimensional + """Validate the TPM passed by the user and convert to multidimensional form. 
- ''' + """ tpm = np.array(tpm) validate.tpm(tpm) - # Convert to N-D state-by-node form + # Convert to multidimensional state-by-node form if is_state_by_state(tpm): tpm = convert.state_by_state2state_by_node(tpm) else: - tpm = convert.to_n_dimensional(tpm) + tpm = convert.to_multidimensional(tpm) utils.np_immutable(tpm) @@ -96,16 +96,17 @@ def _build_tpm(tpm): @property def cm(self): - '''np.ndarray: The network's connectivity matrix. + """np.ndarray: The network's connectivity matrix. A square binary adjacency matrix indicating the connections between nodes in the network. - ''' + """ return self._cm def _build_cm(self, cm): - '''Convert the passed CM to the proper format, or construct the - unitary CM if none was provided.''' + """Convert the passed CM to the proper format, or construct the + unitary CM if none was provided. + """ if cm is None: # Assume all are connected. cm = np.ones((self.size, self.size)) @@ -118,51 +119,52 @@ def _build_cm(self, cm): @property def connectivity_matrix(self): - '''np.ndarray: Alias for ``cm``.''' + """np.ndarray: Alias for ``cm``.""" return self._cm @property def causally_significant_nodes(self): - '''See :func:`pyphi.connectivity.causally_significant_nodes`.''' + """See :func:`pyphi.connectivity.causally_significant_nodes`.""" return connectivity.causally_significant_nodes(self.cm) @property def size(self): - '''int: The number of nodes in the network.''' + """int: The number of nodes in the network.""" return self.tpm.shape[-1] # TODO extend to nonbinary nodes @property def num_states(self): - '''int: The number of possible states of the network.''' + """int: The number of possible states of the network.""" return 2 ** self.size @property def node_indices(self): - '''tuple[int]: The indices of nodes in the network. + """tuple[int]: The indices of nodes in the network. This is equivalent to ``tuple(range(network.size))``. 
- ''' + """ return self._node_indices @property def node_labels(self): - '''tuple[str]: The labels of nodes in the network.''' + """tuple[str]: The labels of nodes in the network.""" return self._node_labels def labels2indices(self, labels): - '''Convert a tuple of node labels to node indices.''' + """Convert a tuple of node labels to node indices.""" _map = dict(zip(self.node_labels, self.node_indices)) return tuple(_map[label] for label in labels) def indices2labels(self, indices): - '''Convert a tuple of node indices to node labels.''' + """Convert a tuple of node indices to node labels.""" _map = dict(zip(self.node_indices, self.node_labels)) return tuple(_map[index] for index in indices) def parse_node_indices(self, nodes): - '''Returns the nodes indices for nodes, where ``nodes`` is either - already integer indices or node labels.''' + """Return the node indices for nodes, where ``nodes`` is either + already integer indices or node labels. + """ if not nodes: indices = () elif all(isinstance(node, str) for node in nodes): @@ -175,35 +177,37 @@ # interested in caching at the Network-level... @cache.method('purview_cache') def potential_purviews(self, direction, mechanism): - '''All purviews which are not clearly reducible for mechanism. + """All purviews which are not clearly reducible for mechanism. Args: - direction (Direction): |PAST| or |FUTURE|. + direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The mechanism which all purviews are checked for reducibility over. Returns: list[tuple[int]]: All purviews which are irreducible over ``mechanism``. 
- ''' + """ all_purviews = utils.powerset(self._node_indices) return irreducible_purviews(self.cm, direction, mechanism, all_purviews) def __repr__(self): - return 'Network({}, connectivity_matrix={})'.format(self.tpm, self.cm) + return 'Network({}, cm={})'.format(self.tpm, self.cm) def __str__(self): return self.__repr__() def __eq__(self, other): - '''Return whether this network equals the other object. + """Return whether this network equals the other object. Networks are equal if they have the same TPM and CM. - ''' - return (np.array_equal(self.tpm, other.tpm) - and np.array_equal(self.cm, other.cm) - if isinstance(other, type(self)) else False) + """ + return ( + isinstance(other, Network) and + np.array_equal(self.tpm, other.tpm) and + np.array_equal(self.cm, other.cm) + ) def __ne__(self, other): return not self.__eq__(other) @@ -213,7 +217,7 @@ def __hash__(self): return hash((self._tpm_hash, self._cm_hash)) def to_json(self): - '''Return a JSON-serializable representation.''' + """Return a JSON-serializable representation.""" return { 'tpm': self.tpm, 'cm': self.cm, @@ -223,17 +227,17 @@ def to_json(self): @classmethod def from_json(cls, json_dict): - '''Return a |Network| object from a JSON dictionary representation.''' + """Return a |Network| object from a JSON dictionary representation.""" return Network(json_dict['tpm'], json_dict['cm'], node_labels=json_dict['labels']) def irreducible_purviews(cm, direction, mechanism, purviews): - '''Returns all purview which are irreducible for the mechanism. + """Return all purviews which are irreducible for the mechanism. Args: cm (np.ndarray): An |N x N| connectivity matrix. - direction (Direction): |PAST| or |FUTURE|. + direction (Direction): |CAUSE| or |EFFECT|. purviews (list[tuple[int]]): The purviews to check. mechanism (tuple[int]): The mechanism in question. @@ -243,9 +247,9 @@ def irreducible_purviews(cm, direction, mechanism, purviews): Raises: ValueError: If ``direction`` is invalid. 
- ''' + """ def reducible(purview): - '''Returns ``True`` if purview is trivially reducible.''' + """Return ``True`` if purview is trivially reducible.""" _from, to = direction.order(mechanism, purview) return connectivity.block_reducible(cm, _from, to) @@ -253,14 +257,14 @@ def reducible(purview): def from_json(filename): - '''Convert a JSON network to a PyPhi network. + """Convert a JSON network to a PyPhi network. Args: filename (str): A path to a JSON file representing a network. Returns: Network: The corresponding PyPhi network object. - ''' + """ with open(filename) as f: loaded = json.load(f) diff --git a/pyphi/node.py b/pyphi/node.py index 4b677f317..408f5c108 100644 --- a/pyphi/node.py +++ b/pyphi/node.py @@ -2,10 +2,10 @@ # -*- coding: utf-8 -*- # node.py -''' +""" Represents a node in a network. Each node has a unique index, its position in the network's list of nodes. -''' +""" # pylint: disable=too-many-arguments @@ -21,7 +21,7 @@ # TODO extend to nonbinary nodes @functools.total_ordering class Node: - '''A node in a subsystem. + """A node in a subsystem. Args: tpm (np.ndarray): The TPM of the subsystem. @@ -36,7 +36,7 @@ class Node: state j at t+1 if the state of its inputs is i at t. If the node is a single element with a cut selfloop, (i.e. it has no inputs), the tpm is simply its unconstrained effect repertoire. - ''' + """ def __init__(self, tpm, cm, index, state, label): @@ -77,7 +77,7 @@ def __init__(self, tpm, cm, index, state, label): # the state of the node's inputs at t, and the last dimension is # indexed by the node's state at t+1. This representation makes it easy # to condition on the node state. - self.tpm = np.moveaxis([tpm_off, tpm_on], 0, -1) + self.tpm = np.stack([tpm_off, tpm_on], axis=-1) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Make the TPM immutable (for hashing). 
@@ -89,22 +89,22 @@ def __init__(self, tpm, cm, index, state, label): @property def tpm_off(self): - '''The TPM of this node containing only the 'OFF' probabilities.''' + """The TPM of this node containing only the 'OFF' probabilities.""" return self.tpm[..., 0] @property def tpm_on(self): - '''The TPM of this node containing only the 'ON' probabilities.''' + """The TPM of this node containing only the 'ON' probabilities.""" return self.tpm[..., 1] @property def inputs(self): - '''The set of nodes with connections to this node.''' + """The set of nodes with connections to this node.""" return self._inputs @property def outputs(self): - '''The set of nodes this node has connections to.''' + """The set of nodes this node has connections to.""" return self._outputs def __repr__(self): @@ -114,7 +114,7 @@ def __str__(self): return self.__repr__() def __eq__(self, other): - '''Return whether this node equals the other object. + """Return whether this node equals the other object. Two nodes are equal if they belong to the same subsystem and have the same index (their TPMs must be the same in that case, so this method @@ -122,7 +122,7 @@ def __eq__(self, other): Labels are for display only, so two equal nodes may have different labels. - ''' + """ return (self.index == other.index and np.array_equal(self.tpm, other.tpm) and self.state == other.state and @@ -140,22 +140,22 @@ def __hash__(self): # TODO do we need more than the index? def to_json(self): - '''Return a JSON-serializable representation.''' + """Return a JSON-serializable representation.""" return self.index def default_label(index): - '''Default label for a node.''' + """Default label for a node.""" return "n{}".format(index) def default_labels(indices): - '''Default labels for serveral nodes.''' + """Default labels for several nodes.""" return tuple(default_label(i) for i in indices) def generate_nodes(tpm, cm, network_state, indices, labels=None): - '''Generate |Node| objects for a subsystem. 
+ """Generate |Node| objects for a subsystem. Args: tpm (np.ndarray): The system's TPM @@ -168,7 +168,7 @@ def generate_nodes(tpm, cm, network_state, indices, labels=None): Returns: tuple[Node]: The nodes of the system. - ''' + """ if labels is None: labels = default_labels(indices) else: @@ -181,11 +181,11 @@ def generate_nodes(tpm, cm, network_state, indices, labels=None): def expand_node_tpm(tpm): - '''Broadcast a node TPM over the full network. + """Broadcast a node TPM over the full network. This is different from broadcasting the TPM of a full system since the last dimension (containing the state of the node) contains only the probability of *this* node being on, rather than the probabilities for each node. - ''' + """ uc = np.ones([2 for node in tpm.shape]) return uc * tpm diff --git a/pyphi/partition.py b/pyphi/partition.py index 9ffec0f7f..b5dbc8cd7 100644 --- a/pyphi/partition.py +++ b/pyphi/partition.py @@ -2,9 +2,9 @@ # -*- coding: utf-8 -*- # partition.py -''' +""" Functions for generating partitions. -''' +""" from itertools import chain, product @@ -12,8 +12,8 @@ # From stackoverflow.com/questions/19368375/set-partitions-in-python -def partitions(collection): - '''Generate all set partitions of a collection. +def partitions(collection): # pylint: disable=inconsistent-return-statements + """Generate all set partitions of a collection. Example: >>> list(partitions(range(3))) # doctest: +NORMALIZE_WHITESPACE @@ -22,7 +22,7 @@ def partitions(collection): [[0, 1], [2]], [[1], [0, 2]], [[0], [1], [2]]] - ''' + """ collection = list(collection) # Special cases @@ -41,7 +41,7 @@ def partitions(collection): @cache(cache={}, maxmem=None) def bipartition_indices(N): - '''Return indices for undirected bipartitions of a sequence. + """Return indices for undirected bipartitions of a sequence. Args: N (int): The length of the sequence. 
@@ -54,7 +54,7 @@ def bipartition_indices(N): >>> N = 3 >>> bipartition_indices(N) [((), (0, 1, 2)), ((0,), (1, 2)), ((1,), (0, 2)), ((0, 1), (2,))] - ''' + """ result = [] if N <= 0: return result @@ -70,7 +70,7 @@ def bipartition_indices(N): # TODO? rename to `bipartitions` def bipartition(seq): - '''Return a list of bipartitions for a sequence. + """Return a list of bipartitions for a sequence. Args: a (Iterable): The sequence to partition. @@ -82,14 +82,15 @@ def bipartition(seq): Example: >>> bipartition((1,2,3)) [((), (1, 2, 3)), ((1,), (2, 3)), ((2,), (1, 3)), ((1, 2), (3,))] - ''' - return [(tuple(seq[i] for i in part0_idx), tuple(seq[j] for j in part1_idx)) + """ + return [(tuple(seq[i] for i in part0_idx), + tuple(seq[j] for j in part1_idx)) for part0_idx, part1_idx in bipartition_indices(len(seq))] @cache(cache={}, maxmem=None) def directed_bipartition_indices(N): - '''Return indices for directed bipartitions of a sequence. + """Return indices for directed bipartitions of a sequence. Args: N (int): The length of the sequence. @@ -109,14 +110,14 @@ def directed_bipartition_indices(N): ((0, 2), (1,)), ((1, 2), (0,)), ((0, 1, 2), ())] - ''' + """ indices = bipartition_indices(N) return indices + [idx[::-1] for idx in indices[::-1]] # TODO? [optimization] optimize this to use indices rather than nodes def directed_bipartition(seq, nontrivial=False): - '''Return a list of directed bipartitions for a sequence. + """Return a list of directed bipartitions for a sequence. Args: seq (Iterable): The sequence to partition. 
@@ -135,7 +136,7 @@ def directed_bipartition(seq, nontrivial=False): ((1, 3), (2,)), ((2, 3), (1,)), ((1, 2, 3), ())] - ''' + """ bipartitions = [ (tuple(seq[i] for i in part0_idx), tuple(seq[j] for j in part1_idx)) for part0_idx, part1_idx in directed_bipartition_indices(len(seq)) @@ -149,20 +150,20 @@ def directed_bipartition(seq, nontrivial=False): def bipartition_of_one(seq): - '''Generate bipartitions where one part is of length 1.''' + """Generate bipartitions where one part is of length 1.""" seq = list(seq) for i, elt in enumerate(seq): yield ((elt,), tuple(seq[:i] + seq[(i + 1):])) def reverse_elements(seq): - '''Reverse the elements of a sequence.''' + """Reverse the elements of a sequence.""" for elt in seq: yield elt[::-1] def directed_bipartition_of_one(seq): - '''Generate directed bipartitions where one part is of length 1. + """Generate directed bipartitions where one part is of length 1. Args: seq (Iterable): The sequence to partition. @@ -180,14 +181,14 @@ def directed_bipartition_of_one(seq): ((2, 3), (1,)), ((1, 3), (2,)), ((1, 2), (3,))] - ''' + """ bipartitions = list(bipartition_of_one(seq)) return chain(bipartitions, reverse_elements(bipartitions)) @cache(cache={}, maxmem=None) def directed_tripartition_indices(N): - '''Return indices for directed tripartitions of a sequence. + """Return indices for directed tripartitions of a sequence. Args: N (int): The length of the sequence. @@ -200,8 +201,7 @@ def directed_tripartition_indices(N): >>> N = 1 >>> directed_tripartition_indices(N) [((0,), (), ()), ((), (0,), ()), ((), (), (0,))] - ''' - + """ result = [] if N <= 0: return result @@ -218,7 +218,7 @@ def directed_tripartition_indices(N): def directed_tripartition(seq): - '''Generator over all directed tripartitions of a sequence. + """Generator over all directed tripartitions of a sequence. Args: seq (Iterable): a sequence. 
@@ -238,7 +238,7 @@ def directed_tripartition(seq): ((5,), (), (2,)), ((), (5,), (2,)), ((), (), (2, 5))] - ''' + """ for a, b, c in directed_tripartition_indices(len(seq)): yield (tuple(seq[i] for i in a), tuple(seq[j] for j in b), @@ -326,12 +326,12 @@ def _b(mu, nu, sigma, n, a, k, collection): # flake8: noqa def k_partitions(collection, k): - '''Generate all ``k``-partitions of a collection. + """Generate all ``k``-partitions of a collection. Example: >>> list(k_partitions(range(3), 2)) [[[0, 1], [2]], [[0], [1, 2]], [[0, 2], [1]]] - ''' + """ collection = list(collection) n = len(collection) diff --git a/pyphi/subsystem.py b/pyphi/subsystem.py index c86ae3a37..a39e65799 100644 --- a/pyphi/subsystem.py +++ b/pyphi/subsystem.py @@ -2,30 +2,36 @@ # -*- coding: utf-8 -*- # subsystem.py -'''Represents a candidate system for |small_phi| and |big_phi| evaluation.''' +"""Represents a candidate system for |small_phi| and |big_phi| evaluation.""" # pylint: disable=too-many-instance-attributes,too-many-public-methods, # pylint: disable=too-many-public-methods,too-many-arguments import functools import itertools +import logging +from time import time import numpy as np from . 
import Direction, cache, config, distribution, utils, validate -from .distance import small_phi_measure as measure +from .distance import repertoire_distance from .distribution import max_entropy_distribution, repertoire_shape -from .models import (Bipartition, Concept, KPartition, Mice, Mip, NullCut, - Part, Tripartition, _null_mip, cmp) +from .models import (Bipartition, Concept, KPartition, + MaximallyIrreducibleCause, MaximallyIrreducibleEffect, + NullCut, Part, RepertoireIrreducibilityAnalysis, + Tripartition, _null_ria) from .network import irreducible_purviews from .node import generate_nodes from .partition import (bipartition, directed_bipartition, directed_tripartition, k_partitions, partitions) from .tpm import condition_tpm, marginalize_out +log = logging.getLogger(__name__) + class Subsystem: - '''A set of nodes in a network. + """A set of nodes in a network. Args: network (Network): The network the subsystem belongs to. @@ -46,7 +52,7 @@ class Subsystem: node_indices (tuple[int]): The indices of the nodes in the subsystem. cut (Cut): The cut that has been applied to this subsystem. null_cut (Cut): The cut object representing no cut. 
- ''' + """ def __init__(self, network, state, nodes, cut=None, mice_cache=None, repertoire_cache=None, single_node_repertoire_cache=None, @@ -82,8 +88,8 @@ def __init__(self, network, state, nodes, cut=None, mice_cache=None, # The network's connectivity matrix with cut applied self.cm = self.cut.apply_cut(network.cm) - # Reusable cache for core causes & effects - self._mice_cache = cache.MiceCache(self, mice_cache) + # Reusable cache for maximally-irreducible causes and effects + self._mice_cache = cache.MICECache(self, mice_cache) # Cause & effect repertoire caches # TODO: if repertoire caches are never reused, there's no reason to @@ -100,73 +106,73 @@ def __init__(self, network, state, nodes, cut=None, mice_cache=None, @property def nodes(self): - '''tuple[Node]: The nodes in this |Subsystem|.''' + """tuple[Node]: The nodes in this |Subsystem|.""" return self._nodes @nodes.setter def nodes(self, value): - '''Remap indices to nodes whenever nodes are changed, e.g. in the + """Remap indices to nodes whenever nodes are changed, e.g. in the `macro` module. - ''' + """ # pylint: disable=attribute-defined-outside-init self._nodes = value self._index2node = {node.index: node for node in self._nodes} @property def proper_state(self): - '''tuple[int]: The state of the subsystem. + """tuple[int]: The state of the subsystem. ``proper_state[i]`` gives the state of the |ith| node **in the subsystem**. Note that this is **not** the state of ``nodes[i]``. 
- ''' + """ return utils.state_of(self.node_indices, self.state) @property def connectivity_matrix(self): - '''np.ndarray: Alias for ``Subsystem.cm``.''' + """np.ndarray: Alias for ``Subsystem.cm``.""" return self.cm @property def size(self): - '''int: The number of nodes in the subsystem.''' + """int: The number of nodes in the subsystem.""" return len(self.node_indices) @property def is_cut(self): - '''bool: ``True`` if this Subsystem has a cut applied to it.''' + """bool: ``True`` if this Subsystem has a cut applied to it.""" return not self.cut.is_null @property def cut_indices(self): - '''tuple[int]: The nodes of this subsystem to cut for |big_phi| + """tuple[int]: The nodes of this subsystem to cut for |big_phi| computations. This was added to support ``MacroSubsystem``, which cuts indices other than ``node_indices``. - ''' + """ return self.node_indices @property def cut_mechanisms(self): - '''list[tuple[int]]: The mechanisms that are cut in this system.''' + """list[tuple[int]]: The mechanisms that are cut in this system.""" return self.cut.all_cut_mechanisms() @property def tpm_size(self): - '''int: The number of nodes in the TPM.''' + """int: The number of nodes in the TPM.""" return self.tpm.shape[-1] def cache_info(self): - '''Report repertoire cache statistics.''' + """Report repertoire cache statistics.""" return { - 'single_node_repertoire': \ + 'single_node_repertoire': self._single_node_repertoire_cache.info(), 'repertoire': self._repertoire_cache.info(), 'mice': self._mice_cache.info() } def clear_caches(self): - '''Clear the mice and repertoire caches.''' + """Clear the mice and repertoire caches.""" self._single_node_repertoire_cache.clear() self._repertoire_cache.clear() self._mice_cache.clear() @@ -178,33 +184,36 @@ def __str__(self): return repr(self) def __bool__(self): - '''Return ``False`` if the Subsystem has no nodes, ``True`` - otherwise.''' + """Return ``False`` if the Subsystem has no nodes, ``True`` + otherwise. 
+ """ return bool(self.nodes) def __eq__(self, other): - '''Return whether this Subsystem is equal to the other object. + """Return whether this Subsystem is equal to the other object. Two Subsystems are equal if their sets of nodes, networks, and cuts are equal. - ''' - if type(self) != type(other): + """ + if not isinstance(other, Subsystem): return False - return (set(self.node_indices) == set(other.node_indices) - and self.state == other.state - and self.network == other.network - and self.cut == other.cut) + return ( + set(self.node_indices) == set(other.node_indices) and + self.state == other.state and + self.network == other.network and + self.cut == other.cut + ) def __ne__(self, other): return not self.__eq__(other) def __lt__(self, other): - '''Return whether this subsystem has fewer nodes than the other.''' + """Return whether this subsystem has fewer nodes than the other.""" return len(self.nodes) < len(other.nodes) def __gt__(self, other): - '''Return whether this subsystem has more nodes than the other.''' + """Return whether this subsystem has more nodes than the other.""" return len(self.nodes) > len(other.nodes) def __le__(self, other): @@ -214,14 +223,14 @@ def __ge__(self, other): return len(self.nodes) >= len(other.nodes) def __len__(self): - '''Return the number of nodes in this Subsystem.''' + """Return the number of nodes in this Subsystem.""" return len(self.node_indices) def __hash__(self): return hash((self.network, self.node_indices, self.state, self.cut)) def to_json(self): - '''Return a JSON-serializable representation.''' + """Return a JSON-serializable representation.""" return { 'network': self.network, 'state': self.state, @@ -230,19 +239,19 @@ def to_json(self): } def apply_cut(self, cut): - '''Return a cut version of this |Subsystem|. + """Return a cut version of this |Subsystem|. Args: cut (Cut): The cut to apply to this |Subsystem|. Returns: Subsystem: The cut subsystem. 
- ''' + """ return Subsystem(self.network, self.state, self.node_indices, cut=cut, mice_cache=self._mice_cache) def indices2nodes(self, indices): - '''Return |Nodes| for these indices. + """Return |Nodes| for these indices. Args: indices (tuple[int]): The indices in question. @@ -252,18 +261,18 @@ def indices2nodes(self, indices): Raises: ValueError: If requested indices are not in the subsystem. - ''' + """ if set(indices) - set(self.node_indices): raise ValueError( "`indices` must be a subset of the Subsystem's indices.") return tuple(self._index2node[n] for n in indices) def indices2labels(self, indices): - '''Returns the node labels for these indices.''' + """Return the node labels for the given indices.""" return tuple(n.label for n in self.indices2nodes(indices)) # TODO extend to nonbinary nodes - @cache.method('_single_node_repertoire_cache', Direction.PAST) + @cache.method('_single_node_repertoire_cache', Direction.CAUSE) def _single_node_cause_repertoire(self, mechanism_node_index, purview): mechanism_node = self._index2node[mechanism_node_index] # We're conditioning on this node's state, so take the TPM for the node @@ -274,9 +283,9 @@ def _single_node_cause_repertoire(self, mechanism_node_index, purview): return marginalize_out((mechanism_node.inputs - purview), tpm) # TODO extend to nonbinary nodes - @cache.method('_repertoire_cache', Direction.PAST) + @cache.method('_repertoire_cache', Direction.CAUSE) def cause_repertoire(self, mechanism, purview): - '''Return the cause repertoire of a mechanism over a purview. + """Return the cause repertoire of a mechanism over a purview. Args: mechanism (tuple[int]): The mechanism for which to calculate the @@ -290,13 +299,14 @@ def cause_repertoire(self, mechanism, purview): .. note:: The returned repertoire is a distribution over purview node states, not the states of the whole network. - ''' + """ # If the purview is empty, the distribution is empty; return the # multiplicative identity. 
if not purview: return np.array([1.0]) - # If the mechanism is empty, nothing is specified about the past state - # of the purview; return the purview's maximum entropy distribution. + # If the mechanism is empty, nothing is specified about the previous + # state of the purview; return the purview's maximum entropy + # distribution. if not mechanism: return max_entropy_distribution(purview, self.tpm_size) # Use a frozenset so the arguments to `_single_node_cause_repertoire` @@ -311,13 +321,13 @@ def cause_repertoire(self, mechanism, purview): np.multiply, [self._single_node_cause_repertoire(m, purview) for m in mechanism] ) - # The resulting joint distribution is over past states, which are rows - # in the TPM, so the distribution is a column. The columns of a TPM - # don't necessarily sum to 1, so we normalize. + # The resulting joint distribution is over previous states, which are + # rows in the TPM, so the distribution is a column. The columns of a + # TPM don't necessarily sum to 1, so we normalize. return distribution.normalize(joint) # TODO extend to nonbinary nodes - @cache.method('_single_node_repertoire_cache', Direction.FUTURE) + @cache.method('_single_node_repertoire_cache', Direction.EFFECT) def _single_node_effect_repertoire(self, mechanism, purview_node_index): purview_node = self._index2node[purview_node_index] # Condition on the state of the inputs that are in the mechanism. @@ -326,13 +336,13 @@ def _single_node_effect_repertoire(self, mechanism, purview_node_index): # Marginalize-out the inputs that aren't in the mechanism. nonmechanism_inputs = (purview_node.inputs - mechanism) tpm = marginalize_out(nonmechanism_inputs, tpm) - # Reshape so that the distribution is over future states. + # Reshape so that the distribution is over next states. 
return tpm.reshape(repertoire_shape([purview_node.index], self.tpm_size)) - @cache.method('_repertoire_cache', Direction.FUTURE) + @cache.method('_repertoire_cache', Direction.EFFECT) def effect_repertoire(self, mechanism, purview): - '''Return the effect repertoire of a mechanism over a purview. + """Return the effect repertoire of a mechanism over a purview. Args: mechanism (tuple[int]): The mechanism for which to calculate the @@ -347,7 +357,7 @@ def effect_repertoire(self, mechanism, purview): .. note:: The returned repertoire is a distribution over purview node states, not the states of the whole network. - ''' + """ # If the purview is empty, the distribution is empty, so return the # multiplicative identity. if not purview: @@ -366,10 +376,10 @@ def effect_repertoire(self, mechanism, purview): ) def repertoire(self, direction, mechanism, purview): - '''Return the cause or effect repertoire based on a direction. + """Return the cause or effect repertoire based on a direction. Args: - direction (Direction): |PAST| or |FUTURE|. + direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The mechanism for which to calculate the repertoire. purview (tuple[int]): The purview over which to calculate the @@ -381,35 +391,34 @@ def repertoire(self, direction, mechanism, purview): Raises: ValueError: If ``direction`` is invalid. 
- ''' - if direction == Direction.PAST: + """ + if direction == Direction.CAUSE: return self.cause_repertoire(mechanism, purview) - elif direction == Direction.FUTURE: + elif direction == Direction.EFFECT: return self.effect_repertoire(mechanism, purview) - else: - # TODO: test that ValueError is raised - validate.direction(direction) + # TODO: test that ValueError is raised + return validate.direction(direction) def unconstrained_repertoire(self, direction, purview): - '''Return the unconstrained cause/effect repertoire over a purview.''' + """Return the unconstrained cause/effect repertoire over a purview.""" return self.repertoire(direction, (), purview) def unconstrained_cause_repertoire(self, purview): - '''Return the unconstrained cause repertoire for a purview. + """Return the unconstrained cause repertoire for a purview. This is just the cause repertoire in the absence of any mechanism. - ''' - return self.unconstrained_repertoire(Direction.PAST, purview) + """ + return self.unconstrained_repertoire(Direction.CAUSE, purview) def unconstrained_effect_repertoire(self, purview): - '''Return the unconstrained effect repertoire for a purview. + """Return the unconstrained effect repertoire for a purview. This is just the effect repertoire in the absence of any mechanism. - ''' - return self.unconstrained_repertoire(Direction.FUTURE, purview) + """ + return self.unconstrained_repertoire(Direction.EFFECT, purview) def partitioned_repertoire(self, direction, partition): - '''Compute the repertoire of a partitioned mechanism and purview.''' + """Compute the repertoire of a partitioned mechanism and purview.""" repertoires = [ self.repertoire(direction, part.mechanism, part.purview) for part in partition @@ -417,10 +426,10 @@ def partitioned_repertoire(self, direction, partition): return functools.reduce(np.multiply, repertoires) def expand_repertoire(self, direction, repertoire, new_purview=None): - '''Distribute an effect repertoire over a larger purview. 
+ """Distribute an effect repertoire over a larger purview. Args: - direction (Direction): |PAST| or |FUTURE|. + direction (Direction): |CAUSE| or |EFFECT|. repertoire (np.ndarray): The repertoire to expand. Keyword Args: @@ -435,7 +444,7 @@ def expand_repertoire(self, direction, repertoire, new_purview=None): Raises: ValueError: If the expanded purview doesn't contain the original purview. - ''' + """ if repertoire is None: return None @@ -457,32 +466,36 @@ def expand_repertoire(self, direction, repertoire, new_purview=None): return distribution.normalize(expanded_repertoire) def expand_cause_repertoire(self, repertoire, new_purview=None): - '''Same as |expand_repertoire| with ``direction`` set to |PAST|.''' - return self.expand_repertoire(Direction.PAST, repertoire, + """Same as |expand_repertoire| with ``direction`` set to |CAUSE|.""" + return self.expand_repertoire(Direction.CAUSE, repertoire, new_purview) def expand_effect_repertoire(self, repertoire, new_purview=None): - '''Same as |expand_repertoire| with ``direction`` set to |FUTURE|.''' - return self.expand_repertoire(Direction.FUTURE, repertoire, + """Same as |expand_repertoire| with ``direction`` set to |EFFECT|.""" + return self.expand_repertoire(Direction.EFFECT, repertoire, new_purview) def cause_info(self, mechanism, purview): - '''Return the cause information for a mechanism over a purview.''' - return measure(Direction.PAST, - self.cause_repertoire(mechanism, purview), - self.unconstrained_cause_repertoire(purview)) + """Return the cause information for a mechanism over a purview.""" + return repertoire_distance( + Direction.CAUSE, + self.cause_repertoire(mechanism, purview), + self.unconstrained_cause_repertoire(purview) + ) def effect_info(self, mechanism, purview): - '''Return the effect information for a mechanism over a purview.''' - return measure(Direction.FUTURE, - self.effect_repertoire(mechanism, purview), - self.unconstrained_effect_repertoire(purview)) + """Return the effect information 
for a mechanism over a purview.""" + return repertoire_distance( + Direction.EFFECT, + self.effect_repertoire(mechanism, purview), + self.unconstrained_effect_repertoire(purview) + ) def cause_effect_info(self, mechanism, purview): - '''Return the cause-effect information for a mechanism over a purview. + """Return the cause-effect information for a mechanism over a purview. This is the minimum of the cause and effect information. - ''' + """ return min(self.cause_info(mechanism, purview), self.effect_info(mechanism, purview)) @@ -490,50 +503,50 @@ def cause_effect_info(self, mechanism, purview): # ========================================================================= def evaluate_partition(self, direction, mechanism, purview, partition, - unpartitioned_repertoire=None): - '''Return the |small_phi| of a mechanism over a purview for the given + repertoire=None): + """Return the |small_phi| of a mechanism over a purview for the given partition. Args: - direction (Direction): |PAST| or |FUTURE|. + direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The nodes in the mechanism. purview (tuple[int]): The nodes in the purview. partition (Bipartition): The partition to evaluate. Keyword Args: - unpartitioned_repertoire (np.array): The unpartitioned repertoire. + repertoire (np.array): The unpartitioned repertoire. If not supplied, it will be computed. Returns: tuple[int, np.ndarray]: The distance between the unpartitioned and partitioned repertoires, and the partitioned repertoire. 
- ''' - if unpartitioned_repertoire is None: - unpartitioned_repertoire = self.repertoire(direction, mechanism, - purview) + """ + if repertoire is None: + repertoire = self.repertoire(direction, mechanism, purview) partitioned_repertoire = self.partitioned_repertoire(direction, partition) - phi = measure(direction, unpartitioned_repertoire, - partitioned_repertoire) + phi = repertoire_distance( + direction, repertoire, partitioned_repertoire) return (phi, partitioned_repertoire) def find_mip(self, direction, mechanism, purview): - '''Return the minimum information partition for a mechanism over a + """Return the minimum information partition for a mechanism over a purview. Args: - direction (Direction): |PAST| or |FUTURE|. + direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The nodes in the mechanism. purview (tuple[int]): The nodes in the purview. Returns: - Mip: The mininum-information partition in one temporal direction. - ''' + RepertoireIrreducibilityAnalysis: The irreducibility analysis for + the minimum-information partition in one temporal direction. + """ # We default to the null MIP (the MIP of a reducible mechanism) - mip = _null_mip(direction, mechanism, purview) + mip = _null_ria(direction, mechanism, purview) if not purview: return mip @@ -541,25 +554,26 @@ def find_mip(self, direction, mechanism, purview): phi_min = float('inf') # Calculate the unpartitioned repertoire to compare against the # partitioned ones. - unpartitioned_repertoire = self.repertoire(direction, mechanism, - purview) + repertoire = self.repertoire(direction, mechanism, purview) def _mip(phi, partition, partitioned_repertoire): # Prototype of MIP with already known data # TODO: Use properties here to infer mechanism and purview from # partition yet access them with `.mechanism` and `.purview`. 
- return Mip(phi=phi, - direction=direction, - mechanism=mechanism, - purview=purview, - partition=partition, - unpartitioned_repertoire=unpartitioned_repertoire, - partitioned_repertoire=partitioned_repertoire, - subsystem=self) + return RepertoireIrreducibilityAnalysis( + phi=phi, + direction=direction, + mechanism=mechanism, + purview=purview, + partition=partition, + repertoire=repertoire, + partitioned_repertoire=partitioned_repertoire, + subsystem=self + ) # State is unreachable - return 0 instead of giving nonsense results - if (direction == Direction.PAST and - np.all(unpartitioned_repertoire == 0)): + if (direction == Direction.CAUSE and + np.all(repertoire == 0)): return _mip(0, None, None) # Loop over possible MIP partitions @@ -568,7 +582,7 @@ def _mip(phi, partition, partitioned_repertoire): # repertoire. phi, partitioned_repertoire = self.evaluate_partition( direction, mechanism, purview, partition, - unpartitioned_repertoire=unpartitioned_repertoire) + repertoire=repertoire) # Return immediately if mechanism is reducible. if phi == 0: @@ -581,58 +595,58 @@ def _mip(phi, partition, partitioned_repertoire): return mip - def mip_past(self, mechanism, purview): - '''Return the past minimum information partition. + def cause_mip(self, mechanism, purview): + """Return the irreducibility analysis for the cause MIP. - Alias for |find_mip| with ``direction`` set to |PAST|. - ''' - return self.find_mip(Direction.PAST, mechanism, purview) + Alias for |find_mip| with ``direction`` set to |CAUSE|. + """ + return self.find_mip(Direction.CAUSE, mechanism, purview) - def mip_future(self, mechanism, purview): - '''Return the future minimum information partition. + def effect_mip(self, mechanism, purview): + """Return the irreducibility analysis for the effect MIP. - Alias for |find_mip| with ``direction`` set to |FUTURE|. - ''' - return self.find_mip(Direction.FUTURE, mechanism, purview) + Alias for |find_mip| with ``direction`` set to |EFFECT|. 
+ """ + return self.find_mip(Direction.EFFECT, mechanism, purview) - def phi_mip_past(self, mechanism, purview): - '''Return the |small_phi| of the past minimum information partition. + def phi_cause_mip(self, mechanism, purview): + """Return the |small_phi| of the cause MIP. This is the distance between the unpartitioned cause repertoire and the MIP cause repertoire. - ''' - mip = self.mip_past(mechanism, purview) + """ + mip = self.cause_mip(mechanism, purview) return mip.phi if mip else 0 - def phi_mip_future(self, mechanism, purview): - '''Return the |small_phi| of the future minimum information partition. + def phi_effect_mip(self, mechanism, purview): + """Return the |small_phi| of the effect MIP. This is the distance between the unpartitioned effect repertoire and the MIP cause repertoire. - ''' - mip = self.mip_future(mechanism, purview) + """ + mip = self.effect_mip(mechanism, purview) return mip.phi if mip else 0 def phi(self, mechanism, purview): - '''Return the |small_phi| of a mechanism over a purview.''' - return min(self.phi_mip_past(mechanism, purview), - self.phi_mip_future(mechanism, purview)) + """Return the |small_phi| of a mechanism over a purview.""" + return min(self.phi_cause_mip(mechanism, purview), + self.phi_effect_mip(mechanism, purview)) # Phi_max methods # ========================================================================= def potential_purviews(self, direction, mechanism, purviews=False): - '''Return all purviews that could belong to the core cause/effect. + """Return all purviews that could belong to the |MIC|/|MIE|. Filters out trivially-reducible purviews. Args: - direction (Direction): |PAST| or |FUTURE|. + direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The mechanism of interest. Keyword Args: purviews (tuple[int]): Optional subset of purviews of interest. 
- ''' + """ if purviews is False: purviews = self.network.potential_purviews(direction, mechanism) # Filter out purviews that aren't in the subsystem @@ -646,10 +660,10 @@ def potential_purviews(self, direction, mechanism, purviews=False): @cache.method('_mice_cache') def find_mice(self, direction, mechanism, purviews=False): - '''Return the maximally irreducible cause or effect for a mechanism. + """Return the |MIC| or |MIE| for a mechanism. Args: - direction (Direction): :|PAST| or |FUTURE|. + direction (Direction): :|CAUSE| or |EFFECT|. mechanism (tuple[int]): The mechanism to be tested for irreducibility. @@ -660,96 +674,121 @@ def find_mice(self, direction, mechanism, purviews=False): nodes. Returns: - Mice: The maximally-irreducible cause or effect in one temporal - direction. - - .. note:: - Strictly speaking, the MICE is a pair of repertoires: the core - cause repertoire and core effect repertoire of a mechanism, which - are maximally different than the unconstrained cause/effect - repertoires (*i.e.*, those that maximize |small_phi|). Here, we - return only information corresponding to one direction, |PAST| or - |FUTURE|, i.e., we return a core cause or core effect, not the pair - of them. - ''' + MaximallyIrreducibleCauseOrEffect: The |MIC| or |MIE|. + """ purviews = self.potential_purviews(direction, mechanism, purviews) if not purviews: - max_mip = _null_mip(direction, mechanism, ()) + max_mip = _null_ria(direction, mechanism, ()) else: max_mip = max(self.find_mip(direction, mechanism, purview) for purview in purviews) - return Mice(max_mip) + if direction == Direction.CAUSE: + return MaximallyIrreducibleCause(max_mip) + elif direction == Direction.EFFECT: + return MaximallyIrreducibleEffect(max_mip) + return validate.direction(direction) - def core_cause(self, mechanism, purviews=False): - '''Return the core cause repertoire of a mechanism. + def mic(self, mechanism, purviews=False): + """Return the mechanism's maximally-irreducible cause (|MIC|). 
- Alias for |find_mice| with ``direction`` set to |PAST|. - ''' - return self.find_mice(Direction.PAST, mechanism, purviews=purviews) + Alias for |find_mice| with ``direction`` set to |CAUSE|. + """ + return self.find_mice(Direction.CAUSE, mechanism, purviews=purviews) - def core_effect(self, mechanism, purviews=False): - '''Return the core effect repertoire of a mechanism. + def mie(self, mechanism, purviews=False): + """Return the mechanism's maximally-irreducible effect (|MIE|). - Alias for |find_mice| with ``direction`` set to |PAST|. - ''' - return self.find_mice(Direction.FUTURE, mechanism, purviews=purviews) + Alias for |find_mice| with ``direction`` set to |EFFECT|. + """ + return self.find_mice(Direction.EFFECT, mechanism, purviews=purviews) def phi_max(self, mechanism): - '''Return the |small_phi_max| of a mechanism. + """Return the |small_phi_max| of a mechanism. This is the maximum of |small_phi| taken over all possible purviews. - ''' - return min(self.core_cause(mechanism).phi, - self.core_effect(mechanism).phi) + """ + return min(self.mic(mechanism).phi, self.mie(mechanism).phi) # Big Phi methods # ========================================================================= - # TODO add `concept-space` section to the docs: @property def null_concept(self): - '''Return the null concept of this subsystem. + """Return the null concept of this subsystem. The null concept is a point in concept space identified with the unconstrained cause and effect repertoire of this subsystem. - ''' + """ # Unconstrained cause repertoire. cause_repertoire = self.cause_repertoire((), ()) # Unconstrained effect repertoire. effect_repertoire = self.effect_repertoire((), ()) # Null cause. - cause = Mice(_null_mip(Direction.PAST, (), (), cause_repertoire)) + cause = MaximallyIrreducibleCause( + _null_ria(Direction.CAUSE, (), (), cause_repertoire)) # Null effect. 
- effect = Mice(_null_mip(Direction.FUTURE, (), (), effect_repertoire)) + effect = MaximallyIrreducibleEffect( + _null_ria(Direction.EFFECT, (), (), effect_repertoire)) # All together now... - return Concept(mechanism=(), cause=cause, effect=effect, subsystem=self) - - def concept(self, mechanism, purviews=False, past_purviews=False, - future_purviews=False): - '''Calculate a concept. - - See :func:`pyphi.compute.concept` for more information. - ''' - # Calculate the maximally irreducible cause repertoire. - cause = self.core_cause(mechanism, - purviews=(past_purviews or purviews)) - # Calculate the maximally irreducible effect repertoire. - effect = self.core_effect(mechanism, - purviews=(future_purviews or purviews)) - # NOTE: Make sure to expand the repertoires to the size of the - # subsystem when calculating concept distance. For now, they must - # remain un-expanded so the concept doesn't depend on the subsystem. - return Concept(mechanism=mechanism, cause=cause, - effect=effect, subsystem=self) + return Concept(mechanism=(), + cause=cause, + effect=effect, + subsystem=self) + + def concept(self, mechanism, purviews=False, cause_purviews=False, + effect_purviews=False): + """Return the concept specified by a mechanism within this subsytem. + + Args: + mechanism (tuple[int]): The candidate set of nodes. + + Keyword Args: + purviews (tuple[tuple[int]]): Restrict the possible purviews to + those in this list. + cause_purviews (tuple[tuple[int]]): Restrict the possible cause + purviews to those in this list. Takes precedence over + ``purviews``. + effect_purviews (tuple[tuple[int]]): Restrict the possible effect + purviews to those in this list. Takes precedence over + ``purviews``. + + Returns: + Concept: The pair of maximally irreducible cause/effect repertoires + that constitute the concept specified by the given mechanism. + """ + start = time() + log.debug('Computing concept %s...', mechanism) + + # If the mechanism is empty, there is no concept. 
+ if not mechanism: + result = self.null_concept + else: + # Calculate the maximally irreducible cause repertoire. + cause = self.mic(mechanism, + purviews=(cause_purviews or purviews)) + # Calculate the maximally irreducible effect repertoire. + effect = self.mie(mechanism, + purviews=(effect_purviews or purviews)) + # NOTE: Make sure to expand the repertoires to the size of the + # subsystem when calculating concept distance. For now, they must + # remain un-expanded so the concept doesn't depend on the + # subsystem. + result = Concept(mechanism=mechanism, cause=cause, effect=effect, + subsystem=self) + + result.time = round(time() - start, config.PRECISION) + log.debug('Found concept %s', mechanism) + return result def mip_partitions(mechanism, purview): - '''Return a generator over all MIP partitions, based on the current - configuration.''' + """Return a generator over all mechanism-purview partitions, based on the + current configuration. + """ func = { 'BI': mip_bipartitions, 'TRI': wedge_partitions, @@ -760,7 +799,7 @@ def mip_partitions(mechanism, purview): def mip_bipartitions(mechanism, purview): - '''Return an generator of all |small_phi| bipartitions of a mechanism over + r"""Return an generator of all |small_phi| bipartitions of a mechanism over a purview. Excludes all bipartitions where one half is entirely empty, *e.g*:: @@ -792,7 +831,7 @@ def mip_bipartitions(mechanism, purview): >>> mechanism = (0,) >>> purview = (2, 3) >>> for partition in mip_bipartitions(mechanism, purview): - ... print(partition, '\\n') # doctest: +NORMALIZE_WHITESPACE + ... print(partition, '\n') # doctest: +NORMALIZE_WHITESPACE ∅ 0 ─── ✕ ─── 2 3 @@ -804,7 +843,7 @@ def mip_bipartitions(mechanism, purview): ∅ 0 ─── ✕ ─── 2,3 ∅ - ''' + """ numerators = bipartition(mechanism) denominators = directed_bipartition(purview) @@ -814,7 +853,7 @@ def mip_bipartitions(mechanism, purview): def wedge_partitions(mechanism, purview): - '''Return an iterator over all wedge partitions. 
+ """Return an iterator over all wedge partitions. These are partitions which strictly split the mechanism and allow a subset of the purview to be split into a third partition, e.g.:: @@ -831,14 +870,14 @@ def wedge_partitions(mechanism, purview): Yields: Tripartition: all unique tripartitions of this mechanism and purview. - ''' + """ numerators = bipartition(mechanism) denominators = directed_tripartition(purview) yielded = set() def valid(factoring): - '''Return whether the factoring should be considered.''' + """Return whether the factoring should be considered.""" # pylint: disable=too-many-boolean-expressions numerator, denominator = factoring return ( @@ -857,23 +896,25 @@ def valid(factoring): Part((), d[2])).normalize() # pylint: disable=bad-whitespace def nonempty(part): - '''Check that the part is not empty.''' + """Check that the part is not empty.""" return part.mechanism or part.purview def compressible(tripart): - '''Check if the tripartition can be transformed into a causally - equivalent partition by combing two of its parts; eg. A/∅ x B/∅ x - ∅/CD is equivalent to AB/∅ x ∅/CD so we don't include it. ''' + """Check if the tripartition can be transformed into a causally + equivalent partition by combing two of its parts; e.g., A/∅ × B/∅ × + ∅/CD is equivalent to AB/∅ × ∅/CD so we don't include it. + """ pairs = [ (tripart[0], tripart[1]), (tripart[0], tripart[2]), - (tripart[1], tripart[2])] - + (tripart[1], tripart[2]) + ] for x, y in pairs: if (nonempty(x) and nonempty(y) and (x.mechanism + y.mechanism == () or x.purview + y.purview == ())): return True + return False if not compressible(tripart) and tripart not in yielded: yielded.add(tripart) @@ -881,7 +922,7 @@ def compressible(tripart): def all_partitions(mechanism, purview): - '''Returns all possible partitions of a mechanism and purview. + """Return all possible partitions of a mechanism and purview. Partitions can consist of any number of parts. 
@@ -891,7 +932,7 @@ def all_partitions(mechanism, purview): Yields: KPartition: A partition of this mechanism and purview into ``k`` parts. - ''' + """ for mechanism_partition in partitions(mechanism): mechanism_partition.append([]) n_mechanism_parts = len(mechanism_partition) @@ -906,11 +947,14 @@ def all_partitions(mechanism, purview): purview_partition.extend([()] * n_empty) # Unique permutations to avoid duplicates empties - for purview_permutation in set(itertools.permutations(purview_partition)): + for purview_permutation in set( + itertools.permutations(purview_partition)): parts = [ Part(tuple(m), tuple(p)) - for m, p in zip(mechanism_partition, purview_permutation)] + for m, p in zip(mechanism_partition, + purview_permutation) + ] # Must partition the mechanism, unless the purview is fully # cut away from the mechanism. diff --git a/pyphi/timescale.py b/pyphi/timescale.py index 883df4523..271bddb18 100644 --- a/pyphi/timescale.py +++ b/pyphi/timescale.py @@ -1,10 +1,10 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# time.py +# timescale.py -''' +""" Functions for converting the timescale of a TPM. -''' +""" import numpy as np from scipy.sparse import csc_matrix @@ -26,7 +26,7 @@ def dense_time(tpm, time_scale): def run_tpm(tpm, time_scale): - '''Iterate a TPM by the specified number of time steps. + """Iterate a TPM by the specified number of time steps. Args: tpm (np.ndarray): A state-by-node tpm. @@ -34,7 +34,7 @@ def run_tpm(tpm, time_scale): Returns: np.ndarray - ''' + """ sbs_tpm = convert.state_by_node2state_by_state(tpm) if sparse(tpm): tpm = sparse_time(sbs_tpm, time_scale) @@ -44,7 +44,7 @@ def run_tpm(tpm, time_scale): def run_cm(cm, time_scale): - '''Iterate a connectivity matrix the specified number of steps. + """Iterate a connectivity matrix the specified number of steps. Args: cm (np.ndarray): A connectivity matrix. @@ -52,7 +52,7 @@ def run_cm(cm, time_scale): Returns: np.ndarray: The connectivity matrix at the new timescale. 
- ''' + """ cm = np.linalg.matrix_power(cm, time_scale) # Round non-unitary values back to 1 cm[cm > 1] = 1 diff --git a/pyphi/tpm.py b/pyphi/tpm.py index 93210af39..dfb703d63 100644 --- a/pyphi/tpm.py +++ b/pyphi/tpm.py @@ -2,9 +2,9 @@ # -*- coding: utf-8 -*- # tpm.py -''' +""" Functions for manipulating transition probability matrices. -''' +""" from itertools import chain @@ -15,25 +15,26 @@ def tpm_indices(tpm): - '''Indices of nodes in the TPM.''' + """Return the indices of nodes in the TPM.""" return tuple(np.where(np.array(tpm.shape[:-1]) == 2)[0]) def is_state_by_state(tpm): - '''Return ``True`` if ``tpm`` is in state-by-state form, otherwise - ``False``.''' + """Return ``True`` if ``tpm`` is in state-by-state form, otherwise + ``False``. + """ return tpm.ndim == 2 and tpm.shape[0] == tpm.shape[1] def condition_tpm(tpm, fixed_nodes, state): - '''Return a TPM conditioned on the given fixed node indices, whose states + """Return a TPM conditioned on the given fixed node indices, whose states are fixed according to the given state-tuple. The dimensions of the new TPM that correspond to the fixed nodes are collapsed onto their state, making those dimensions singletons suitable for broadcasting. The number of dimensions of the conditioned TPM will be the same as the unconditioned TPM. - ''' + """ conditioning_indices = [[slice(None)]] * len(state) for i in fixed_nodes: # Preserve singleton dimensions with `np.newaxis` @@ -46,14 +47,15 @@ def condition_tpm(tpm, fixed_nodes, state): def expand_tpm(tpm): - '''Broadcast a state-by-node TPM so that singleton dimensions are expanded - over the full network.''' - uc = np.ones([2] * (tpm.ndim - 1) + [tpm.shape[-1]]) - return tpm * uc + """Broadcast a state-by-node TPM so that singleton dimensions are expanded + over the full network. + """ + unconstrained = np.ones([2] * (tpm.ndim - 1) + [tpm.shape[-1]]) + return tpm * unconstrained def marginalize_out(node_indices, tpm): - '''Marginalize out nodes from a TPM. 
+ """Marginalize out nodes from a TPM. Args: node_indices (list[int]): The indices of nodes to be marginalized out. @@ -62,36 +64,36 @@ def marginalize_out(node_indices, tpm): Returns: np.ndarray: A TPM with the same number of dimensions, with the nodes marginalized out. - ''' + """ return tpm.sum(tuple(node_indices), keepdims=True) / ( np.array(tpm.shape)[list(node_indices)].prod()) def infer_edge(tpm, a, b, contexts): - '''Infer the presence or absence of an edge from node A to node B. + """Infer the presence or absence of an edge from node A to node B. - Let S be the set of all nodes in a network. Let A' = S - {A}. - We call the state of A' the context C of A. - There is an edge from A to B if there exists any context C(A) such that - p(B | C(A), A=0) =/= p(B | C(A), A=1). + Let S be the set of all nodes in a network. Let A' = S - {A}. We call the + state of A' the context C of A. There is an edge from A to B if there + exists any context C(A) such that P(B | C(A), A=0) =/= P(B | C(A), A=1). Args: - tpm (np.ndarray): The TPM in state-by-node, n-dimensional form. + tpm (np.ndarray): The TPM in state-by-node, multidimensional form. a (int): The index of the putative source node. b (int): The index of the putative sink node. Returns: bool: True if the edge A->B exists, False otherwise. - ''' + """ def a_in_context(context): - '''Given a context C(A), return the states of the full system with A - off and on, respectively.''' + """Given a context C(A), return the states of the full system with A + OFF and ON, respectively. 
+ """ a_off = context[:a] + OFF + context[a:] a_on = context[:a] + ON + context[a:] return (a_off, a_on) def a_affects_b_in_context(context): - '''Returns True if A has an effect on B, given a context.''' + """Return ``True`` if A has an effect on B, given a context.""" a_off, a_on = a_in_context(context) return tpm[a_off][b] != tpm[a_on][b] @@ -99,8 +101,9 @@ def a_affects_b_in_context(context): def infer_cm(tpm): - '''Infer the connectivity matrix associated with a state-by-node TPM in - n-dimensional form.''' + """Infer the connectivity matrix associated with a state-by-node TPM in + multidimensional form. + """ network_size = tpm.shape[-1] all_contexts = tuple(all_states(network_size - 1)) cm = np.empty((network_size, network_size), dtype=int) diff --git a/pyphi/utils.py b/pyphi/utils.py index 5574cf0ce..a5cf48428 100644 --- a/pyphi/utils.py +++ b/pyphi/utils.py @@ -2,14 +2,14 @@ # -*- coding: utf-8 -*- # utils.py -''' +""" Functions used by more than one PyPhi module or class, or that might be of external use. -''' +""" import hashlib import os -from itertools import chain, combinations, islice, product +from itertools import chain, combinations, product import numpy as np from scipy.misc import comb @@ -18,40 +18,40 @@ def state_of(nodes, network_state): - '''Return the state-tuple of the given nodes.''' + """Return the state-tuple of the given nodes.""" return tuple(network_state[n] for n in nodes) if nodes else () -def all_states(n, holi=False): - '''Return all binary states for a system. +def all_states(n, big_endian=False): + """Return all binary states for a system. Args: n (int): The number of elements in the system. - holi (bool): Whether to return the states in HOLI order instead of LOLI - order. + big_endian (bool): Whether to return the states in big-endian order + instead of little-endian order. Yields: - tuple[int]: The next state of an ``n``-element system, in LOLI order - unless ``holi`` is ``True``. 
- ''' + tuple[int]: The next state of an ``n``-element system, in little-endian + order unless ``big_endian`` is ``True``. + """ if n == 0: return for state in product((0, 1), repeat=n): - if holi: + if big_endian: yield state else: - yield state[::-1] # Convert to LOLI-ordering + yield state[::-1] # Convert to little-endian ordering def np_immutable(a): - '''Make a NumPy array immutable.''' + """Make a NumPy array immutable.""" a.flags.writeable = False return a def np_hash(a): - '''Return a hash of a NumPy array.''' + """Return a hash of a NumPy array.""" if a is None: return hash(None) # Ensure that hashes are equal whatever the ordering in memory (C or @@ -62,8 +62,9 @@ def np_hash(a): class np_hashable: - '''A hashable wrapper around a NumPy array.''' + """A hashable wrapper around a NumPy array.""" # pylint: disable=protected-access,too-few-public-methods + def __init__(self, array): self._array = np_immutable(array.copy()) @@ -78,13 +79,13 @@ def __repr__(self): def eq(x, y): - '''Compare two values up to |PRECISION|.''' + """Compare two values up to |PRECISION|.""" return abs(x - y) <= constants.EPSILON # see http://stackoverflow.com/questions/16003217 def combs(a, r): - '''NumPy implementation of ``itertools.combinations``. + """NumPy implementation of ``itertools.combinations``. Return successive ``r``-length combinations of elements in the array ``a``. @@ -94,7 +95,7 @@ def combs(a, r): Returns: np.ndarray: An array of combinations. - ''' + """ # Special-case for 0-length combinations if r == 0: return np.asarray([]) @@ -107,7 +108,7 @@ def combs(a, r): # see http://stackoverflow.com/questions/16003217/ def comb_indices(n, k): - '''``n``-dimensional version of itertools.combinations. + """``n``-dimensional version of itertools.combinations. Args: a (np.ndarray): The array from which to get combinations. 
@@ -127,7 +128,7 @@ def comb_indices(n, k): [[3, 4], [3, 5], [4, 5]]]) - ''' + """ # Count the number of combinations for preallocation count = comb(n, k, exact=True) # Get numpy iterable from ``itertools.combinations`` @@ -141,7 +142,7 @@ def comb_indices(n, k): # From https://docs.python.org/3/library/itertools.html#itertools-recipes def powerset(iterable, nonempty=False, reverse=False): - '''Generate the power set of an iterable. + """Generate the power set of an iterable. Args: iterable (Iterable): The iterable from which to generate the power set. @@ -163,10 +164,10 @@ def powerset(iterable, nonempty=False, reverse=False): >>> ps = powerset(np.arange(2), nonempty=True, reverse=True) >>> list(ps) [(1, 0), (1,), (0,)] - ''' + """ iterable = list(iterable) - if nonempty: # Don't include 0-length subsets + if nonempty: # Don't include 0-length subsets start = 1 else: start = 0 @@ -181,7 +182,7 @@ def powerset(iterable, nonempty=False, reverse=False): def load_data(directory, num): - '''Load numpy data from the data directory. + """Load numpy data from the data directory. The files should stored in ``../data/`` and named ``0.npy, 1.npy, ... .npy``. @@ -189,8 +190,7 @@ def load_data(directory, num): Returns: list: A list of loaded data, such that ``list[i]`` contains the the contents of ``i.npy``. - ''' - + """ root = os.path.abspath(os.path.dirname(__file__)) def get_path(i): # pylint: disable=missing-docstring diff --git a/pyphi/validate.py b/pyphi/validate.py index 950cdd827..6205ea4a6 100644 --- a/pyphi/validate.py +++ b/pyphi/validate.py @@ -2,9 +2,9 @@ # -*- coding: utf-8 -*- # validate.py -''' +""" Methods for validating arguments. -''' +""" import numpy as np @@ -16,12 +16,12 @@ def direction(direction, allow_bi=False): - '''Validate that the given direction is one of the allowed constants. + """Validate that the given direction is one of the allowed constants. If ``allow_bi`` is ``True`` then ``Direction.BIDIRECTIONAL`` is acceptable. 
- ''' - valid = [Direction.PAST, Direction.FUTURE] + """ + valid = [Direction.CAUSE, Direction.EFFECT] if allow_bi: valid.append(Direction.BIDIRECTIONAL) @@ -32,16 +32,18 @@ def direction(direction, allow_bi=False): def tpm(tpm, check_independence=True): - '''Validate a TPM. + """Validate a TPM. The TPM can be in * 2-dimensional state-by-state form, * 2-dimensional state-by-node form, or - * n-dimensional state-by-node form. - ''' - see_tpm_docs = ('See documentation for `pyphi.Network` for more ' - 'information on TPM formats.') + * multidimensional state-by-node form. + """ + see_tpm_docs = ( + 'See the documentation on TPM conventions and the `pyphi.Network` ' + 'object for more information on TPM forms.' + ) # Cast to np.array. tpm = np.array(tpm) # Get the number of nodes from the state-by-node TPM. @@ -59,18 +61,18 @@ def tpm(tpm, check_independence=True): elif tpm.ndim == (N + 1): if tpm.shape != tuple([2] * N + [N]): raise ValueError( - 'Invalid shape for n-dimensional state-by-node TPM: {}\nThe ' - 'shape should be {} for {} nodes. {}'.format( + 'Invalid shape for multidimensional state-by-node TPM: {}\n' + 'The shape should be {} for {} nodes. {}'.format( tpm.shape, ([2] * N) + [N], N, see_tpm_docs)) else: raise ValueError( - 'Invalid TPM: Must be either 2-dimensional or n-dimensional. ' + 'Invalid TPM: Must be either 2-dimensional or multidimensional. ' '{}'.format(see_tpm_docs)) return True def conditionally_independent(tpm): - '''Validate that the TPM is conditionally independent.''' + """Validate that the TPM is conditionally independent.""" if not config.VALIDATE_CONDITIONAL_INDEPENDENCE: return True tpm = np.array(tpm) @@ -82,13 +84,14 @@ def conditionally_independent(tpm): convert.state_by_node2state_by_state(tpm)) if np.any((tpm - there_and_back_again) >= EPSILON): raise exceptions.ConditionallyDependentError( - 'TPM is not conditionally independent. 
See the conditional ' - 'independence example in the documentation for more info.') + 'TPM is not conditionally independent.\n' + 'See the conditional independence example in the documentation ' + 'for more info.') return True def connectivity_matrix(cm): - '''Validate the given connectivity matrix.''' + """Validate the given connectivity matrix.""" # Special case for empty matrices. if cm.size == 0: return True @@ -103,7 +106,7 @@ def connectivity_matrix(cm): def node_labels(node_labels, node_indices): - '''Validate that there is a label for each node.''' + """Validate that there is a label for each node.""" if node_labels is None: return @@ -116,10 +119,10 @@ def node_labels(node_labels, node_indices): def network(n): - '''Validate a |Network|. + """Validate a |Network|. Checks the TPM and connectivity matrix. - ''' + """ tpm(n.tpm) connectivity_matrix(n.cm) node_labels(n.node_labels, n.node_indices) @@ -130,7 +133,7 @@ def network(n): def is_network(network): - '''Validate that the argument is a |Network|.''' + """Validate that the argument is a |Network|.""" from . 
import Network if not isinstance(network, Network): @@ -139,14 +142,14 @@ def is_network(network): def node_states(state): - '''Check that the state contains only zeros and ones.''' + """Check that the state contains only zeros and ones.""" if not all(n in (0, 1) for n in state): raise ValueError( 'Invalid state: states must consist of only zeros and ones.') def state_length(state, size): - '''Check that the state is the given size.''' + """Check that the state is the given size.""" if len(state) != size: raise ValueError('Invalid state: there must be one entry per ' 'node in the network; this state has {} entries, but ' @@ -155,7 +158,7 @@ def state_length(state, size): def state_reachable(subsystem): - '''Return whether a state can be reached according to the network's TPM.''' + """Return whether a state can be reached according to the network's TPM.""" # If there is a row `r` in the TPM such that all entries of `r - state` are # between -1 and 1, then the given state has a nonzero probability of being # reached from some state. @@ -169,17 +172,17 @@ def state_reachable(subsystem): def cut(cut, node_indices): - '''Check that the cut is for only the given nodes.''' + """Check that the cut is for only the given nodes.""" if cut.indices != node_indices: raise ValueError('{} nodes are not equal to subsystem nodes ' '{}'.format(cut, node_indices)) def subsystem(s): - '''Validate a |Subsystem|. + """Validate a |Subsystem|. Checks its state and cut. 
- ''' + """ node_states(s.state) cut(s.cut, s.cut_indices) if config.VALIDATE_SUBSYSTEM_STATES: @@ -188,13 +191,13 @@ def subsystem(s): def time_scale(time_scale): - '''Validate a macro temporal time scale.''' + """Validate a macro temporal time scale.""" if time_scale <= 0 or isinstance(time_scale, float): raise ValueError('time scale must be a positive integer') def partition(partition): - '''Validate a partition - used by blackboxes and coarse grains.''' + """Validate a partition - used by blackboxes and coarse grains.""" nodes = set() for part in partition: for node in part: @@ -206,7 +209,7 @@ def partition(partition): def coarse_grain(coarse_grain): - '''Validate a macro coarse-graining.''' + """Validate a macro coarse-graining.""" partition(coarse_grain.partition) if len(coarse_grain.partition) != len(coarse_grain.grouping): @@ -222,8 +225,7 @@ def coarse_grain(coarse_grain): def blackbox(blackbox): - '''Validate a macro blackboxing.''' - + """Validate a macro blackboxing.""" if tuple(sorted(blackbox.output_indices)) != blackbox.output_indices: raise ValueError('Output indices {} must be ordered'.format( blackbox.output_indices)) @@ -238,9 +240,9 @@ def blackbox(blackbox): def blackbox_and_coarse_grain(blackbox, coarse_grain): - '''Validate that a coarse-graining properly combines the outputs of a - blackboxing.''' - + """Validate that a coarse-graining properly combines the outputs of a + blackboxing. + """ if blackbox is None: return diff --git a/pyphi_config.yml b/pyphi_config.yml index bd73d857f..7855a243f 100644 --- a/pyphi_config.yml +++ b/pyphi_config.yml @@ -23,22 +23,28 @@ MEASURE: "EMD" PARTITION_TYPE: "BI" # Controls how to resolve phi-ties when computing MICE. 
PICK_SMALLEST_PURVIEW: false -# Use the difference in sum of small phi for the constellation distance -USE_SMALL_PHI_DIFFERENCE_FOR_CONSTELLATION_DISTANCE: false +# Use the difference in sum of small phi for the cause-effect structure +# distance +USE_SMALL_PHI_DIFFERENCE_FOR_CES_DISTANCE: false # The type of system cuts to use SYSTEM_CUTS: "3.0_STYLE" # In some applications of this library, you may want to allow single # micro-nodes with a self-loop to have nonzero Phi. See ``pyphi.config`` for # details. SINGLE_MICRO_NODES_WITH_SELFLOOPS_HAVE_PHI: false +# Controls whether a subsystem's state is validated when the subsystem is +# created. +VALIDATE_SUBSYSTEM_STATES: true +# Controls whether a system is validated for conditional independence. +VALIDATE_CONDITIONAL_INDEPENDENCE: true # Parallelization and system resources # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Controls whether concepts are evaluated in parallel. PARALLEL_CONCEPT_EVALUATION: false -# Controls whether cuts are evaluated in parallel, which requires more -# memory. If cuts are evaluated sequentially, only two BigMips need to be -# in memory at a time. +# Controls whether cuts are evaluated in parallel, which requires more memory. +# If cuts are evaluated sequentially, only two SIAs need to be in memory at a +# time. PARALLEL_CUT_EVALUATION: true # Controls whether complexes are evaluated in parallel. PARALLEL_COMPLEX_EVALUATION: false @@ -47,16 +53,21 @@ PARALLEL_COMPLEX_EVALUATION: false NUMBER_OF_CORES: -1 # Some functions are memoized using an in-memory cache. This is the maximum # percentage of memory that these caches can collectively use. -MAXIMUM_CACHE_MEMORY_PERCENTAGE: 50 +MAXIMUM_CACHE_MEMORY_PERCENTAGE: 100 # Memoization and caching # ~~~~~~~~~~~~~~~~~~~~~~~ -# Controls whether BigMips are cached and retreived. -CACHE_BIGMIPS: false +# Controls whether SIAs are cached. +CACHE_SIAS: false +# Controls whether cause and effect repertoires are cached. 
+CACHE_REPERTOIRES: true # Controls whether the potential purviews of the mechanisms of a network are # cached. Speeds up calculations when the same network is used repeatedly, but # takes up additional memory, and makes network initialization slow. CACHE_POTENTIAL_PURVIEWS: true +# Controls whether subsystem caches are automatically cleared after computing +# the SIA for the subsystem. +CLEAR_SUBSYSTEM_CACHES_AFTER_COMPUTING_SIA: false # The caching system to use. "fs" means cache the results on the local # filesystem, in a subdirectory of the current directory; "db" means connect to # a database and store the results there. @@ -74,7 +85,7 @@ MONGODB_CONFIG: port: 27017 database_name: "pyphi" collection_name: "test" -# Use a Redis server as a Mice cache +# Use a Redis server as a MICE cache REDIS_CACHE: false # Redis connection configuration REDIS_CONFIG: @@ -83,13 +94,12 @@ REDIS_CONFIG: # Logging # ~~~~~~~ -# These are the settings for PyPhi logging. -# The file to log to -LOG_FILE: "pyphi.log" -# The log level to write to `LOG_FILE` -LOG_FILE_LEVEL: "INFO" -# The log level to write to stdout +# The log level to write to stdout. LOG_STDOUT_LEVEL: "WARNING" +# The log level to write to `LOG_FILE`. +LOG_FILE_LEVEL: "INFO" +# The file to log to. +LOG_FILE: "pyphi.log" # Log the current configuration when PyPhi is imported. This is useful for # checking what settings were used for a previous calculation. LOG_CONFIG_ON_IMPORT: true @@ -104,11 +114,3 @@ PRINT_FRACTIONS: true # ~~~~~~~~~~~~~~~~~~~ # The number of decimal places to which Phi values are considered accurate. PRECISION: 6 - -# Miscellaneous -# ~~~~~~~~~~~~~ -# Controls whether a subsystem's state is validated when the subsystem is -# created. -VALIDATE_SUBSYSTEM_STATES: true -# Controls whether a system is validated for conditional independence. 
-VALIDATE_CONDITIONAL_INDEPENDENCE: true diff --git a/pytest.ini b/pytest.ini index 7ef64fb19..ac6dfd065 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,3 +1,3 @@ [pytest] -addopts = --maxfail=0 --durations=5 --color=yes --tb=auto --doctest-glob='*.rst' --doctest-modules --ignore ./profiling --ignore ./benchmarks -vv +addopts = --maxfail=1 --durations=5 --color=yes --tb=auto --doctest-glob='*.rst' --doctest-modules --ignore ./profiling --ignore ./benchmarks -vv norecursedirs = docs/_* diff --git a/test/conftest.py b/test/conftest.py index 1c844f4b6..487ef72cb 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -38,7 +38,7 @@ def _flush_database_cache(): @pytest.fixture def flushcache(): - '''Flush the currently enabled cache.''' + """Flush the currently enabled cache.""" def cache_flusher(): log.info("FLUSHING CACHE!") if config.CACHING_BACKEND == constants.DATABASE: @@ -50,8 +50,8 @@ def cache_flusher(): @pytest.fixture(scope="session") def restore_fs_cache(request): - '''Temporarily backup, then restore, the user's joblib cache after each - testing session.''' + """Temporarily backup, then restore, the user's joblib cache after each + testing session.""" # Move the joblib cache to a backup location and create a fresh cache if # filesystem caching is enabled if config.CACHING_BACKEND == constants.FILESYSTEM: diff --git a/test/data/rule152_results.pkl b/test/data/rule152_results.pkl index 01d1c2c7d..1006f8f1e 100644 Binary files a/test/data/rule152_results.pkl and b/test/data/rule152_results.pkl differ diff --git a/test/example_networks.py b/test/example_networks.py index fbbe38db6..645a8fc39 100644 --- a/test/example_networks.py +++ b/test/example_networks.py @@ -62,7 +62,7 @@ def noised(cm=False): [1, 1, 1] ]) cm = cm if use_connectivity_matrices else None - return Network(tpm, connectivity_matrix=cm) + return Network(tpm, cm=cm) def s_noised(): @@ -106,20 +106,20 @@ def simple(cm=False): TPM: - +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+ - | Past state ~~> Current 
state | - |~~~~~~~~~~~~~~+~~~~~~~~~~~~~~~~| - | A, B, C | A, B, C | - |~~~~~~~~~~~~~~+~~~~~~~~~~~~~~~~| - | {0, 0, 0} | {0, 0, 0} | - | {0, 0, 1} | {0, 0, 0} | - | {0, 1, 0} | {0, 0, 0} | - | {0, 1, 1} | {1, 0, 0} | - | {1, 0, 0} | {0, 0, 0} | - | {1, 0, 1} | {0, 0, 0} | - | {1, 1, 0} | {0, 0, 0} | - | {1, 1, 1} | {0, 0, 0} | - +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+ + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+ + | Previous state ~~> Current state | + |~~~~~~~~~~~~~~~~~~+~~~~~~~~~~~~~~~~| + | A, B, C | A, B, C | + |~~~~~~~~~~~~~~~~~~+~~~~~~~~~~~~~~~~| + | {0, 0, 0} | {0, 0, 0} | + | {0, 0, 1} | {0, 0, 0} | + | {0, 1, 0} | {0, 0, 0} | + | {0, 1, 1} | {1, 0, 0} | + | {1, 0, 0} | {0, 0, 0} | + | {1, 0, 1} | {0, 0, 0} | + | {1, 1, 0} | {0, 0, 0} | + | {1, 1, 1} | {0, 0, 0} | + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+ """ tpm = np.array([ [0, 0, 0], @@ -133,7 +133,7 @@ def simple(cm=False): ]) if cm is False: cm = None - return Network(tpm, connectivity_matrix=cm) + return Network(tpm, cm=cm) def simple_subsys_all_off(): @@ -183,7 +183,7 @@ def big(cm=None): [1, 1, 1, 1, 1], [1, 1, 1, 1, 1] ]) - return Network(tpm, connectivity_matrix=cm) + return Network(tpm, cm=cm) def big_subsys_all(): @@ -208,7 +208,7 @@ def reducible(cm=False): if cm is False: cm = np.array([[1, 0], [0, 1]]) - r = Network(tpm, connectivity_matrix=cm) + r = Network(tpm, cm=cm) state = (0, 0) # Return the full subsystem return Subsystem(r, state, range(r.size)) @@ -257,7 +257,7 @@ def rule30(cm=False): [0, 0, 1, 1, 1], [1, 0, 0, 1, 1] ]) - rule30 = Network(tpm, connectivity_matrix=cm) + rule30 = Network(tpm, cm=cm) all_off = (0, 0, 0, 0, 0) return Subsystem(rule30, all_off, range(rule30.size)) @@ -269,7 +269,7 @@ def trivial(): [1] ]) cm = np.array([[1]]) - net = Network(tpm, connectivity_matrix=cm) + net = Network(tpm, cm=cm) state = (1, ) return Subsystem(net, state, range(net.size)) @@ -545,7 +545,7 @@ def eight_node(cm=False): [0, 0, 0, 0, 0, 1, 1, 1], [1, 0, 0, 0, 0, 0, 1, 1] ]) - return Network(tpm, 
connectivity_matrix=cm) + return Network(tpm, cm=cm) def eights(): @@ -573,7 +573,7 @@ def eight_node_sbs(cm=False): [0, 0, 0, 0, 0, 1, 1, 1], [1, 0, 0, 0, 0, 0, 1, 1] ]) - return Network(tpm, connectivity_matrix=cm) + return Network(tpm, cm=cm) def rule152(cm=False): @@ -619,7 +619,7 @@ def rule152(cm=False): [0, 0, 1, 1, 1], [1, 0, 0, 1, 1]] ) - return Network(tpm, connectivity_matrix=cm) + return Network(tpm, cm=cm) def rule152_s(): @@ -646,7 +646,7 @@ def macro(cm=False): [1, 1], [1, 1] ]) - return Network(tpm, connectivity_matrix=cm) + return Network(tpm, cm=cm) def macro_s(): @@ -697,7 +697,7 @@ def micro(cm=False): [1, 1, 1, 1], [1, 1, 1, 1] ]) - return Network(tpm, connectivity_matrix=cm) + return Network(tpm, cm=cm) def micro_s(): @@ -714,9 +714,9 @@ def micro_s_all_off(): # TODO: move to pyphi.examples? def propagation_delay(): - '''The basic PyPhi subsystem with COPY gates on each of the connections in + """The basic PyPhi subsystem with COPY gates on each of the connections in the original network, blackboxed over two time steps. - ''' + """ nodes = 8 tpm = np.zeros((2 ** nodes, nodes)) diff --git a/test/test_actual.py b/test/test_actual.py index 37d0bdf4c..73ba7c250 100644 --- a/test/test_actual.py +++ b/test/test_actual.py @@ -16,7 +16,7 @@ @pytest.fixture def transition(): - '''An OR gate with two inputs. The OR gate is ON, others are OFF.''' + """An OR gate with two inputs. The OR gate is ON, others are OFF.""" tpm = np.array([ [0, 0.5, 0.5], [0, 0.5, 0.5], @@ -54,11 +54,11 @@ def prevention(): @pytest.fixture def background_all_on(): - '''Two OR gates, both ON. + """Two OR gates, both ON. If we look at the transition A -> B, then B should be frozen at t-1, and A should have no effect on B. 
- ''' + """ tpm = np.array([ [0, 0], [1, 1], @@ -72,7 +72,7 @@ def background_all_on(): @pytest.fixture def background_all_off(): - '''Two OR gates, both OFF.''' + """Two OR gates, both OFF.""" tpm = np.array([ [0, 0], [1, 1], @@ -85,11 +85,12 @@ def background_all_off(): @pytest.mark.parametrize('transition,direction,mechanism,purview,ratio', [ - (background_all_off, Direction.FUTURE, (0,), (1,), 1), - (background_all_off, Direction.PAST, (1,), (0,), 1), - (background_all_on, Direction.FUTURE, (0,), (1,), 0), - (background_all_on, Direction.PAST, (1,), (0,), 0)]) -def test_background_conditions(transition, direction, mechanism, purview, ratio): + (background_all_off, Direction.EFFECT, (0,), (1,), 1), + (background_all_off, Direction.CAUSE, (1,), (0,), 1), + (background_all_on, Direction.EFFECT, (0,), (1,), 0), + (background_all_on, Direction.CAUSE, (1,), (0,), 0)]) +def test_background_conditions(transition, direction, mechanism, purview, + ratio): assert transition()._ratio(direction, mechanism, purview) == ratio @@ -105,8 +106,8 @@ def test_background_noised(): transition = actual.Transition(network, state, state, (0,), (1,), noise_background=True) - assert transition._ratio(Direction.FUTURE, (0,), (1,)) == 0.415037 - assert transition._ratio(Direction.PAST, (1,), (0,)) == 0.415037 + assert transition._ratio(Direction.EFFECT, (0,), (1,)) == 0.415037 + assert transition._ratio(Direction.CAUSE, (1,), (0,)) == 0.415037 # Elements outside the transition are also frozen transition = actual.Transition(network, state, state, (0,), (0,), @@ -117,7 +118,7 @@ def test_background_noised(): @pytest.fixture def background_3_node(): - '''A is MAJ(ABC). B is OR(A, C). C is COPY(A).''' + """A is MAJ(ABC). B is OR(A, C). C is COPY(A).""" tpm = np.array([ [0, 0, 0], [0, 1, 1], @@ -137,22 +138,22 @@ def background_3_node(): # If C = 0, then AB over AC should be irreducible. 
((1, 1, 0), (0, 2), 1.0)]) def test_background_3_node(before_state, purview, alpha, background_3_node): - '''Looking at transition (AB = 11) -> (AC = 11)''' + """Looking at transition (AB = 11) -> (AC = 11)""" after_state = (1, 1, 1) - transition = actual.Transition(background_3_node, before_state, after_state, - (0, 1), (0, 2)) - causal_link = transition.find_causal_link(Direction.FUTURE, (0, 1)) + transition = actual.Transition(background_3_node, before_state, + after_state, (0, 1), (0, 2)) + causal_link = transition.find_causal_link(Direction.EFFECT, (0, 1)) assert causal_link.purview == purview assert causal_link.alpha == alpha def test_potential_purviews(background_3_node): - '''Purviews must be a subset of the corresponding cause/effect system.''' + """Purviews must be a subset of the corresponding cause/effect system.""" transition = actual.Transition(background_3_node, (1, 1, 1), (1, 1, 1), (0, 1), (0, 2)) - assert transition.potential_purviews(Direction.PAST, (0, 2)) == [ + assert transition.potential_purviews(Direction.CAUSE, (0, 2)) == [ (0,), (1,), (0, 1)] - assert transition.potential_purviews(Direction.FUTURE, (0, 1)) == [ + assert transition.potential_purviews(Direction.EFFECT, (0, 1)) == [ (0,), (2,), (0, 2)] @@ -167,28 +168,28 @@ def test_transition_initialization(transition): def test_purview_state(transition): - assert transition.purview_state(Direction.PAST) == (0, 1, 1) - assert transition.purview_state(Direction.FUTURE) == (1, 0, 0) + assert transition.purview_state(Direction.CAUSE) == (0, 1, 1) + assert transition.purview_state(Direction.EFFECT) == (1, 0, 0) def test_mechanism_state(transition): - assert transition.mechanism_state(Direction.PAST) == (1, 0, 0) - assert transition.mechanism_state(Direction.FUTURE) == (0, 1, 1) + assert transition.mechanism_state(Direction.CAUSE) == (1, 0, 0) + assert transition.mechanism_state(Direction.EFFECT) == (0, 1, 1) def test_mechanism_indices(transition): - assert 
transition.mechanism_indices(Direction.PAST) == (0,) - assert transition.mechanism_indices(Direction.FUTURE) == (1, 2) + assert transition.mechanism_indices(Direction.CAUSE) == (0,) + assert transition.mechanism_indices(Direction.EFFECT) == (1, 2) def test_purview_indices(transition): - assert transition.purview_indices(Direction.PAST) == (1, 2) - assert transition.purview_indices(Direction.FUTURE) == (0,) + assert transition.purview_indices(Direction.CAUSE) == (1, 2) + assert transition.purview_indices(Direction.EFFECT) == (0,) def test_system_dict(transition): - assert transition.system[Direction.PAST] == transition.cause_system - assert transition.system[Direction.FUTURE] == transition.effect_system + assert transition.system[Direction.CAUSE] == transition.cause_system + assert transition.system[Direction.EFFECT] == transition.effect_system def test_transition_len(transition, empty_transition): @@ -207,7 +208,7 @@ def test_transition_equal(transition, empty_transition): def test_transition_apply_cut(transition): - cut = ac_cut(Direction.PAST, Part((1,), (2,)), Part((), (0,))) + cut = ac_cut(Direction.CAUSE, Part((1,), (2,)), Part((), (0,))) cut_transition = transition.apply_cut(cut) assert cut_transition.before_state == transition.before_state assert cut_transition.after_state == transition.after_state @@ -220,10 +221,11 @@ def test_transition_apply_cut(transition): def test_to_json(transition): transition.to_json() + # Test AC models # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -def acmip(**kwargs): +def acria(**kwargs): defaults = { 'alpha': 0.0, 'state': None, @@ -235,64 +237,69 @@ def acmip(**kwargs): 'partitioned_probability': 0.0, } defaults.update(kwargs) - return models.AcMip(**defaults) + return models.AcRepertoireIrreducibilityAnalysis(**defaults) def causal_link(**kwargs): - return models.CausalLink(acmip(**kwargs)) + return models.CausalLink(acria(**kwargs)) def account(links=()): return models.Account(links) -def 
ac_bigmip(**kwargs): +def ac_sia(**kwargs): defaults = { 'alpha': 0.0, 'direction': Direction.BIDIRECTIONAL, - 'unpartitioned_account': account(), + 'account': account(), 'partitioned_account': account(), 'transition': None, 'cut': None } defaults.update(kwargs) - return models.AcBigMip(**defaults) + return models.AcSystemIrreducibilityAnalysis(**defaults) -def test_acmip_ordering(): - assert acmip() == acmip() - assert acmip(alpha=0.0) < acmip(alpha=1.0) - assert acmip(alpha=0.0, mechanism=(1, 2)) <= acmip(alpha=1.0, mechanism=(1,)) - assert acmip(alpha=0.0, mechanism=(1, 2)) > acmip(alpha=0.0, mechanism=(1,)) +def test_acria_ordering(): + assert acria() == acria() + assert acria(alpha=0.0) < acria(alpha=1.0) + assert (acria(alpha=0.0, mechanism=(1, 2)) <= + acria(alpha=1.0, mechanism=(1,))) + assert (acria(alpha=0.0, mechanism=(1, 2)) > + acria(alpha=0.0, mechanism=(1,))) - assert bool(acmip(alpha=1.0)) is True - assert bool(acmip(alpha=0.0)) is False - assert bool(acmip(alpha=-1)) is False + assert bool(acria(alpha=1.0)) is True + assert bool(acria(alpha=0.0)) is False + assert bool(acria(alpha=-1)) is False with pytest.raises(TypeError): - acmip(direction=Direction.PAST) < acmip(direction=Direction.FUTURE) + acria(direction=Direction.CAUSE) < acria(direction=Direction.EFFECT) with config.override(PICK_SMALLEST_PURVIEW=True): - assert acmip(purview=(1,)) > acmip(purview=(0, 2)) + assert acria(purview=(1,)) > acria(purview=(0, 2)) -def test_acmip_hash(): - hash(acmip()) +def test_acria_hash(): + hash(acria()) -def test_acmip_phi_alias(): - assert acmip(alpha=3.3).phi == 3.3 +def test_acria_phi_alias(): + assert acria(alpha=3.3).phi == 3.3 def test_causal_link_ordering(): assert causal_link() == causal_link() assert causal_link(alpha=0.0) < causal_link(alpha=1.0) - assert causal_link(alpha=0.0, mechanism=(1, 2)) <= causal_link(alpha=1.0, mechanism=(1,)) - assert causal_link(alpha=0.0, mechanism=(1, 2)) > causal_link(alpha=0.0, mechanism=(1,)) + assert 
(causal_link(alpha=0.0, mechanism=(1, 2)) <= + causal_link(alpha=1.0, mechanism=(1,))) + assert (causal_link(alpha=0.0, mechanism=(1, 2)) > + causal_link(alpha=0.0, mechanism=(1,))) with pytest.raises(TypeError): - causal_link(direction=Direction.PAST) < causal_link(direction=Direction.FUTURE) + (causal_link(direction=Direction.CAUSE) < + causal_link(direction=Direction.EFFECT)) assert bool(causal_link(alpha=1.0)) is True assert bool(causal_link(alpha=0.0)) is False @@ -300,8 +307,8 @@ def test_causal_link_ordering(): def test_account_irreducible_causes_and_effects(): - cause = causal_link(direction=Direction.PAST) - effect = causal_link(direction=Direction.FUTURE) + cause = causal_link(direction=Direction.CAUSE) + effect = causal_link(direction=Direction.EFFECT) account = models.Account((cause, effect)) assert account.irreducible_causes == (cause,) @@ -313,29 +320,30 @@ def test_account_repr_and_str(): repr(models.Account()) -def test_ac_big_mip_repr_and_str(transition): - bm = ac_bigmip(transition=transition) +def test_ac_sia_repr_and_str(transition): + bm = ac_sia(transition=transition) str(bm) repr(bm) -def test_ac_big_mip_ordering(transition, empty_transition): - assert ac_bigmip() == ac_bigmip() - assert hash(ac_bigmip()) == hash(ac_bigmip()) +def test_ac_sia_ordering(transition, empty_transition): + assert ac_sia() == ac_sia() + assert hash(ac_sia()) == hash(ac_sia()) - assert (ac_bigmip(alpha=1.0, transition=transition) > - ac_bigmip(alpha=0.5, transition=transition)) - assert (ac_bigmip(alpha=1.0, transition=empty_transition) <= - ac_bigmip(alpha=1.0, transition=transition)) + assert (ac_sia(alpha=1.0, transition=transition) > + ac_sia(alpha=0.5, transition=transition)) + assert (ac_sia(alpha=1.0, transition=empty_transition) <= + ac_sia(alpha=1.0, transition=transition)) @pytest.mark.parametrize('direction,mechanism,purview,repertoire', [ - (Direction.PAST, (0,), (1,), [[[0.3333333], [0.66666667]]]), - (Direction.PAST, (0,), (2,), [[[0.3333333, 
0.66666667]]]), - (Direction.PAST, (0,), (1, 2), [[[0, 0.3333333], [0.3333333, 0.3333333]]]), - (Direction.FUTURE, (1,), (0,), [[[0]], [[1]]]), - (Direction.FUTURE, (2,), (0,), [[[0]], [[1]]]), - (Direction.FUTURE, (1, 2), (0,), [[[0]], [[1]]]), + (Direction.CAUSE, (0,), (1,), [[[0.3333333], [0.66666667]]]), + (Direction.CAUSE, (0,), (2,), [[[0.3333333, 0.66666667]]]), + (Direction.CAUSE, (0,), (1, 2), [[[0, 0.3333333], + [0.3333333, 0.3333333]]]), + (Direction.EFFECT, (1,), (0,), [[[0]], [[1]]]), + (Direction.EFFECT, (2,), (0,), [[[0]], [[1]]]), + (Direction.EFFECT, (1, 2), (0,), [[[0]], [[1]]]), ]) def test_repertoires(direction, mechanism, purview, repertoire, transition): np.testing.assert_array_almost_equal( @@ -343,8 +351,8 @@ def test_repertoires(direction, mechanism, purview, repertoire, transition): def test_invalid_repertoires(transition): - '''Check that elements outside the transition cannot be passed in - the mechanism or purview.''' + """Check that elements outside the transition cannot be passed in + the mechanism or purview.""" with pytest.raises(ValueError): transition.effect_repertoire((1, 2), (0, 1)) @@ -366,12 +374,12 @@ def test_unconstrained_repertoires(transition): @pytest.mark.parametrize('direction,mechanism,purview,probability', [ - (Direction.PAST, (0,), (1,), 0.66666667), - (Direction.PAST, (0,), (2,), 0.66666667), - (Direction.PAST, (0,), (1, 2), 0.3333333), - (Direction.FUTURE, (1,), (0,), 1), - (Direction.FUTURE, (2,), (0,), 1), - (Direction.FUTURE, (1, 2), (0,), 1), + (Direction.CAUSE, (0,), (1,), 0.66666667), + (Direction.CAUSE, (0,), (2,), 0.66666667), + (Direction.CAUSE, (0,), (1, 2), 0.3333333), + (Direction.EFFECT, (1,), (0,), 1), + (Direction.EFFECT, (2,), (0,), 1), + (Direction.EFFECT, (1, 2), (0,), 1), ]) def test_probability(direction, mechanism, purview, probability, transition): assert np.isclose(transition.probability(direction, mechanism, purview), @@ -379,8 +387,8 @@ def test_probability(direction, mechanism, purview, 
probability, transition): def test_unconstrained_probability(transition): - assert transition.unconstrained_probability(Direction.PAST, (1,)) == 0.5 - assert transition.unconstrained_probability(Direction.FUTURE, (0,)) == 0.75 + assert transition.unconstrained_probability(Direction.CAUSE, (1,)) == 0.5 + assert transition.unconstrained_probability(Direction.EFFECT, (0,)) == 0.75 @pytest.mark.parametrize('mechanism,purview,ratio', [ @@ -402,54 +410,54 @@ def test_effect_ratio(mechanism, purview, ratio, transition): def test_ac_ex1_transition(transition): - '''Basic regression test for ac_ex1 example.''' + """Basic regression test for ac_ex1 example.""" - cause_account = actual.account(transition, Direction.PAST) + cause_account = actual.account(transition, Direction.CAUSE) assert len(cause_account) == 1 - cmip = cause_account[0].mip - - assert cmip.mechanism == (0,) - assert cmip.purview == (1,) - assert cmip.direction == Direction.PAST - assert cmip.state == (1, 0, 0) - assert cmip.alpha == 0.415037 - assert cmip.probability == 0.66666666666666663 - assert cmip.partitioned_probability == 0.5 - assert cmip.partition == (((), (1,)), ((0,), ())) - - effect_account = actual.account(transition, Direction.FUTURE) + cria = cause_account[0].ria + + assert cria.mechanism == (0,) + assert cria.purview == (1,) + assert cria.direction == Direction.CAUSE + assert cria.state == (1, 0, 0) + assert cria.alpha == 0.415037 + assert cria.probability == 0.66666666666666663 + assert cria.partitioned_probability == 0.5 + assert cria.partition == (((), (1,)), ((0,), ())) + + effect_account = actual.account(transition, Direction.EFFECT) assert len(effect_account) == 2 - emip0 = effect_account[0].mip - emip1 = effect_account[1].mip - - assert emip0.mechanism == (1,) - assert emip0.purview == (0,) - assert emip0.direction == Direction.FUTURE - assert emip0.state == (0, 1, 1) - assert emip0.alpha == 0.415037 - assert emip0.probability == 1.0 - assert emip0.partitioned_probability == 0.75 - 
assert emip0.partition == (((), (0,)), ((1,), ())) - - assert emip1.mechanism == (2,) - assert emip1.purview == (0,) - assert emip1.direction == Direction.FUTURE - assert emip1.state == (0, 1, 1) - assert emip1.alpha == 0.415037 - assert emip1.probability == 1.0 - assert emip1.partitioned_probability == 0.75 - assert emip1.partition == (((), (0,)), ((2,), ())) + eria0 = effect_account[0].ria + eria1 = effect_account[1].ria + + assert eria0.mechanism == (1,) + assert eria0.purview == (0,) + assert eria0.direction == Direction.EFFECT + assert eria0.state == (0, 1, 1) + assert eria0.alpha == 0.415037 + assert eria0.probability == 1.0 + assert eria0.partitioned_probability == 0.75 + assert eria0.partition == (((), (0,)), ((1,), ())) + + assert eria1.mechanism == (2,) + assert eria1.purview == (0,) + assert eria1.direction == Direction.EFFECT + assert eria1.state == (0, 1, 1) + assert eria1.alpha == 0.415037 + assert eria1.probability == 1.0 + assert eria1.partitioned_probability == 0.75 + assert eria1.partition == (((), (0,)), ((2,), ())) def test_actual_cut_indices(): - cut = ac_cut(Direction.PAST, Part((0,), (2,)), Part((4,), (5,))) + cut = ac_cut(Direction.CAUSE, Part((0,), (2,)), Part((4,), (5,))) assert cut.indices == (0, 2, 4, 5) - cut = ac_cut(Direction.FUTURE, Part((0, 2), (0, 2)), Part((), ())) + cut = ac_cut(Direction.EFFECT, Part((0, 2), (0, 2)), Part((), ())) assert cut.indices == (0, 2) def test_actual_apply_cut(): - cut = ac_cut(Direction.PAST, Part((0,), (0, 2)), Part((2,), ())) + cut = ac_cut(Direction.CAUSE, Part((0,), (0, 2)), Part((2,), ())) cm = np.ones((3, 3)) assert np.array_equal(cut.apply_cut(cm), np.array([ [1, 1, 0], @@ -458,7 +466,7 @@ def test_actual_apply_cut(): def test_actual_cut_matrix(): - cut = ac_cut(Direction.PAST, Part((0,), (0, 2)), Part((2,), ())) + cut = ac_cut(Direction.CAUSE, Part((0,), (0, 2)), Part((2,), ())) assert np.array_equal(cut.cut_matrix(3), np.array([ [0, 0, 1], [0, 0, 0], @@ -472,46 +480,53 @@ def ac_cut(direction, 
*parts): @config.override(PARTITION_TYPE='TRI') @pytest.mark.parametrize('direction,answer', [ (Direction.BIDIRECTIONAL, [ - ac_cut(Direction.PAST, Part((), ()), Part((), (1, 2)), Part((0,), ())), - ac_cut(Direction.FUTURE, Part((), ()), Part((1,), (0,)), Part((2,), ())), - ac_cut(Direction.FUTURE, Part((), ()), Part((1,), ()), Part((2,), (0,)))]), - (Direction.PAST, [ - ac_cut(Direction.PAST, Part((), ()), Part((), (1, 2)), Part((0,), ()))]), - (Direction.FUTURE, [ - ac_cut(Direction.FUTURE, Part((), ()), Part((), (0,)), Part((1, 2), ())), - ac_cut(Direction.FUTURE, Part((), ()), Part((1,), (0,)), Part((2,), ())), - ac_cut(Direction.FUTURE, Part((), ()), Part((1,), ()), Part((2,), (0,)))])]) + ac_cut(Direction.CAUSE, + Part((), ()), Part((), (1, 2)), Part((0,), ())), + ac_cut(Direction.EFFECT, + Part((), ()), Part((1,), (0,)), Part((2,), ())), + ac_cut(Direction.EFFECT, + Part((), ()), Part((1,), ()), Part((2,), (0,)))]), + (Direction.CAUSE, [ + ac_cut(Direction.CAUSE, + Part((), ()), Part((), (1, 2)), Part((0,), ()))]), + (Direction.EFFECT, [ + ac_cut(Direction.EFFECT, + Part((), ()), Part((), (0,)), Part((1, 2), ())), + ac_cut(Direction.EFFECT, + Part((), ()), Part((1,), (0,)), Part((2,), ())), + ac_cut(Direction.EFFECT, + Part((), ()), Part((1,), ()), Part((2,), (0,)))])]) def test_get_actual_cuts(direction, answer, transition): cuts = list(actual._get_cuts(transition, direction)) print(cuts, answer) np.testing.assert_array_equal(cuts, answer) -def test_big_acmip(transition): - bigmip = actual.big_acmip(transition) - assert bigmip.alpha == 0.415037 - assert bigmip.cut == ac_cut(Direction.PAST, Part((), (1,)), Part((0,), (2,))) - assert len(bigmip.unpartitioned_account) == 3 - assert len(bigmip.partitioned_account) == 2 +def test_sia(transition): + sia = actual.sia(transition) + assert sia.alpha == 0.415037 + assert sia.cut == ac_cut(Direction.CAUSE, Part((), (1,)), Part((0,), (2,))) + assert len(sia.account) == 3 + assert len(sia.partitioned_account) == 2 -def 
test_null_ac_bigmip(transition): - bigmip = actual._null_ac_bigmip(transition, Direction.PAST) - assert bigmip.transition == transition - assert bigmip.direction == Direction.PAST - assert bigmip.unpartitioned_account == () - assert bigmip.partitioned_account == () - assert bigmip.alpha == 0.0 +def test_null_ac_sia(transition): + sia = actual._null_ac_sia(transition, Direction.CAUSE) + assert sia.transition == transition + assert sia.direction == Direction.CAUSE + assert sia.account == () + assert sia.partitioned_account == () + assert sia.alpha == 0.0 - bigmip = actual._null_ac_bigmip(transition, Direction.PAST, alpha=float('inf')) - assert bigmip.alpha == float('inf') + sia = actual._null_ac_sia(transition, Direction.CAUSE, alpha=float('inf')) + assert sia.alpha == float('inf') @config.override(PARTITION_TYPE='TRI') def test_prevention(prevention): - assert actual.big_acmip(prevention, Direction.PAST).alpha == 0.415037 - assert actual.big_acmip(prevention, Direction.FUTURE).alpha == 0.0 - assert actual.big_acmip(prevention, Direction.BIDIRECTIONAL).alpha == 0.0 + assert actual.sia(prevention, Direction.CAUSE).alpha == 0.415037 + assert actual.sia(prevention, Direction.EFFECT).alpha == 0.0 + assert actual.sia(prevention, Direction.BIDIRECTIONAL).alpha == 0.0 def test_causal_nexus(standard): @@ -523,7 +538,7 @@ def test_causal_nexus(standard): def test_true_events(standard): - states = ((1, 0, 0), (0, 0, 1), (1, 1, 0)) # Past, current, future + states = ((1, 0, 0), (0, 0, 1), (1, 1, 0)) # Previous, current, next events = actual.true_events(standard, *states) assert len(events) == 2 @@ -534,12 +549,12 @@ def test_true_events(standard): assert true_cause1.alpha == 1.0 assert true_cause1.mechanism == (1,) assert true_cause1.purview == (2,) - assert true_cause1.direction == Direction.PAST + assert true_cause1.direction == Direction.CAUSE assert true_effect1.alpha == 1.0 assert true_effect1.mechanism == (1,) assert true_effect1.purview == (2,) - assert 
true_effect1.direction == Direction.FUTURE + assert true_effect1.direction == Direction.EFFECT true_cause2, true_effect2 = events[1] assert events[1].mechanism == (2,) @@ -547,24 +562,24 @@ def test_true_events(standard): assert true_cause2.alpha == 1.0 assert true_cause2.mechanism == (2,) assert true_cause2.purview == (1,) - assert true_cause2.direction == Direction.PAST + assert true_cause2.direction == Direction.CAUSE assert true_effect2.alpha == 1.0 assert true_effect2.mechanism == (2,) assert true_effect2.purview == (1,) - assert true_effect2.direction == Direction.FUTURE + assert true_effect2.direction == Direction.EFFECT -def test_true_constellation(standard): - past_state = (1, 0, 0) +def test_true_ces(standard): + previous_state = (1, 0, 0) current_state = (0, 0, 1) - future_state = (1, 1, 0) + next_state = (1, 1, 0) subsystem = Subsystem(standard, current_state, standard.node_indices) - constellation = actual.true_constellation(subsystem, past_state, future_state) + ces = actual.true_ces(subsystem, previous_state, next_state) - assert len(constellation) == 2 - actual_cause, actual_effect = constellation + assert len(ces) == 2 + actual_cause, actual_effect = ces assert actual_cause.purview == (0, 1) assert actual_cause.mechanism == (2,) @@ -574,7 +589,7 @@ def test_true_constellation(standard): def test_extrinsic_events(standard): - states = ((1, 0, 0), (0, 0, 1), (1, 1, 0)) # Past, current, future + states = ((1, 0, 0), (0, 0, 1), (1, 1, 0)) # Previous, current, next events = actual.extrinsic_events(standard, *states) @@ -586,9 +601,9 @@ def test_extrinsic_events(standard): assert true_cause.alpha == 1.0 assert true_cause.mechanism == (2,) assert true_cause.purview == (0, 1) - assert true_cause.direction == Direction.PAST + assert true_cause.direction == Direction.CAUSE assert true_effect.alpha == 1.0 assert true_effect.mechanism == (2,) assert true_effect.purview == (1,) - assert true_effect.direction == Direction.FUTURE + assert true_effect.direction == 
Direction.EFFECT diff --git a/test/test_big_phi.py b/test/test_big_phi.py index 631582132..e65da2180 100644 --- a/test/test_big_phi.py +++ b/test/test_big_phi.py @@ -5,15 +5,11 @@ import pickle from unittest.mock import patch -import numpy as np import pytest -from pyphi import (Direction, Network, Subsystem, compute, config, constants, - models, utils) -from pyphi.compute import constellation -from pyphi.compute.big_phi import FindMip, big_mip_bipartitions -from pyphi.models import Cut, _null_bigmip -from pyphi.partition import directed_bipartition +from pyphi import Network, Subsystem, compute, config, constants, models, utils +from pyphi.compute.subsystem import (ComputeSystemIrreducibility, + sia_bipartitions) # pylint: disable=unused-argument @@ -30,7 +26,7 @@ (0, 1): 0.333333, (0, 1, 2): 0.5 }, - 'len_partitioned_constellation': 1, + 'len_partitioned_ces': 1, 'sum_partitioned_small_phis': 0.5, 'cut': models.Cut(from_nodes=(1, 2), to_nodes=(0,)) } @@ -47,7 +43,7 @@ (1, 2): 0.263847, (0, 1, 2): 0.35 }, - 'len_partitioned_constellation': 7, + 'len_partitioned_ces': 7, 'sum_partitioned_small_phis': 0.504906, 'cut': models.Cut(from_nodes=(1, 2), to_nodes=(0,)) } @@ -87,7 +83,7 @@ (0, 2, 3, 4): 0.185709, (1, 2, 3, 4): 0.185709 }, - 'len_partitioned_constellation': 17, + 'len_partitioned_ces': 17, 'sum_partitioned_small_phis': 3.564909, 'cut': models.Cut(from_nodes=(2, 4), to_nodes=(0, 1, 3)) } @@ -103,7 +99,7 @@ (0, 1): 0.133333, (1, 2): 0.133333 }, - 'len_partitioned_constellation': 5, + 'len_partitioned_ces': 5, 'sum_partitioned_small_phis': 0.883334, 'cut': models.Cut(from_nodes=(1, 3), to_nodes=(0, 2)) } @@ -144,7 +140,7 @@ (1, 2, 3, 4): 0.25, (0, 1, 2, 3, 4): 0.25 }, - 'len_partitioned_constellation': 24, + 'len_partitioned_ces': 24, 'sum_partitioned_small_phis': 4.185363, 'cuts': [ models.Cut(from_nodes=(0, 1, 2, 3), to_nodes=(4,)), @@ -192,244 +188,241 @@ # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -def 
check_unpartitioned_small_phis(small_phis, unpartitioned_constellation): - assert len(small_phis) == len(unpartitioned_constellation) - for c in unpartitioned_constellation: +def check_unpartitioned_small_phis(small_phis, ces): + assert len(small_phis) == len(ces) + for c in ces: assert c.phi == small_phis[c.mechanism] -def check_partitioned_small_phis(answer, partitioned_constellation): - if 'len_partitioned_constellation' in answer: - assert (answer['len_partitioned_constellation'] == - len(partitioned_constellation)) +def check_partitioned_small_phis(answer, partitioned_ces): + if 'len_partitioned_ces' in answer: + assert (answer['len_partitioned_ces'] == + len(partitioned_ces)) if 'sum_partitioned_small_phis' in answer: - assert (round(sum(c.phi for c in partitioned_constellation), + assert (round(sum(c.phi for c in partitioned_ces), config.PRECISION) == answer['sum_partitioned_small_phis']) -def check_mip(mip, answer): +def check_sia(sia, answer): # Check big phi value. - assert mip.phi == answer['phi'] - # Check small phis of unpartitioned constellation. + assert sia.phi == answer['phi'] + # Check small phis of unpartitioned CES. check_unpartitioned_small_phis(answer['unpartitioned_small_phis'], - mip.unpartitioned_constellation) - # Check sum of small phis of partitioned constellation if answer is + sia.ces) + # Check sum of small phis of partitioned CES if answer is # available. - check_partitioned_small_phis(answer, mip.partitioned_constellation) + check_partitioned_small_phis(answer, sia.partitioned_ces) # Check cut. 
if 'cut' in answer: - assert mip.cut == answer['cut'] + assert sia.cut == answer['cut'] elif 'cuts' in answer: - assert mip.cut in answer['cuts'] + assert sia.cut in answer['cuts'] # Tests # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -def test_null_concept(s, flushcache, restore_fs_cache): - flushcache() - cause = models.Mice(models.Mip( - unpartitioned_repertoire=s.unconstrained_cause_repertoire(()), - phi=0, direction=Direction.PAST, mechanism=(), purview=(), - partition=None, partitioned_repertoire=None)) - effect = models.Mice(models.Mip( - unpartitioned_repertoire=s.unconstrained_effect_repertoire(()), - phi=0, direction=Direction.FUTURE, mechanism=(), purview=(), - partition=None, partitioned_repertoire=None)) - assert (s.null_concept == - models.Concept(mechanism=(), cause=cause, effect=effect, - subsystem=s)) - - -def test_concept_nonexistent(s, flushcache, restore_fs_cache): - flushcache() - assert not compute.concept(s, (0, 2)) - - -@patch('pyphi.compute.distance._constellation_distance_simple') -@patch('pyphi.compute.distance._constellation_distance_emd') -def test_constellation_distance_uses_simple_vs_emd(mock_emd_distance, - mock_simple_distance, s): - '''Quick check that we use the correct constellation distance function. +@patch('pyphi.compute.distance._ces_distance_simple') +@patch('pyphi.compute.distance._ces_distance_emd') +def test_ces_distance_uses_simple_vs_emd(mock_emd_distance, + mock_simple_distance, s): + """Quick check that we use the correct CES distance function. - If the two constellations differ only in that some concepts have + If the two CESs differ only in that some concepts have moved to the null concept and all other concepts are the same then - we use the simple constellation distance. Otherwise, use the EMD. - ''' + we use the simple CES distance. Otherwise, use the EMD. 
+ """ mock_emd_distance.return_value = float() mock_simple_distance.return_value = float() - make_mice = lambda: models.Mice(models.Mip( - phi=0, direction=None, mechanism=None, - purview=None, partition=None, - unpartitioned_repertoire=None, - partitioned_repertoire=None)) + make_mice = lambda: models.MaximallyIrreducibleCauseOrEffect( + models.RepertoireIrreducibilityAnalysis( + phi=0, direction=None, mechanism=None, purview=None, + partition=None, repertoire=None, partitioned_repertoire=None)) lone_concept = models.Concept(cause=make_mice(), effect=make_mice(), mechanism=(0, 1)) # lone concept -> null concept - compute.constellation_distance((lone_concept,), ()) + compute.ces_distance((lone_concept,), ()) assert mock_emd_distance.called is False assert mock_simple_distance.called is True mock_simple_distance.reset_mock() other_concept = models.Concept(cause=make_mice(), effect=make_mice(), mechanism=(0, 1, 2)) - # different concepts in constellation - compute.constellation_distance((lone_concept,), (other_concept,)) + # different concepts in CES + compute.ces_distance((lone_concept,), (other_concept,)) assert mock_emd_distance.called is True assert mock_simple_distance.called is False -def test_constellation_distance_switches_to_small_phi_difference(s): - mip = compute.big_mip(s) - constellations = (mip.unpartitioned_constellation, - mip.partitioned_constellation) +def test_ces_distance_switches_to_small_phi_difference(s): + sia = compute.sia(s) + ce_structures = (sia.ces, sia.partitioned_ces) - with config.override( - USE_SMALL_PHI_DIFFERENCE_FOR_CONSTELLATION_DISTANCE=False): - assert 2.3125 == compute.constellation_distance(*constellations) + with config.override(USE_SMALL_PHI_DIFFERENCE_FOR_CES_DISTANCE=False): + assert compute.ces_distance(*ce_structures) == 2.3125 - with config.override( - USE_SMALL_PHI_DIFFERENCE_FOR_CONSTELLATION_DISTANCE=True): - assert 1.083333 == compute.constellation_distance(*constellations) + with 
config.override(USE_SMALL_PHI_DIFFERENCE_FOR_CES_DISTANCE=True): + assert compute.ces_distance(*ce_structures) == 1.083333 -@config.override(CACHE_BIGMIPS=True) -def test_big_mip_cache_key_includes_config_dependencies(s, flushcache, - restore_fs_cache): +@config.override(CACHE_SIAS=True) +def test_sia_cache_key_includes_config_dependencies(s, flushcache, + restore_fs_cache): flushcache() with config.override(MEASURE='EMD'): - emd_big_phi = compute.big_phi(s) + emd_big_phi = compute.phi(s) with config.override(MEASURE='L1'): - l1_big_phi = compute.big_phi(s) + l1_big_phi = compute.phi(s) assert l1_big_phi != emd_big_phi -def test_conceptual_information(s, flushcache, restore_fs_cache): +def test_clear_subsystem_caches_after_computing_sia_config_option(s): + with config.override(CLEAR_SUBSYSTEM_CACHES_AFTER_COMPUTING_SIA=False, + PARALLEL_CONCEPT_EVALUATION=False, + PARALLEL_CUT_EVALUATION=False, + CACHE_REPERTOIRES=True): + sia = compute.sia(s) + assert s._repertoire_cache.cache + + with config.override(CLEAR_SUBSYSTEM_CACHES_AFTER_COMPUTING_SIA=True, + PARALLEL_CONCEPT_EVALUATION=False, + PARALLEL_CUT_EVALUATION=False, + CACHE_REPERTOIRES=True): + sia = compute.sia(s) + assert not s._repertoire_cache.cache + + +def test_conceptual_info(s, flushcache, restore_fs_cache): flushcache() - assert compute.conceptual_information(s) == 2.8125 + assert compute.conceptual_info(s) == 2.8125 -def test_big_mip_empty_subsystem(s_empty, flushcache, restore_fs_cache): +def test_sia_empty_subsystem(s_empty, flushcache, restore_fs_cache): flushcache() - assert (compute.big_mip(s_empty) == - models.BigMip(phi=0.0, - unpartitioned_constellation=(), - partitioned_constellation=(), - subsystem=s_empty, - cut_subsystem=s_empty)) + assert (compute.sia(s_empty) == + models.SystemIrreducibilityAnalysis( + phi=0.0, + ces=(), + partitioned_ces=(), + subsystem=s_empty, + cut_subsystem=s_empty)) -def test_big_mip_disconnected_network(reducible, flushcache, restore_fs_cache): +def 
test_sia_disconnected_network(reducible, flushcache, restore_fs_cache): flushcache() - assert (compute.big_mip(reducible) == - models.BigMip(subsystem=reducible, cut_subsystem=reducible, - phi=0.0, unpartitioned_constellation=[], - partitioned_constellation=[])) + assert (compute.sia(reducible) == + models.SystemIrreducibilityAnalysis(subsystem=reducible, + cut_subsystem=reducible, + phi=0.0, + ces=[], + partitioned_ces=[])) -def test_big_mip_wrappers(reducible, flushcache, restore_fs_cache): +def test_sia_wrappers(reducible, flushcache, restore_fs_cache): flushcache() - assert (compute.big_mip(reducible) == - models.BigMip(subsystem=reducible, cut_subsystem=reducible, - phi=0.0, unpartitioned_constellation=[], - partitioned_constellation=[])) - assert compute.big_phi(reducible) == 0.0 + assert (compute.sia(reducible) == + models.SystemIrreducibilityAnalysis(subsystem=reducible, + cut_subsystem=reducible, + phi=0.0, + ces=[], + partitioned_ces=[])) + assert compute.phi(reducible) == 0.0 @config.override(SINGLE_MICRO_NODES_WITH_SELFLOOPS_HAVE_PHI=True) @config.override(MEASURE='EMD') -def test_big_mip_single_micro_node_selfloops_have_phi( +def test_sia_single_micro_node_selfloops_have_phi( noisy_selfloop_single, flushcache, restore_fs_cache): flushcache() - assert compute.big_mip(noisy_selfloop_single).phi == 0.2736 + assert compute.sia(noisy_selfloop_single).phi == 0.2736 @config.override(SINGLE_MICRO_NODES_WITH_SELFLOOPS_HAVE_PHI=False) -def test_big_mip_single_micro_node_selfloops_dont_have_phi( +def test_sia_single_micro_node_selfloops_dont_have_phi( noisy_selfloop_single, flushcache, restore_fs_cache): flushcache() - assert compute.big_mip(noisy_selfloop_single).phi == 0.0 + assert compute.sia(noisy_selfloop_single).phi == 0.0 -def test_big_mip_single_micro_nodes_without_selfloops_dont_have_phi( +def test_sia_single_micro_nodes_without_selfloops_dont_have_phi( s_single, flushcache, restore_fs_cache): flushcache() - assert compute.big_mip(s_single).phi == 0.0 + 
assert compute.sia(s_single).phi == 0.0 @pytest.fixture -def standard_FindMip(s): - unpartitioned_constellation = constellation(s) - cuts = big_mip_bipartitions(s.node_indices) - return FindMip(cuts, s, unpartitioned_constellation) +def standard_ComputeSystemIrreducibility(s): + ces = compute.ces(s) + cuts = sia_bipartitions(s.node_indices) + return ComputeSystemIrreducibility(cuts, s, ces) @config.override(PARALLEL_CUT_EVALUATION=False) -def test_find_mip_sequential_standard_example(standard_FindMip, flushcache, - restore_fs_cache): +def test_find_sia_sequential_standard_example( + standard_ComputeSystemIrreducibility, flushcache, restore_fs_cache): flushcache() - mip = standard_FindMip.run_sequential() - check_mip(mip, standard_answer) + sia = standard_ComputeSystemIrreducibility.run_sequential() + check_sia(sia, standard_answer) @config.override(PARALLEL_CUT_EVALUATION=True, NUMBER_OF_CORES=-2) -def test_find_mip_parallel_standard_example(standard_FindMip, flushcache, - restore_fs_cache): +def test_find_sia_parallel_standard_example( + standard_ComputeSystemIrreducibility, flushcache, restore_fs_cache): flushcache() - mip = standard_FindMip.run_parallel() - check_mip(mip, standard_answer) + sia = standard_ComputeSystemIrreducibility.run_parallel() + check_sia(sia, standard_answer) @pytest.fixture -def s_noised_FindMip(s_noised): - unpartitioned_constellation = constellation(s_noised) - cuts = big_mip_bipartitions(s_noised.node_indices) - return FindMip(cuts, s_noised, unpartitioned_constellation) +def s_noised_ComputeSystemIrreducibility(s_noised): + ces = compute.ces(s_noised) + cuts = sia_bipartitions(s_noised.node_indices) + return ComputeSystemIrreducibility(cuts, s_noised, ces) @config.override(PARALLEL_CUT_EVALUATION=False) -def test_find_mip_sequential_noised_example(s_noised_FindMip, flushcache, - restore_fs_cache): +def test_find_sia_sequential_noised_example( + s_noised_ComputeSystemIrreducibility, flushcache, restore_fs_cache): flushcache() - mip = 
s_noised_FindMip.run_sequential() - check_mip(mip, noised_answer) + sia = s_noised_ComputeSystemIrreducibility.run_sequential() + check_sia(sia, noised_answer) @config.override(PARALLEL_CUT_EVALUATION=True, NUMBER_OF_CORES=-2) -def test_find_mip_parallel_noised_example(s_noised_FindMip, flushcache, - restore_fs_cache): +def test_find_sia_parallel_noised_example( + s_noised_ComputeSystemIrreducibility, flushcache, restore_fs_cache): flushcache() - mip = s_noised_FindMip.run_parallel() - check_mip(mip, noised_answer) + sia = s_noised_ComputeSystemIrreducibility.run_parallel() + check_sia(sia, noised_answer) @pytest.fixture -def micro_s_FindMip(micro_s): - unpartitioned_constellation = constellation(micro_s) - cuts = big_mip_bipartitions(micro_s.node_indices) - return FindMip(cuts, micro_s, unpartitioned_constellation) +def micro_s_ComputeSystemIrreducibility(micro_s): + ces = compute.ces(micro_s) + cuts = sia_bipartitions(micro_s.node_indices) + return ComputeSystemIrreducibility(cuts, micro_s, ces) @config.override(PARALLEL_CUT_EVALUATION=True) -def test_find_mip_parallel_micro(micro_s_FindMip, flushcache, - restore_fs_cache): +def test_find_sia_parallel_micro( + micro_s_ComputeSystemIrreducibility, flushcache, restore_fs_cache): flushcache() - mip = micro_s_FindMip.run_parallel() - check_mip(mip, micro_answer) + sia = micro_s_ComputeSystemIrreducibility.run_parallel() + check_sia(sia, micro_answer) @config.override(PARALLEL_CUT_EVALUATION=False) -def test_find_mip_sequential_micro(micro_s_FindMip, flushcache, - restore_fs_cache): +def test_find_sia_sequential_micro( + micro_s_ComputeSystemIrreducibility, flushcache, restore_fs_cache): flushcache() - mip = micro_s_FindMip.run_sequential() - check_mip(mip, micro_answer) + sia = micro_s_ComputeSystemIrreducibility.run_sequential() + check_sia(sia, micro_answer) def test_possible_complexes(s): @@ -445,14 +438,14 @@ def test_possible_complexes(s): def test_complexes_standard(s, flushcache, restore_fs_cache): 
flushcache() complexes = list(compute.complexes(s.network, s.state)) - check_mip(complexes[0], standard_answer) + check_sia(complexes[0], standard_answer) # TODO!! add more assertions for the smaller subsystems def test_all_complexes_standard(s, flushcache, restore_fs_cache): flushcache() complexes = list(compute.all_complexes(s.network, s.state)) - check_mip(complexes[0], standard_answer) + check_sia(complexes[0], standard_answer) @config.override(PARALLEL_CUT_EVALUATION=False) @@ -467,47 +460,47 @@ def test_all_complexes_parallelization(s, flushcache, restore_fs_cache): assert sorted(serial) == sorted(parallel) -def test_big_mip_complete_graph_standard_example(s_complete): - mip = compute.big_mip(s_complete) - check_mip(mip, standard_answer) +def test_sia_complete_graph_standard_example(s_complete): + sia = compute.sia(s_complete) + check_sia(sia, standard_answer) -def test_big_mip_complete_graph_s_noised(s_noised_complete): - mip = compute.big_mip(s_noised_complete) - check_mip(mip, noised_answer) +def test_sia_complete_graph_s_noised(s_noised_complete): + sia = compute.sia(s_noised_complete) + check_sia(sia, noised_answer) @pytest.mark.slow -def test_big_mip_complete_graph_big_subsys_all(big_subsys_all_complete): - mip = compute.big_mip(big_subsys_all_complete) - check_mip(mip, big_answer) +def test_sia_complete_graph_big_subsys_all(big_subsys_all_complete): + sia = compute.sia(big_subsys_all_complete) + check_sia(sia, big_answer) @pytest.mark.slow -def test_big_mip_complete_graph_rule152_s(rule152_s_complete): - mip = compute.big_mip(rule152_s_complete) - check_mip(mip, rule152_answer) +def test_sia_complete_graph_rule152_s(rule152_s_complete): + sia = compute.sia(rule152_s_complete) + check_sia(sia, rule152_answer) @pytest.mark.slow -def test_big_mip_big_network(big_subsys_all, flushcache, restore_fs_cache): +def test_sia_big_network(big_subsys_all, flushcache, restore_fs_cache): flushcache() - mip = compute.big_mip(big_subsys_all) - check_mip(mip, 
big_answer) + sia = compute.sia(big_subsys_all) + check_sia(sia, big_answer) -def test_big_mip_big_network_0_thru_3(big_subsys_0_thru_3, flushcache, - restore_fs_cache): +def test_sia_big_network_0_thru_3(big_subsys_0_thru_3, flushcache, + restore_fs_cache): flushcache() - mip = compute.big_mip(big_subsys_0_thru_3) - check_mip(mip, big_subsys_0_thru_3_answer) + sia = compute.sia(big_subsys_0_thru_3) + check_sia(sia, big_subsys_0_thru_3_answer) @pytest.mark.slow -def test_big_mip_rule152(rule152_s, flushcache, restore_fs_cache): +def test_sia_rule152(rule152_s, flushcache, restore_fs_cache): flushcache() - mip = compute.big_mip(rule152_s) - check_mip(mip, rule152_answer) + sia = compute.sia(rule152_s) + check_sia(sia, rule152_answer) # TODO fix this horribly outdated mess that never worked in the first place :P @@ -531,57 +524,56 @@ def test_rule152_complexes_no_caching(rule152): _flushdb() # Unpack the state from the results key. # Generate the network with the state we're testing. - net = Network(rule152.tpm, state, - connectivity_matrix=rule152.connectivity_matrix) + net = Network(rule152.tpm, state, cm=rule152.cm) # Comptue all the complexes, leaving out the first (empty) subsystem # since Matlab doesn't include it in results. complexes = list(compute.complexes(net))[1:] # Check the phi values of all complexes. - zz = [(bigmip.phi, result['subsystem_phis'][perm[i]]) for i, bigmip in + zz = [(sia.phi, result['subsystem_phis'][perm[i]]) for i, sia in list(enumerate(complexes))] - diff = [utils.eq(bigmip.phi, result['subsystem_phis'][perm[i]]) for - i, bigmip in list(enumerate(complexes))] - assert all(utils.eq(bigmip.phi, result['subsystem_phis'][perm[i]]) - for i, bigmip in list(enumerate(complexes))[:]) - # Check the main complex in particular. - main = compute.main_complex(net) - # Check the phi value of the main complex. 
- assert utils.eq(main.phi, result['phi']) + diff = [utils.eq(sia.phi, result['subsystem_phis'][perm[i]]) for + i, sia in list(enumerate(complexes))] + assert all(utils.eq(sia.phi, result['subsystem_phis'][perm[i]]) + for i, sia in list(enumerate(complexes))[:]) + # Check the major complex in particular. + major = compute.major_complex(net) + # Check the phi value of the major complex. + assert utils.eq(major.phi, result['phi']) # Check that the nodes are the same. - assert (main.subsystem.node_indices == - complexes[result['main_complex'] - 1].subsystem.node_indices) + assert (major.subsystem.node_indices == + complexes[result['major_complex'] - 1].subsystem.node_indices) # Check that the concept's phi values are the same. result_concepts = [c for c in result['concepts'] if c['is_irreducible']] - z = list(zip([c.phi for c in main.unpartitioned_constellation], + z = list(zip([c.phi for c in major.ces], [c['phi'] for c in result_concepts])) diff = [i for i in range(len(z)) if not utils.eq(z[i][0], z[i][1])] assert all(list(utils.eq(c.phi, result_concepts[i]['phi']) for i, c - in enumerate(main.unpartitioned_constellation))) + in enumerate(major.ces))) # Check that the minimal cut is the same. 
- assert main.cut == result['cut'] + assert major.cut == result['cut'] @pytest.mark.dev -def test_big_mip_macro(macro_s, flushcache, restore_fs_cache): +def test_sia_macro(macro_s, flushcache, restore_fs_cache): flushcache() - mip = compute.big_mip(macro_s) - check_mip(mip, macro_answer) + sia = compute.sia(macro_s) + check_sia(sia, macro_answer) -def test_parallel_and_sequential_constellations_are_equal(s, micro_s, macro_s): +def test_parallel_and_sequential_ces_are_equal(s, micro_s, macro_s): with config.override(PARALLEL_CONCEPT_EVALUATION=False): - c = compute.constellation(s) - c_micro = compute.constellation(micro_s) - c_macro = compute.constellation(macro_s) + c = compute.ces(s) + c_micro = compute.ces(micro_s) + c_macro = compute.ces(macro_s) with config.override(PARALLEL_CONCEPT_EVALUATION=True): - assert set(c) == set(compute.constellation(s)) - assert set(c_micro) == set(compute.constellation(micro_s)) - assert set(c_macro) == set(compute.constellation(macro_s)) + assert set(c) == set(compute.ces(s)) + assert set(c_micro) == set(compute.ces(micro_s)) + assert set(c_macro) == set(compute.ces(macro_s)) -def test_big_mip_bipartitions(): +def test_sia_bipartitions(): with config.override(CUT_ONE_APPROXIMATION=False): answer = [models.Cut((1,), (2, 3, 4)), models.Cut((2,), (1, 3, 4)), @@ -597,7 +589,7 @@ def test_big_mip_bipartitions(): models.Cut((3, 4), (1, 2)), models.Cut((1, 3, 4), (2,)), models.Cut((2, 3, 4), (1,))] - assert big_mip_bipartitions((1, 2, 3, 4)) == answer + assert sia_bipartitions((1, 2, 3, 4)) == answer with config.override(CUT_ONE_APPROXIMATION=True): answer = [models.Cut((1,), (2, 3, 4)), @@ -608,12 +600,12 @@ def test_big_mip_bipartitions(): models.Cut((1, 3, 4), (2,)), models.Cut((1, 2, 4), (3,)), models.Cut((1, 2, 3), (4,))] - assert big_mip_bipartitions((1, 2, 3, 4)) == answer + assert sia_bipartitions((1, 2, 3, 4)) == answer def test_system_cut_styles(s, flushcache, restore_fs_cache): with config.override(SYSTEM_CUTS='3.0_STYLE'): - 
assert compute.big_phi(s) == 2.3125 + assert compute.phi(s) == 2.3125 with config.override(SYSTEM_CUTS='CONCEPT_STYLE'): - assert compute.big_phi(s) == 0.6875 + assert compute.phi(s) == 0.6875 diff --git a/test/test_cache.py b/test/test_cache.py index bae5dab52..d8e10958e 100644 --- a/test/test_cache.py +++ b/test/test_cache.py @@ -33,7 +33,7 @@ def test_cache(): class SomeObject: - '''Object for testing cache decorator''' + """Object for testing cache decorator""" def __init__(self): self.my_cache = cache.DictCache() @@ -55,6 +55,49 @@ def test_cache_key_generation(): assert c.key('arg', _prefix='CONSTANT') == ('CONSTANT', 'arg') +def factory(): + """This function is necessary because CACHE_REPERTOIRES does not have an + effect if changed at runtime. + + .. TODO: + fix that + """ + class SomeObject: + """Object for testing CACHE_REPERTOIRES config option""" + def __init__(self): + self.repertoire_cache = cache.DictCache() + + @cache.method('repertoire_cache', 'cause') + def cause_repertoire(self, some_arg): + return 'expensive computation' + + @cache.method('repertoire_cache', 'effect') + def effect_repertoire(self, some_arg): + return 'expensive computation' + + return SomeObject + +def test_cache_repertoires_config_option(): + + with config.override(CACHE_REPERTOIRES=True): + SomeObject = factory() + o = SomeObject() + assert o.cause_repertoire(1) == 'expensive computation' + assert o.effect_repertoire(1) == 'expensive computation' + expected_key = ('cause', 1) + assert expected_key in o.repertoire_cache.cache + expected_key = ('effect', 1) + assert expected_key in o.repertoire_cache.cache + + with config.override(CACHE_REPERTOIRES=False): + SomeObject = factory() + o = SomeObject() + assert o.cause_repertoire(1) == 'expensive computation' + assert o.effect_repertoire(1) == 'expensive computation' + # Repertoire cache should be empty + assert not o.repertoire_cache.cache + + # Test MICE caching # ======================== @@ -74,15 +117,17 @@ def 
test_cache_key_generation(): # Decorator to force a test to use the local cache local_cache = config.override(REDIS_CACHE=False) -# Decorator to force a test to use Redis cache; skip test if Redis is not available +# Decorator to force a test to use Redis cache; skip test if Redis is not +# available redis_cache = lambda f: config.override(REDIS_CACHE=True)(require_redis(f)) def all_caches(test_func): - '''Decorator to run a test twice: once with the local cache and once with Redis. + """Decorator to run a test twice: once with the local cache and once with + Redis. Any decorated test must add a `redis_cache` argument. - ''' + """ @pytest.mark.parametrize("redis_cache,", [ require_redis((True,)), (False,), @@ -96,7 +141,7 @@ def wrapper(redis_cache, *args, **kwargs): @pytest.fixture def flush_redis(): - '''Fixture to flush and reset the Redis cache.''' + """Fixture to flush and reset the Redis cache.""" try: conn = cache.RedisConn() conn.flushall() @@ -125,32 +170,32 @@ def test_redis_cache_info(flush_redis): @redis_cache def test_use_redis_mice_cache(s): - c = cache.MiceCache(s) - assert isinstance(c, cache.RedisMiceCache) + c = cache.MICECache(s) + assert isinstance(c, cache.RedisMICECache) @local_cache def test_use_dict_mice_cache(s): - c = cache.MiceCache(s) - assert isinstance(c, cache.DictMiceCache) + c = cache.MICECache(s) + assert isinstance(c, cache.DictMICECache) def test_mice_cache_keys(s): - c = cache.DictMiceCache(s) - answer = (None, Direction.PAST, (0,), (0, 1)) - assert c.key(Direction.PAST, (0,), purviews=(0, 1)) == answer + c = cache.DictMICECache(s) + answer = (None, Direction.CAUSE, (0,), (0, 1)) + assert c.key(Direction.CAUSE, (0,), purviews=(0, 1)) == answer - c = cache.RedisMiceCache(s) - answer = 'subsys:{}:None:PAST:(0,):(0, 1)'.format(hash(s)) - assert c.key(Direction.PAST, (0,), purviews=(0, 1)) == answer + c = cache.RedisMICECache(s) + answer = 'subsys:{}:None:CAUSE:(0,):(0, 1)'.format(hash(s)) + assert c.key(Direction.CAUSE, (0,), 
purviews=(0, 1)) == answer @all_caches def test_mice_cache(redis_cache, flush_redis): s = examples.basic_subsystem() - mechanism = (1,) # has a core cause - mice = s.find_mice(Direction.PAST, mechanism) - key = s._mice_cache.key(Direction.PAST, mechanism) + mechanism = (1,) # has a MIC + mice = s.find_mice(Direction.CAUSE, mechanism) + key = s._mice_cache.key(Direction.CAUSE, mechanism) assert s._mice_cache.get(key) == mice @@ -158,7 +203,7 @@ def test_mice_cache(redis_cache, flush_redis): def test_do_not_cache_phi_zero_mice(): s = examples.basic_subsystem() mechanism = () # zero phi - mice = s.find_mice(Direction.PAST, mechanism) + mice = s.find_mice(Direction.CAUSE, mechanism) assert mice.phi == 0 # don't cache anything because mice.phi == 0 assert s._mice_cache.size() == 0 @@ -168,19 +213,19 @@ def test_do_not_cache_phi_zero_mice(): def test_only_cache_uncut_subsystem_mices(redis_cache, flush_redis, s): s = Subsystem(s.network, (1, 0, 0), s.node_indices, cut=models.Cut((1,), (0, 2))) - mechanism = (1,) # has a core cause - s.find_mice(Direction.PAST, mechanism) + mechanism = (1,) # has a MIC + s.find_mice(Direction.CAUSE, mechanism) # don't cache anything because subsystem is cut assert s._mice_cache.size() == 0 @all_caches def test_split_mechanism_mice_is_not_reusable(redis_cache, flush_redis): - '''If mechanism is split, then cached mice are not usable - when a cache is built from a parent cache.''' + """If mechanism is split, then cached mice are not usable + when a cache is built from a parent cache.""" s = examples.basic_subsystem() mechanism = (0, 1) - mice = s.find_mice(Direction.PAST, mechanism) + mice = s.find_mice(Direction.CAUSE, mechanism) assert s._mice_cache.size() == 1 # cached assert mice.purview == (1, 2) @@ -188,17 +233,18 @@ def test_split_mechanism_mice_is_not_reusable(redis_cache, flush_redis): cut = models.Cut((0,), (1, 2)) cut_s = Subsystem(s.network, s.state, s.node_indices, cut=cut, mice_cache=s._mice_cache) - key = 
cut_s._mice_cache.key(Direction.PAST, mechanism) + key = cut_s._mice_cache.key(Direction.CAUSE, mechanism) assert cut_s._mice_cache.get(key) is None @all_caches -def test_cut_relevant_connections_mice_is_not_reusable(redis_cache, flush_redis): - '''If relevant connections are cut, cached mice are not usable - when a cache is built from a parent cache.''' +def test_cut_relevant_connections_mice_is_not_reusable(redis_cache, + flush_redis): + """If relevant connections are cut, cached mice are not usable + when a cache is built from a parent cache.""" s = examples.basic_subsystem() mechanism = (1,) - mice = s.find_mice(Direction.PAST, mechanism) + mice = s.find_mice(Direction.CAUSE, mechanism) assert s._mice_cache.size() == 1 # cached assert mice.purview == (2,) @@ -206,17 +252,17 @@ def test_cut_relevant_connections_mice_is_not_reusable(redis_cache, flush_redis) cut = models.Cut((0, 2), (1,)) cut_s = Subsystem(s.network, s.state, s.node_indices, cut=cut, mice_cache=s._mice_cache) - key = cut_s._mice_cache.key(Direction.PAST, mechanism) + key = cut_s._mice_cache.key(Direction.CAUSE, mechanism) assert cut_s._mice_cache.get(key) is None @all_caches def test_inherited_mice_cache_keeps_unaffected_mice(redis_cache, flush_redis): - '''Cached Mice are saved from the parent cache if both - the mechanism and the relevant connections are not cut.''' + """Cached MICE are saved from the parent cache if both + the mechanism and the relevant connections are not cut.""" s = examples.basic_subsystem() mechanism = (1,) - mice = s.find_mice(Direction.PAST, mechanism) + mice = s.find_mice(Direction.CAUSE, mechanism) assert s._mice_cache.size() == 1 # cached assert mice.purview == (2,) @@ -224,26 +270,27 @@ def test_inherited_mice_cache_keeps_unaffected_mice(redis_cache, flush_redis): cut = models.Cut((0, 1), (2,)) cut_s = Subsystem(s.network, s.state, s.node_indices, cut=cut, mice_cache=s._mice_cache) - key = cut_s._mice_cache.key(Direction.PAST, mechanism) + key = 
cut_s._mice_cache.key(Direction.CAUSE, mechanism) assert cut_s._mice_cache.get(key) == mice @all_caches -def test_inherited_cache_must_come_from_uncut_subsystem(redis_cache, flush_redis): +def test_inherited_cache_must_come_from_uncut_subsystem(redis_cache, + flush_redis): s = examples.basic_subsystem() cut_s = Subsystem(s.network, s.state, s.node_indices, cut=models.Cut((0, 2), (1,))) with pytest.raises(ValueError): - cache.MiceCache(s, cut_s._mice_cache) + cache.MICECache(s, cut_s._mice_cache) @local_cache @config.override(MAXIMUM_CACHE_MEMORY_PERCENTAGE=0) def test_mice_cache_respects_cache_memory_limits(): s = examples.basic_subsystem() - c = cache.MiceCache(s) - mice = mock.Mock(phi=1) # dummy Mice - c.set(c.key(Direction.PAST, ()), mice) + c = cache.MICECache(s) + mice = mock.Mock(phi=1) # dummy MICE + c.set(c.key(Direction.CAUSE, ()), mice) assert c.size() == 0 @@ -252,7 +299,7 @@ def test_mice_cache_respects_cache_memory_limits(): @config.override(CACHE_POTENTIAL_PURVIEWS=True) def test_purview_cache(standard): - purviews = standard.potential_purviews(Direction.FUTURE, (0,)) + purviews = standard.potential_purviews(Direction.EFFECT, (0,)) assert standard.purview_cache.size() == 1 assert purviews in standard.purview_cache.cache.values() @@ -260,5 +307,5 @@ def test_purview_cache(standard): @config.override(CACHE_POTENTIAL_PURVIEWS=False) def test_only_cache_purviews_if_configured(): c = cache.PurviewCache() - c.set(c.key(Direction.PAST, (0,)), ('some purview')) + c.set(c.key(Direction.CAUSE, (0,)), ('some purview')) assert c.size() == 0 diff --git a/test/test_concept_style_cuts.py b/test/test_concept_style_cuts.py index ce23531ad..39463ce37 100644 --- a/test/test_concept_style_cuts.py +++ b/test/test_concept_style_cuts.py @@ -4,36 +4,40 @@ import pytest from pyphi import Direction, compute, config -from pyphi.compute import BigMipConceptStyle, ConceptStyleSystem, concept_cuts +from pyphi.compute import (ConceptStyleSystem, + 
SystemIrreducibilityAnalysisConceptStyle, + concept_cuts) from pyphi.models import KCut, KPartition, Part -from test_models import bigmip +from test_models import sia @pytest.fixture() -def kcut_past(): - partition = KPartition(Part((0, 2), (0,)), Part((), (2,)), Part((3,), (3,))) - return KCut(Direction.PAST, partition) +def kcut_cause(): + partition = KPartition( + Part((0, 2), (0,)), Part((), (2,)), Part((3,), (3,))) + return KCut(Direction.CAUSE, partition) @pytest.fixture() -def kcut_future(): - partition = KPartition(Part((0, 2), (0,)), Part((), (2,)), Part((3,), (3,))) - return KCut(Direction.FUTURE, partition) +def kcut_effect(): + partition = KPartition( + Part((0, 2), (0,)), Part((), (2,)), Part((3,), (3,))) + return KCut(Direction.EFFECT, partition) -def test_cut_indices(kcut_past, kcut_future): - assert kcut_past.indices == (0, 2, 3) - assert kcut_future.indices == (0, 2, 3) +def test_cut_indices(kcut_cause, kcut_effect): + assert kcut_cause.indices == (0, 2, 3) + assert kcut_effect.indices == (0, 2, 3) -def test_apply_cut(kcut_past, kcut_future): +def test_apply_cut(kcut_cause, kcut_effect): cm = np.ones((4, 4)) cut_cm = np.array([ [1, 1, 1, 0], [1, 1, 1, 1], [0, 1, 0, 0], [0, 1, 0, 1]]) - assert np.array_equal(kcut_past.apply_cut(cm), cut_cm) + assert np.array_equal(kcut_cause.apply_cut(cm), cut_cm) cm = np.ones((4, 4)) cut_cm = np.array([ @@ -41,101 +45,100 @@ def test_apply_cut(kcut_past, kcut_future): [1, 1, 1, 1], [1, 1, 0, 0], [0, 1, 0, 1]]) - assert np.array_equal(kcut_future.apply_cut(cm), cut_cm) + assert np.array_equal(kcut_effect.apply_cut(cm), cut_cm) -def test_cut_matrix(kcut_past, kcut_future): - assert np.array_equal(kcut_past.cut_matrix(4), np.array([ +def test_cut_matrix(kcut_cause, kcut_effect): + assert np.array_equal(kcut_cause.cut_matrix(4), np.array([ [0, 0, 0, 1], [0, 0, 0, 0], [1, 0, 1, 1], [1, 0, 1, 0]])) - assert np.array_equal(kcut_future.cut_matrix(4), np.array([ + assert np.array_equal(kcut_effect.cut_matrix(4), np.array([ 
[0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 1, 1], [1, 0, 1, 0]])) -def test_splits_mechanism(kcut_past): - assert kcut_past.splits_mechanism((0, 3)) - assert kcut_past.splits_mechanism((2, 3)) - assert not kcut_past.splits_mechanism((0,)) - assert not kcut_past.splits_mechanism((3,)) +def test_splits_mechanism(kcut_cause): + assert kcut_cause.splits_mechanism((0, 3)) + assert kcut_cause.splits_mechanism((2, 3)) + assert not kcut_cause.splits_mechanism((0,)) + assert not kcut_cause.splits_mechanism((3,)) -def test_all_cut_mechanisms(kcut_past): - assert kcut_past.all_cut_mechanisms() == ( +def test_all_cut_mechanisms(kcut_cause): + assert kcut_cause.all_cut_mechanisms() == ( (2,), (0, 2), (0, 3), (2, 3), (0, 2, 3)) @config.override(PARTITION_TYPE='TRI') def test_concept_style_cuts(): - assert list(concept_cuts(Direction.PAST, (0,))) == [ - KCut(Direction.PAST, KPartition(Part((), ()), Part((), (0,)), - Part((0,), ())))] + assert list(concept_cuts(Direction.CAUSE, (0,))) == [ + KCut(Direction.CAUSE, KPartition( + Part((), ()), Part((), (0,)), Part((0,), ())))] + assert list(concept_cuts(Direction.EFFECT, (0,))) == [ + KCut(Direction.EFFECT, KPartition( + Part((), ()), Part((), (0,)), Part((0,), ())))] - assert list(concept_cuts(Direction.FUTURE, (0,))) == [ - KCut(Direction.FUTURE, KPartition(Part((), ()), Part((), (0,)), - Part((0,), ())))] +def test_kcut_equality(kcut_cause, kcut_effect): + other = KCut(Direction.CAUSE, KPartition( + Part((0, 2), (0,)), Part((), (2,)), Part((3,), (3,)))) + assert kcut_cause == other + assert hash(kcut_cause) == hash(other) + assert hash(kcut_cause) != hash(kcut_cause.partition) -def test_kcut_equality(kcut_past, kcut_future): - other = KCut(Direction.PAST, KPartition(Part((0, 2), (0,)), Part((), (2,)), - Part((3,), (3,)))) - assert kcut_past == other - assert hash(kcut_past) == hash(other) - assert hash(kcut_past) != hash(kcut_past.partition) - - assert kcut_past != kcut_future - assert hash(kcut_past) != hash(kcut_future) + assert 
kcut_cause != kcut_effect + assert hash(kcut_cause) != hash(kcut_effect) def test_system_accessors(s): - cut_past = KCut(Direction.PAST, KPartition(Part((0, 2), (0, 1)), - Part((1,), (2,)))) - cs_past = ConceptStyleSystem(s, Direction.PAST, cut_past) - assert cs_past.cause_system.cut == cut_past - assert not cs_past.effect_system.is_cut + cut_cause = KCut(Direction.CAUSE, KPartition( + Part((0, 2), (0, 1)), Part((1,), (2,)))) + cs_cause = ConceptStyleSystem(s, Direction.CAUSE, cut_cause) + assert cs_cause.cause_system.cut == cut_cause + assert not cs_cause.effect_system.is_cut - cut_future = KCut(Direction.FUTURE, KPartition(Part((0, 2), (0, 1)), - Part((1,), (2,)))) - cs_future = ConceptStyleSystem(s, Direction.FUTURE, cut_future) - assert not cs_future.cause_system.is_cut - assert cs_future.effect_system.cut == cut_future + cut_effect = KCut(Direction.EFFECT, KPartition( + Part((0, 2), (0, 1)), Part((1,), (2,)))) + cs_effect = ConceptStyleSystem(s, Direction.EFFECT, cut_effect) + assert not cs_effect.cause_system.is_cut + assert cs_effect.effect_system.cut == cut_effect -def big_mip_cs(phi=1.0, subsystem=None): - return BigMipConceptStyle( - mip_past=bigmip(phi=phi, subsystem=subsystem), - mip_future=bigmip(phi=phi, subsystem=subsystem)) +def sia_cs(phi=1.0, subsystem=None): + return SystemIrreducibilityAnalysisConceptStyle( + sia_cause=sia(phi=phi, subsystem=subsystem), + sia_effect=sia(phi=phi, subsystem=subsystem)) -def test_big_mip_concept_style_ordering(s, subsys_n0n2, s_noised): - assert big_mip_cs(subsystem=s) == big_mip_cs(subsystem=s) - assert big_mip_cs(phi=1, subsystem=s) < big_mip_cs(phi=2, subsystem=s) +def test_sia_concept_style_ordering(s, subsys_n0n2, s_noised): + assert sia_cs(subsystem=s) == sia_cs(subsystem=s) + assert sia_cs(phi=1, subsystem=s) < sia_cs(phi=2, subsystem=s) - assert big_mip_cs(subsystem=s) >= big_mip_cs(subsystem=subsys_n0n2) + assert sia_cs(subsystem=s) >= sia_cs(subsystem=subsys_n0n2) with pytest.raises(TypeError): - 
big_mip_cs(subsystem=s) < big_mip_cs(subsystem=s_noised) + sia_cs(subsystem=s) < sia_cs(subsystem=s_noised) -def test_big_mip_concept_style(s): - mip = compute.big_mip_concept_style(s) - assert mip.min_mip is mip.big_mip_future - for attr in ['phi', 'unpartitioned_constellation', 'cut', 'subsystem', - 'cut_subsystem', 'network', 'partitioned_constellation']: - assert getattr(mip, attr) is getattr(mip.big_mip_future, attr) +def test_sia_concept_style(s): + sia = compute.sia_concept_style(s) + assert sia.min_sia is sia.sia_effect + for attr in ['phi', 'ces', 'cut', 'subsystem', + 'cut_subsystem', 'network', 'partitioned_ces']: + assert getattr(sia, attr) is getattr(sia.sia_effect, attr) @config.override(SYSTEM_CUTS='CONCEPT_STYLE') def test_unpickle(s, flushcache, restore_fs_cache): - bm = compute.big_mip(s) + bm = compute.sia(s) pickle.loads(pickle.dumps(bm)) @config.override(SYSTEM_CUTS='CONCEPT_STYLE') def test_concept_style_phi(s, flushcache, restore_fs_cache): - assert compute.big_phi(s) == 0.6875 + assert compute.phi(s) == 0.6875 diff --git a/test/test_convert.py b/test/test_convert.py index b58d4e9b2..a17d3f595 100644 --- a/test/test_convert.py +++ b/test/test_convert.py @@ -7,16 +7,16 @@ from pyphi import convert -def test_loli_index2state(): - assert convert.loli_index2state(7, 8) == (1, 1, 1, 0, 0, 0, 0, 0) - assert convert.loli_index2state(1, 3) == (1, 0, 0) - assert convert.loli_index2state(8, 4) == (0, 0, 0, 1) +def test_le_index2state(): + assert convert.le_index2state(7, 8) == (1, 1, 1, 0, 0, 0, 0, 0) + assert convert.le_index2state(1, 3) == (1, 0, 0) + assert convert.le_index2state(8, 4) == (0, 0, 0, 1) -def test_holi_index2state(): - assert convert.holi_index2state(7, 8) == (0, 0, 0, 0, 0, 1, 1, 1) - assert convert.holi_index2state(1, 3) == (0, 0, 1) - assert convert.holi_index2state(8, 4) == (1, 0, 0, 0) +def test_be_index2state(): + assert convert.be_index2state(7, 8) == (0, 0, 0, 0, 0, 1, 1, 1) + assert convert.be_index2state(1, 3) == (0, 0, 1) + 
assert convert.be_index2state(8, 4) == (1, 0, 0, 0) state_by_node = np.array([ @@ -100,33 +100,33 @@ def test_holi_index2state(): ]) -def test_to_n_dimensional(): +def test_to_multidimensional(): # Identity - assert np.array_equal(convert.to_n_dimensional(nd_state_by_node), + assert np.array_equal(convert.to_multidimensional(nd_state_by_node), nd_state_by_node) for tpm in [state_by_node, twod_state_by_node]: N = tpm.shape[-1] S = tpm.shape[0] - result = convert.to_n_dimensional(tpm) + result = convert.to_multidimensional(tpm) for i in range(S): - state = convert.loli_index2state(i, N) + state = convert.le_index2state(i, N) assert np.array_equal(result[state], tpm[i]) -def test_to_2_dimensional(): +def test_to_2dimensional(): # Identity - assert np.array_equal(convert.to_2_dimensional(state_by_node), + assert np.array_equal(convert.to_2dimensional(state_by_node), state_by_node) # Idempotency for tpm in [state_by_node, state_by_node_nondet, twod_state_by_node]: - nd = convert.to_n_dimensional(tpm) - assert np.array_equal(convert.to_2_dimensional(nd), tpm) + nd = convert.to_multidimensional(tpm) + assert np.array_equal(convert.to_2dimensional(nd), tpm) def test_state_by_state2state_by_node(): result = convert.state_by_state2state_by_node(state_by_state) - expected = convert.to_n_dimensional(state_by_node) + expected = convert.to_multidimensional(state_by_node) print("Result:") print(result) print("Expected:") @@ -169,7 +169,7 @@ def test_nondet_state_by_node2state_by_state(): def test_nondet_state_by_state2state_by_node(): # Test for nondeterministic TPM. result = convert.state_by_state2state_by_node(state_by_state_nondet) - expected = convert.to_n_dimensional(state_by_node_nondet) + expected = convert.to_multidimensional(state_by_node_nondet) print("Result:") print(result) print("Expected:") @@ -190,7 +190,7 @@ def test_2_d_state_by_node2state_by_state(): def test_n_d_state_by_node2state_by_state(): # Check with N-D form. 
- sbn = convert.to_n_dimensional(state_by_node) + sbn = convert.to_multidimensional(state_by_node) result = convert.state_by_node2state_by_state(sbn) expected = state_by_state print("Result:") diff --git a/test/test_direction.py b/test/test_direction.py index 51cbebf7a..c1fbf0b0d 100644 --- a/test/test_direction.py +++ b/test/test_direction.py @@ -10,8 +10,8 @@ def test_direction_order(): mechanism = (0,) purview = (1, 2) - assert Direction.PAST.order(mechanism, purview) == (purview, mechanism) - assert Direction.FUTURE.order(mechanism, purview) == (mechanism, purview) + assert Direction.CAUSE.order(mechanism, purview) == (purview, mechanism) + assert Direction.EFFECT.order(mechanism, purview) == (mechanism, purview) with pytest.raises(ValueError): Direction.BIDIRECTIONAL.order(mechanism, purview) diff --git a/test/test_distance.py b/test/test_distance.py index e0ea98dde..759103e5f 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -124,12 +124,12 @@ def test_default_asymmetric_measures(): assert set(distance.measures.asymmetric()) == set(['KLD', 'MP2Q', 'BLD']) -def test_big_phi_measure_must_be_symmetric(): +def test_system_repertoire_distance_must_be_symmetric(): a = np.ones((2, 2, 2)) / 8 b = np.ones((2, 2, 2)) / 8 with config.override(MEASURE='KLD'): with pytest.raises(ValueError): - distance.big_phi_measure(a, b) + distance.system_repertoire_distance(a, b) def test_suppress_np_warnings(): diff --git a/test/test_distribution.py b/test/test_distribution.py index d94f8f0ec..886bcf227 100644 --- a/test/test_distribution.py +++ b/test/test_distribution.py @@ -108,6 +108,6 @@ def test_flatten(): assert np.array_equal(distribution.flatten(repertoire), [0.1, 0.2, 0.0, 0.7]) - assert np.array_equal(distribution.flatten(repertoire, holi=True), + assert np.array_equal(distribution.flatten(repertoire, big_endian=True), [0.1, 0.0, 0.2, 0.7]) - assert distribution.flatten(None) == None + assert distribution.flatten(None) is None diff --git a/test/test_json.py 
b/test/test_json.py index b068334b8..d434e903c 100644 --- a/test/test_json.py +++ b/test/test_json.py @@ -8,8 +8,8 @@ import numpy as np import pytest -from pyphi import (Direction, actual, compute, config, constants, exceptions, - jsonify, models, network) +from pyphi import (Direction, actual, compute, config, exceptions, jsonify, + models, network) from test_actual import transition @@ -47,7 +47,7 @@ def test_jsonify_numpy(): def test_json_deserialization(s, transition): objects = [ - Direction.PAST, + Direction.CAUSE, s.network, # Network s, # Subsystem models.Bipartition(models.Part((0,), ()), models.Part((1,), (2, 3))), @@ -56,16 +56,17 @@ def test_json_deserialization(s, transition): models.Part((3,), (4,))), models.Cut((0,), (2,)), models.NullCut((0, 1)), - models.KCut(Direction.PAST, models.KPartition(models.Part((0,), ()), - models.Part((1,), (2, 3)))), + models.KCut(Direction.CAUSE, + models.KPartition(models.Part((0,), ()), + models.Part((1,), (2, 3)))), s.concept((1, 2)), s.concept((1,)), - compute.constellation(s), - compute.big_mip(s), + compute.ces(s), + compute.sia(s), transition, transition.find_actual_cause((0,), (0,)), actual.account(transition), - actual.big_acmip(transition) + actual.sia(transition) ] for o in objects: loaded = jsonify.loads(jsonify.dumps(o)) @@ -83,19 +84,19 @@ def __init__(self, x): def test_deserialization_memoizes_duplicate_objects(s): with config.override(PARALLEL_CUT_EVALUATION=True): - big_mip = compute.big_mip(s) + sia = compute.sia(s) - s1 = big_mip.subsystem + s1 = sia.subsystem # Computed in a parallel process, so has a different id - s2 = big_mip.unpartitioned_constellation[0].subsystem - assert not s1 is s2 + s2 = sia.ces[0].subsystem + assert s1 is not s2 assert s1 == s2 assert hash(s1) == hash(s2) - loaded = jsonify.loads(jsonify.dumps(big_mip)) + loaded = jsonify.loads(jsonify.dumps(sia)) l1 = loaded.subsystem - l2 = loaded.unpartitioned_constellation[0].subsystem + l2 = loaded.ces[0].subsystem assert l1 == l2 
assert hash(l1) == hash(l2) assert l1 is l2 diff --git a/test/test_macro.py b/test/test_macro.py index e92e17c4b..c9f310f61 100644 --- a/test/test_macro.py +++ b/test/test_macro.py @@ -106,15 +106,16 @@ def test_make_mapping(): def test_make_macro_tpm(): answer_tpm = convert.state_by_state2state_by_node(np.array([ - [0.375, 0.375, 0.125, 0.125], - [0.375, 0.375, 0.125, 0.125], - [0.375, 0.375, 0.125, 0.125], - [0.375, 0.375, 0.125, 0.125], + [0.375, 0.375, 0.125, 0.125], + [0.375, 0.375, 0.125, 0.125], + [0.375, 0.375, 0.125, 0.125], + [0.375, 0.375, 0.125, 0.125], ])) partition = ((0,), (1, 2)) grouping = (((0,), (1,)), ((0, 1), (2,))) coarse_grain = macro.CoarseGrain(partition, grouping) - assert np.array_equal(coarse_grain.make_mapping(), [0, 1, 0, 1, 0, 1, 2, 3]) + assert np.array_equal(coarse_grain.make_mapping(), + [0, 1, 0, 1, 0, 1, 2, 3]) micro_tpm = np.zeros((8, 3)) + 0.5 macro_tpm = coarse_grain.macro_tpm(micro_tpm) @@ -127,10 +128,10 @@ def test_make_macro_tpm(): def test_make_macro_tpm_conditional_independence_check(): micro_tpm = np.array([ - [1, 0, 0, 0], - [0, .5, .5, 0], - [0, .5, .5, 0], - [0, 0, 0, 1], + [1, 0.0, 0.0, 0], + [0, 0.5, 0.5, 0], + [0, 0.5, 0.5, 0], + [0, 0.0, 0.0, 1], ]) partition = ((0,), (1,)) grouping = (((0,), (1,)), ((0,), (1,))) @@ -143,14 +144,14 @@ def test_make_macro_tpm_conditional_independence_check(): # TODO: make a fixture for this conditionally dependent TPM def test_macro_tpm_sbs(): micro_tpm = np.array([ - [1, 0, 0, 0, 0, 0, 0, 0], - [0, .5, .5, 0, 0, 0, 0, 0], - [0, .5, .5, 0, 0, 0, 0, 0], - [0, 0, 0, 1, 0, 0, 0, 0], - [1, 0, 0, 0, 0, 0, 0, 0], - [0, .5, .5, 0, 0, 0, 0, 0], - [0, .5, .5, 0, 0, 0, 0, 0], - [0, 0, 0, 1, 0, 0, 0, 0], + [1, 0.0, 0.0, 0, 0, 0, 0, 0], + [0, 0.5, 0.5, 0, 0, 0, 0, 0], + [0, 0.5, 0.5, 0, 0, 0, 0, 0], + [0, 0.0, 0.0, 1, 0, 0, 0, 0], + [1, 0.0, 0.0, 0, 0, 0, 0, 0], + [0, 0.5, 0.5, 0, 0, 0, 0, 0], + [0, 0.5, 0.5, 0, 0, 0, 0, 0], + [0, 0.0, 0.0, 1, 0, 0, 0, 0], ]) answer_tpm = np.array([ [1, 0, 0, 
0 ], @@ -208,8 +209,8 @@ def bb(): @pytest.fixture def cg_bb(): - '''A blackbox with multiple outputs for a box, which must be coarse- - grained.''' + """A blackbox with multiple outputs for a box, which must be coarse- + grained.""" partition = ((1, 3), (4,), (5,)) output_indices = (1, 3, 4, 5) return macro.Blackbox(partition, output_indices) diff --git a/test/test_macro_blackbox.py b/test/test_macro_blackbox.py index 9731ffed3..2b7a92b66 100644 --- a/test/test_macro_blackbox.py +++ b/test/test_macro_blackbox.py @@ -100,7 +100,7 @@ def test_basic_nor_or(): state = (0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0) - network = Network(tpm, connectivity_matrix=cm) + network = Network(tpm, cm=cm) # (0, 1, 2) compose the OR element, # (3, 4, 5) the COPY, @@ -115,10 +115,10 @@ def test_basic_nor_or(): blackbox=blackbox, time_scale=time) with config.override(CUT_ONE_APPROXIMATION=True): - mip = compute.big_mip(sub) + sia = compute.sia(sub) - assert mip.phi == 1.958332 - assert mip.cut == models.Cut((6,), (0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11)) + assert sia.phi == 1.958332 + assert sia.cut == models.Cut((6,), (0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11)) # After performing the 'ONE_CUT_APPROXIMATION' # The cut disrupts half of the connection from A (OR) to C (XOR). 
# It is able to do this because A 'enters' C from two different locations @@ -166,7 +166,7 @@ def test_xor_propogation_delay(): # The state of the system is all OFF state = (0, 0, 0, 0, 0, 0, 0, 0, 0) - network = Network(tpm, connectivity_matrix=cm) + network = Network(tpm, cm=cm) partition = ((0, 2, 7), (1, 3, 5), (4, 6, 8)) output_indices = (0, 3, 6) @@ -177,9 +177,9 @@ def test_xor_propogation_delay(): subsys = macro.MacroSubsystem(network, state, network.node_indices, blackbox=blackbox, time_scale=time) - big_mip = compute.big_mip(subsys) - assert big_mip.phi == 1.874999 - assert big_mip.cut == models.Cut((0,), (1, 2, 3, 4, 5, 6, 7, 8)) + sia = compute.sia(subsys) + assert sia.phi == 1.874999 + assert sia.cut == models.Cut((0,), (1, 2, 3, 4, 5, 6, 7, 8)) @pytest.mark.xfail @@ -225,11 +225,11 @@ def test_soup(): # State all OFF state = (0, 0, 0, 0, 0, 0) - assert compute.main_complex(network, state).phi == 0.125 + assert compute.major_complex(network, state).phi == 0.125 # With D ON (E must also be ON otherwise the state is unreachable) state = (0, 0, 0, 1, 1, 0) - assert compute.main_complex(network, state).phi == 0.215278 + assert compute.major_complex(network, state).phi == 0.215278 # Once the connection from D to B is frozen (with D in the ON state), we # recover the degeneracy example @@ -240,7 +240,7 @@ def test_soup(): time = 2 sub = macro.MacroSubsystem(network, state, (0, 1, 2, 3, 4, 5), blackbox=blackbox, time_scale=time) - assert compute.big_phi(sub) == 0.638888 + assert compute.phi(sub) == 0.638888 # When the connection from D to B is frozen (with D in the OFF state), # element B is inactivated and integration is compromised. 
@@ -251,7 +251,7 @@ def test_soup(): time = 2 sub = macro.MacroSubsystem(network, state, (0, 1, 2, 3, 4, 5), blackbox=blackbox, time_scale=time) - assert compute.big_phi(sub) == 0 + assert compute.phi(sub) == 0 @pytest.mark.slow @@ -292,7 +292,7 @@ def test_coarsegrain_spatial_degenerate(): net = Network(tpm, cm) - mc = compute.main_complex(net, state) + mc = compute.major_complex(net, state) assert mc.phi == 0.194445 partition = ((0, 1), (2, 3), (4, 5)) @@ -302,12 +302,12 @@ def test_coarsegrain_spatial_degenerate(): sub = macro.MacroSubsystem(net, state, range(net.size), coarse_grain=coarse) - mip = compute.big_mip(sub) - assert mip.phi == 0.834183 + sia = compute.sia(sub) + assert sia.phi == 0.834183 def test_degenerate(degenerate): - assert np.array_equal(degenerate.tpm, convert.to_n_dimensional(np.array([ + assert np.array_equal(degenerate.tpm, convert.to_multidimensional(np.array([ [0, 0], [0, 1], [1, 0], @@ -317,13 +317,13 @@ def test_degenerate(degenerate): [0, 1], [1, 0] ])) - mip = compute.big_mip(degenerate) - assert mip.phi == 0.638888 + sia = compute.sia(degenerate) + assert sia.phi == 0.638888 def test_basic_propagation_delay(s, propagation_delay): - # bb_mip = compute.big_mip(bb_sub) - # assert bb_mip.phi == 2.125 - # assert bb_mip.cut == models.Cut((0, 1, 2, 3, 4, 5, 6), (7,)) + # bb_sia = compute.sia(bb_sub) + # assert bb_sia.phi == 2.125 + # assert bb_sia.cut == models.Cut((0, 1, 2, 3, 4, 5, 6), (7,)) assert np.array_equal(propagation_delay.cm, s.cm) diff --git a/test/test_macro_subsystem.py b/test/test_macro_subsystem.py index d4dfc4ee1..6bb6da484 100644 --- a/test/test_macro_subsystem.py +++ b/test/test_macro_subsystem.py @@ -29,7 +29,7 @@ def macro_subsystem(): state = (0, 0, 0, 0) - network = pyphi.Network(tpm, connectivity_matrix=cm) + network = pyphi.Network(tpm, cm=cm) partition = ((0, 1), (2, 3)) grouping = (((0, 1), (2,)), ((0, 1), (2,))) @@ -80,7 +80,7 @@ def test_macro_subsystem(macro_subsystem): [1., 0.09], [1., 1.] 
]) - assert np.array_equal(macro_subsystem.connectivity_matrix, answer_cm) + assert np.array_equal(macro_subsystem.cm, answer_cm) assert np.allclose(macro_subsystem.tpm.reshape([4] + [2], order='f'), answer_tpm, rtol=pyphi.constants.EPSILON) @@ -95,7 +95,7 @@ def test_macro_cut_subsystem(macro_subsystem): [1., 0.20083333], [1., 0.4225] ]) - assert np.array_equal(cut_subsystem.connectivity_matrix, answer_cm) + assert np.array_equal(cut_subsystem.cm, answer_cm) assert np.allclose(cut_subsystem.tpm.reshape([4] + [2], order='f'), answer_tpm, rtol=pyphi.constants.EPSILON) @@ -190,7 +190,7 @@ def test_run_tpm(): [1, 1, 1], [1, 1, 0], ]) - answer = convert.to_n_dimensional(np.array([ + answer = convert.to_multidimensional(np.array([ [0, 0, 0], [1, 1, 0], [1, 1, 1], @@ -373,7 +373,7 @@ def test_blackbox_partial_noise(s): # Noise connection from 2 -> 0 assert np.array_equal( noised.tpm, - convert.to_n_dimensional(np.array([ + convert.to_multidimensional(np.array([ [.5, 0, 0], [.5, 0, 1], [1., 0, 1], @@ -398,7 +398,7 @@ def test_blackbox_partial_noise(s): def test_blackbox_timescale(): # System is an OR gate and a COPY gate; the OR gate is connected with a # self loop. 
- tpm = convert.to_n_dimensional(np.array([ + tpm = convert.to_multidimensional(np.array([ [0, 0], [1, 1], [1, 0], diff --git a/test/test_models.py b/test/test_models.py index 4102e2a3d..91d7ef0fb 100644 --- a/test/test_models.py +++ b/test/test_models.py @@ -7,43 +7,57 @@ import numpy as np import pytest -from pyphi import Direction, Subsystem, config, constants, models +from pyphi import Direction, Subsystem, config, constants, exceptions, models + # Helper functions for constructing PyPhi objects # ----------------------------------------------- -def mip(phi=1.0, direction=None, mechanism=(), purview=(), partition=None, - unpartitioned_repertoire=None, partitioned_repertoire=None): - '''Build a ``Mip``.''' - return models.Mip(phi=phi, direction=direction, mechanism=mechanism, - purview=purview, partition=partition, - unpartitioned_repertoire=unpartitioned_repertoire, - partitioned_repertoire=partitioned_repertoire) +def ria(phi=1.0, direction=None, mechanism=(), purview=(), partition=None, + repertoire=None, partitioned_repertoire=None): + """Build a ``RepertoireIrreducibilityAnalysis``.""" + return models.RepertoireIrreducibilityAnalysis( + phi=phi, direction=direction, mechanism=mechanism, purview=purview, + partition=partition, repertoire=repertoire, + partitioned_repertoire=partitioned_repertoire + ) def mice(**kwargs): - '''Build a ``Mice``.''' - return models.Mice(mip(**kwargs)) + """Build a ``MaximallyIrreducibleCauseOrEffect``.""" + return models.MaximallyIrreducibleCauseOrEffect(ria(**kwargs)) + + +def mic(**kwargs): + """Build a ``MIC``.""" + return models.MaximallyIrreducibleCause(ria(**kwargs)) + + +def mie(**kwargs): + """Build a ``MIE``.""" + return models.MaximallyIrreducibleEffect(ria(**kwargs)) def concept(mechanism=(0, 1), cause_purview=(1,), effect_purview=(1,), phi=1.0, subsystem=None): - '''Build a ``Concept``.''' + """Build a ``Concept``.""" return models.Concept( mechanism=mechanism, - cause=mice(mechanism=mechanism, purview=cause_purview, 
phi=phi), - effect=mice(mechanism=mechanism, purview=effect_purview, phi=phi), + cause=mic(mechanism=mechanism, purview=cause_purview, phi=phi, + direction=Direction.CAUSE), + effect=mie(mechanism=mechanism, purview=effect_purview, phi=phi, + direction=Direction.EFFECT), subsystem=subsystem) -def bigmip(unpartitioned_constellation=(), partitioned_constellation=(), - subsystem=None, cut_subsystem=None, phi=1.0): - '''Build a ``BigMip``.''' +def sia(ces=(), partitioned_ces=(), subsystem=None, cut_subsystem=None, + phi=1.0): + """Build a ``SystemIrreducibilityAnalysis``.""" cut_subsystem = cut_subsystem or subsystem - return models.BigMip( - unpartitioned_constellation=unpartitioned_constellation, - partitioned_constellation=partitioned_constellation, + return models.SystemIrreducibilityAnalysis( + ces=ces, + partitioned_ces=partitioned_ces, subsystem=subsystem, cut_subsystem=cut_subsystem, phi=phi) @@ -280,58 +294,58 @@ def test_null_cut_equality(): # }}} -# Test MIP +# Test ria # {{{ -def test_mip_ordering_and_equality(): - assert mip(phi=1.0) < mip(phi=2.0) - assert mip(phi=2.0) > mip(phi=1.0) - assert mip(mechanism=(1,)) < mip(mechanism=(1, 2)) - assert mip(mechanism=(1, 2)) >= mip(mechanism=(1,)) - assert mip(purview=(1,)) < mip(purview=(1, 2)) - assert mip(purview=(1, 2)) >= mip(purview=(1,)) +def test_ria_ordering_and_equality(): + assert ria(phi=1.0) < ria(phi=2.0) + assert ria(phi=2.0) > ria(phi=1.0) + assert ria(mechanism=(1,)) < ria(mechanism=(1, 2)) + assert ria(mechanism=(1, 2)) >= ria(mechanism=(1,)) + assert ria(purview=(1,)) < ria(purview=(1, 2)) + assert ria(purview=(1, 2)) >= ria(purview=(1,)) - assert mip(phi=1.0) == mip(phi=1.0) - assert mip(phi=1.0) == mip(phi=(1.0 - constants.EPSILON / 2)) - assert mip(phi=1.0) != mip(phi=(1.0 - constants.EPSILON * 2)) - assert mip(direction=Direction.PAST) != mip(direction=Direction.FUTURE) - assert mip(mechanism=(1,)) != mip(mechanism=(1, 2)) + assert ria(phi=1.0) == ria(phi=1.0) + assert ria(phi=1.0) == 
ria(phi=(1.0 - constants.EPSILON / 2)) + assert ria(phi=1.0) != ria(phi=(1.0 - constants.EPSILON * 2)) + assert ria(direction=Direction.CAUSE) != ria(direction=Direction.EFFECT) + assert ria(mechanism=(1,)) != ria(mechanism=(1, 2)) with config.override(PICK_SMALLEST_PURVIEW=True): - assert mip(purview=(1, 2)) < mip(purview=(1,)) + assert ria(purview=(1, 2)) < ria(purview=(1,)) with pytest.raises(TypeError): - mip(direction=Direction.PAST) < mip(direction=Direction.FUTURE) + ria(direction=Direction.CAUSE) < ria(direction=Direction.EFFECT) with pytest.raises(TypeError): - mip(direction=Direction.PAST) >= mip(direction=Direction.FUTURE) + ria(direction=Direction.CAUSE) >= ria(direction=Direction.EFFECT) -def test_null_mip(): - direction = Direction.PAST +def test_null_ria(): + direction = Direction.CAUSE mechanism = (0,) purview = (1,) - unpartitioned_repertoire = 'repertoire' - null_mip = models._null_mip(direction, mechanism, purview, - unpartitioned_repertoire) - assert null_mip.direction == direction - assert null_mip.mechanism == mechanism - assert null_mip.purview == purview - assert null_mip.partition is None - assert null_mip.unpartitioned_repertoire == 'repertoire' - assert null_mip.partitioned_repertoire is None - assert null_mip.phi == 0 + repertoire = 'repertoire' + null_ria = models._null_ria(direction, mechanism, purview, + repertoire) + assert null_ria.direction == direction + assert null_ria.mechanism == mechanism + assert null_ria.purview == purview + assert null_ria.partition is None + assert null_ria.repertoire == 'repertoire' + assert null_ria.partitioned_repertoire is None + assert null_ria.phi == 0 -def test_mip_repr_str(): - print(repr(mip())) - print(str(mip())) +def test_ria_repr_str(): + print(repr(ria())) + print(str(ria())) # }}} -# Test MICE +# Test MaximallyIrreducibleCauseOrEffect # {{{ @@ -385,7 +399,7 @@ def test_mice_repr_str(): def test_relevant_connections(s, subsys_n1n2): - m = mice(mechanism=(0,), purview=(1,), 
direction=Direction.PAST) + m = mice(mechanism=(0,), purview=(1,), direction=Direction.CAUSE) answer = np.array([ [0, 0, 0], [1, 0, 0], @@ -393,7 +407,7 @@ def test_relevant_connections(s, subsys_n1n2): ]) assert np.array_equal(m._relevant_connections(s), answer) - m = mice(mechanism=(1,), purview=(1, 2), direction=Direction.FUTURE) + m = mice(mechanism=(1,), purview=(1, 2), direction=Direction.EFFECT) answer = np.array([ [0, 0, 0], [0, 1, 1], @@ -408,12 +422,12 @@ def test_damaged(s): cut_s = Subsystem(s.network, s.state, s.node_indices, cut=cut) # Cut splits mechanism: - m1 = mice(mechanism=(0, 1), purview=(1, 2), direction=Direction.FUTURE) + m1 = mice(mechanism=(0, 1), purview=(1, 2), direction=Direction.EFFECT) assert m1.damaged_by_cut(cut_s) assert not m1.damaged_by_cut(s) # Cut splits mechanism & purview (but not *only* mechanism) - m2 = mice(mechanism=(0,), purview=(1, 2), direction=Direction.FUTURE) + m2 = mice(mechanism=(0,), purview=(1, 2), direction=Direction.EFFECT) assert m2.damaged_by_cut(cut_s) assert not m2.damaged_by_cut(s) @@ -421,6 +435,22 @@ def test_damaged(s): # }}} +# Test MIC and MIE {{{ + +def test_mic_raises_wrong_direction(): + mic(direction=Direction.CAUSE, mechanism=(0,), purview=(1,)) + with pytest.raises(exceptions.WrongDirectionError): + mic(direction=Direction.EFFECT, mechanism=(0,), purview=(1,)) + + +def test_mie_raises_wrong_direction(): + mie(direction=Direction.EFFECT, mechanism=(0,), purview=(1,)) + with pytest.raises(exceptions.WrongDirectionError): + mie(direction=Direction.CAUSE, mechanism=(0,), purview=(1,)) + +# }}} + + # Test Concept # {{{ @@ -473,9 +503,9 @@ def test_concept_equality_effect_purview_nodes(s): def test_concept_equality_repertoires(s): phi = 1.0 - mice1 = mice(phi=phi, unpartitioned_repertoire=np.array([1, 2]), + mice1 = mice(phi=phi, repertoire=np.array([1, 2]), partitioned_repertoire=()) - mice2 = mice(phi=phi, unpartitioned_repertoire=np.array([0, 0]), + mice2 = mice(phi=phi, repertoire=np.array([0, 
0]), partitioned_repertoire=None) concept = models.Concept(mechanism=(), cause=mice1, effect=mice2, subsystem=s) @@ -524,68 +554,60 @@ def test_concept_emd_eq(s, subsys_n1n2): # }}} -# Test Constellation +# Test CauseEffectStructure # {{{ -def test_constellation_is_still_a_tuple(): - c = models.Constellation([concept()]) +def test_ces_is_still_a_tuple(): + c = models.CauseEffectStructure([concept()]) assert len(c) == 1 @config.override(REPR_VERBOSITY=0) -def test_constellation_repr(): - c = models.Constellation() - assert repr(c) == "Constellation()" +def test_ces_repr(): + c = models.CauseEffectStructure() + assert repr(c) == "CauseEffectStructure()" -def test_constellation_repr_str(): - c = models.Constellation([concept()]) +def test_ces_repr_str(): + c = models.CauseEffectStructure([concept()]) repr(c) str(c) -def test_normalize_constellation(): - c1 = models.Concept(mechanism=(1,)) - c2 = models.Concept(mechanism=(2,)) - c3 = models.Concept(mechanism=(1, 3)) - c4 = models.Concept(mechanism=(1, 2, 3)) - assert (c1, c2, c3, c4) == models.normalize_constellation((c3, c4, c2, c1)) - - -def test_constellations_are_always_normalized(): +def test_ces_are_always_normalized(): c1 = models.Concept(mechanism=(1,)) c2 = models.Concept(mechanism=(2,)) c3 = models.Concept(mechanism=(1, 3)) c4 = models.Concept(mechanism=(1, 2, 3)) - assert (c1, c2, c3, c4) == models.Constellation((c3, c4, c2, c1)) + assert (c1, c2, c3, c4) == models.CauseEffectStructure((c3, c4, c2, c1)) # }}} -# Test BigMip +# Test SystemIrreducibilityAnalysis # {{{ -def test_bigmip_ordering(s, s_noised, subsys_n0n2, subsys_n1n2): - phi1 = bigmip(subsystem=s) - phi2 = bigmip(subsystem=s, phi=1.0 + constants.EPSILON * 2) +def test_sia_ordering(s, s_noised, subsys_n0n2, subsys_n1n2): + phi1 = sia(subsystem=s) + phi2 = sia(subsystem=s, phi=1.0 + constants.EPSILON * 2) assert phi1 < phi2 assert phi2 > phi1 assert phi1 <= phi2 assert phi2 >= phi1 - assert bigmip(subsystem=subsys_n0n2) < 
bigmip(subsystem=subsys_n1n2) + assert sia(subsystem=subsys_n0n2) < sia(subsystem=subsys_n1n2) - different_system = bigmip(subsystem=s_noised) + different_system = sia(subsystem=s_noised) with pytest.raises(TypeError): phi1 <= different_system with pytest.raises(TypeError): phi1 >= different_system -def test_bigmip_ordering_by_subsystem_size(s, s_single): - small = bigmip(subsystem=s_single) - big = bigmip(subsystem=s) +def test_sia_ordering_by_subsystem_size(s, s_single): + small = sia(subsystem=s_single) + big = sia(subsystem=s) assert small < big assert small <= big assert big > small @@ -593,16 +615,16 @@ def test_bigmip_ordering_by_subsystem_size(s, s_single): assert big != small -def test_bigmip_equality(s): - bm = bigmip(subsystem=s) - close_enough = bigmip(subsystem=s, phi=(1.0 - constants.EPSILON / 2)) - not_quite = bigmip(subsystem=s, phi=(1.0 - constants.EPSILON * 2)) +def test_sia_equality(s): + bm = sia(subsystem=s) + close_enough = sia(subsystem=s, phi=(1.0 - constants.EPSILON / 2)) + not_quite = sia(subsystem=s, phi=(1.0 - constants.EPSILON * 2)) assert bm == close_enough assert bm != not_quite -def test_bigmip_repr_str(s): - bm = bigmip(subsystem=s) +def test_sia_repr_str(s): + bm = sia(subsystem=s) print(repr(bm)) print(str(bm)) @@ -622,7 +644,7 @@ def test_indent(): class ReadableReprClass: - '''Dummy class for make_repr tests''' + """Dummy class for make_repr tests""" some_attr = 3.14 def __repr__(self): diff --git a/test/test_network.py b/test/test_network.py index 1eeaf8639..8e55300d4 100644 --- a/test/test_network.py +++ b/test/test_network.py @@ -28,18 +28,18 @@ def test_network_init_validation(network): def test_network_creates_fully_connected_cm_by_default(): - tpm = np.zeros((2*2*2, 3)) - network = Network(tpm, connectivity_matrix=None) + tpm = np.zeros((2 * 2 * 2, 3)) + network = Network(tpm, cm=None) target_cm = np.ones((3, 3)) - assert np.array_equal(network.connectivity_matrix, target_cm) + assert np.array_equal(network.cm, target_cm) 
def test_potential_purviews(s): mechanism = (0,) - assert (s.network.potential_purviews(Direction.PAST, mechanism) - == [(1,), (2,), (1, 2)]) - assert (s.network.potential_purviews(Direction.FUTURE, mechanism) - == [(2,)]) + assert (s.network.potential_purviews(Direction.CAUSE, mechanism) == + [(1,), (2,), (1, 2)]) + assert (s.network.potential_purviews(Direction.EFFECT, mechanism) == + [(2,)]) def test_node_labels(standard): diff --git a/test/test_parallel.py b/test/test_parallel.py index 8d2a15747..21568d1c2 100644 --- a/test/test_parallel.py +++ b/test/test_parallel.py @@ -72,7 +72,7 @@ def test_materialize_list_only_when_needed(): class MapError(MapSquare): - '''Raise an exception in the worker process.''' + """Raise an exception in the worker process.""" @staticmethod def compute(num): raise Exception("I don't wanna!") diff --git a/test/test_subsystem.py b/test/test_subsystem.py index ceeb665d6..d188184cd 100644 --- a/test/test_subsystem.py +++ b/test/test_subsystem.py @@ -7,7 +7,10 @@ import example_networks from pyphi import Direction, Network, config, exceptions -from pyphi.models import Bipartition, Cut, KPartition, Part, Tripartition +from pyphi.models import (Bipartition, Concept, Cut, KPartition, + MaximallyIrreducibleCause, + MaximallyIrreducibleEffect, Part, + RepertoireIrreducibilityAnalysis, Tripartition) from pyphi.subsystem import (Subsystem, all_partitions, mip_bipartitions, wedge_partitions) @@ -51,8 +54,8 @@ def test_empty_init(s): def test_eq(subsys_n0n2, subsys_n1n2): assert subsys_n0n2 == subsys_n0n2 assert subsys_n0n2 != subsys_n1n2 - assert subsys_n0n2 != None - assert None != subsys_n1n2 + assert subsys_n0n2 is not None + assert subsys_n1n2 is not None def test_cmp(subsys_n0n2, subsys_n1n2, s): @@ -127,11 +130,10 @@ def test_wedge_partitions(): def test_partitioned_repertoire_with_tripartition(s): - mechanism, purview = (0,), (1, 2) tripartition = Tripartition(Part((), (1,)), Part((0,), ()), Part((), (2,))) assert np.array_equal( - 
s.partitioned_repertoire(Direction.PAST, tripartition), + s.partitioned_repertoire(Direction.CAUSE, tripartition), np.array([[[0.25, 0.25], [0.25, 0.25]]])) @@ -139,17 +141,17 @@ def test_tripartitions_choses_smallest_purview(s): mechanism = (1, 2) with config.override(PICK_SMALLEST_PURVIEW=False): - effect = s.core_effect(mechanism) - assert effect.phi == 0.5 - assert effect.purview == (0, 1) + mie = s.mie(mechanism) + assert mie.phi == 0.5 + assert mie.purview == (0, 1) s.clear_caches() # In phi-tie, chose the smaller purview (0,) with config.override(PICK_SMALLEST_PURVIEW=True): - effect = s.core_effect(mechanism) - assert effect.phi == 0.5 - assert effect.purview == (0,) + mie = s.mie(mechanism) + assert mie.phi == 0.5 + assert mie.purview == (0,) def test_all_partitions(): @@ -217,3 +219,26 @@ def test_specify_elements_with_labels(standard): def test_indices2labels(s): assert s.indices2labels((1, 2)) == ('B', 'C') + + +def test_null_concept(s): + cause = MaximallyIrreducibleCause( + RepertoireIrreducibilityAnalysis( + repertoire=s.unconstrained_cause_repertoire(()), phi=0, + direction=Direction.CAUSE, mechanism=(), purview=(), + partition=None, partitioned_repertoire=None)) + effect = MaximallyIrreducibleEffect( + RepertoireIrreducibilityAnalysis( + repertoire=s.unconstrained_effect_repertoire(()), phi=0, + direction=Direction.EFFECT, mechanism=(), purview=(), + partition=None, partitioned_repertoire=None)) + assert (s.null_concept == + Concept(mechanism=(), cause=cause, effect=effect, subsystem=s)) + + +def test_concept_no_mechanism(s): + assert s.concept(()) == s.null_concept + + +def test_concept_nonexistent(s): + assert not s.concept((0, 2)) diff --git a/test/test_subsystem_cause_effect_repertoire.py b/test/test_subsystem_cause_effect_repertoire.py index b896c2759..3634c50a0 100644 --- a/test/test_subsystem_cause_effect_repertoire.py +++ b/test/test_subsystem_cause_effect_repertoire.py @@ -84,7 +84,7 @@ cut=Cut((2,), (0, 1))), [0], [1], - np.array([1/3, 
2/3]).reshape(1, 2, 1, order="F") + np.array([1 / 3, 2 / 3]).reshape(1, 2, 1, order="F") ), # }}} # Subset, with cut {{{ @@ -122,7 +122,7 @@ simple_a_just_on, [0], [0], - # Cause repertoire is maximally selective; the past state must have + # Cause repertoire is maximally selective; the previous state must have # been {0,1,1}, so `expected[(0,1,1)]` should be 1 and everything else # should be 0 np.array([1.0, 0.0]).reshape(2, 1, 1, order="F") @@ -299,7 +299,7 @@ simple_a_just_on, [1], [0, 1, 2], - np.array([1., 0., 0., 0., 0., 0., 0., 0.]).reshape(2,2,2) + np.array([1., 0., 0., 0., 0., 0., 0., 0.]).reshape(2, 2, 2) ), ( 'effect_repertoire', simple_a_just_on, @@ -321,7 +321,7 @@ simple_all_off, [0], [0, 1, 2], - np.array([0.75, 0., 0., 0., 0.25, 0., 0., 0.]).reshape(2,2,2) + np.array([0.75, 0., 0., 0., 0.25, 0., 0., 0.]).reshape(2, 2, 2) ) # }}} # }}} @@ -335,7 +335,7 @@ @pytest.mark.parametrize(parameter_string, scenarios) def test_cause_and_effect_repertoire(function, subsystem, mechanism, purview, expected): - '''Test ``effect_repertoire`` or ``cause_repertoire``.''' + """Test ``effect_repertoire`` or ``cause_repertoire``.""" print("\nTesting " + function + " with subsystem \n" + str(subsystem)) @@ -349,9 +349,9 @@ def test_cause_and_effect_repertoire(function, subsystem, mechanism, purview, print("\nMechanism:".rjust(12), mechanism, "\nPurview:".rjust(12), purview, "\nCut:".rjust(12), subsystem.cut, "\n") - print('-'*40, "Result:", result, "\nResult Shape:", result.shape, '-'*40, - "Expected:", expected, "\nExpected Shape:", expected.shape, '-'*40, - sep="\n") + print('-' * 40, "Result:", result, "\nResult Shape:", result.shape, + '-' * 40, "Expected:", expected, "\nExpected Shape:", expected.shape, + '-' * 40, sep="\n") assert np.array_equal(result, expected) diff --git a/test/test_subsystem_expand.py b/test/test_subsystem_expand.py index 6f0540ded..d1abadd34 100644 --- a/test/test_subsystem_expand.py +++ b/test/test_subsystem_expand.py @@ -5,9 +5,8 @@ import 
numpy as np import pytest -from pyphi.compute import big_mip +from pyphi import Direction, compute from pyphi.constants import EPSILON -from pyphi import Direction CD = (2, 3) BCD = (1, 2, 3) @@ -15,25 +14,25 @@ def test_expand_cause_repertoire(micro_s_all_off): - mip = big_mip(micro_s_all_off) - A = mip.unpartitioned_constellation[0] - cause = A.cause.mip.unpartitioned_repertoire + sia = compute.sia(micro_s_all_off) + A = sia.ces[0] + cause = A.cause_repertoire assert np.all(abs(A.expand_cause_repertoire(CD) - cause) < EPSILON) assert np.all(abs( A.expand_cause_repertoire(BCD).flatten(order='F') - - np.array([1/6 if i < 6 else 0 for i in range(8)])) < EPSILON) + np.array([1 / 6 if i < 6 else 0 for i in range(8)])) < EPSILON) assert np.all(abs( A.expand_cause_repertoire(ABCD).flatten(order='F') - - np.array([1/12 if i < 12 else 0 for i in range(16)])) < EPSILON) + np.array([1 / 12 if i < 12 else 0 for i in range(16)])) < EPSILON) assert np.all(abs(A.expand_cause_repertoire(ABCD) - A.expand_cause_repertoire()) < EPSILON) def test_expand_effect_repertoire(micro_s_all_off): - mip = big_mip(micro_s_all_off) - A = mip.unpartitioned_constellation[0] - effect = A.effect.mip.unpartitioned_repertoire + sia = compute.sia(micro_s_all_off) + A = sia.ces[0] + effect = A.effect_repertoire assert np.all(abs(A.expand_effect_repertoire(CD) - effect) < EPSILON) assert np.all(abs(A.expand_effect_repertoire(BCD).flatten(order='F') - @@ -55,4 +54,4 @@ def test_expand_repertoire_purview_must_be_subset_of_new_purview(s): new_purview = (1,) cause_repertoire = s.cause_repertoire(mechanism, purview) with pytest.raises(ValueError): - s.expand_repertoire(Direction.PAST, cause_repertoire, new_purview) + s.expand_repertoire(Direction.CAUSE, cause_repertoire, new_purview) diff --git a/test/test_subsystem_phi_max.py b/test/test_subsystem_phi_max.py index 02962727b..5e1176ffe 100644 --- a/test/test_subsystem_phi_max.py +++ b/test/test_subsystem_phi_max.py @@ -7,14 +7,14 @@ import example_networks 
from pyphi import Direction, Subsystem -from pyphi.models import Cut, Mice, _null_mip +from pyphi.models import Cut, MaximallyIrreducibleCauseOrEffect, _null_ria from pyphi.utils import eq # Expected results {{{ # ==================== s = example_networks.s() -directions = (Direction.PAST, Direction.FUTURE) +directions = (Direction.CAUSE, Direction.EFFECT) cuts = (None, Cut((1, 2), (0,))) subsystem = { cut: Subsystem(s.network, s.state, s.node_indices, cut=cut) @@ -23,13 +23,13 @@ expected_purview_indices = { cuts[0]: { - Direction.PAST: { + Direction.CAUSE: { (1,): (2,), (2,): (0, 1), (0, 1): (1, 2), (0, 1, 2): (0, 1, 2) }, - Direction.FUTURE: { + Direction.EFFECT: { (1,): (0,), (2,): (1,), (0, 1): (2,), @@ -37,13 +37,13 @@ } }, cuts[1]: { - Direction.PAST: { + Direction.CAUSE: { (1,): (2,), (2,): (0, 1), (0, 1): (), (0, 1, 2): (), }, - Direction.FUTURE: { + Direction.EFFECT: { (1,): (2,), (2,): (1,), (0, 1): (2,), @@ -72,7 +72,7 @@ expected_mice = { cut: { direction: [ - Mice(mip) for mechanism, mip in + MaximallyIrreducibleCauseOrEffect(mip) for mechanism, mip in expected_mips[cut][direction].items() ] for direction in directions } for cut in cuts @@ -102,8 +102,10 @@ def test_find_mice(cut, direction, expected): def test_find_mice_empty(s): - expected = [Mice(_null_mip(direction, (), ())) for direction in - directions] + expected = [ + MaximallyIrreducibleCauseOrEffect(_null_ria(direction, (), ())) + for direction in directions + ] assert all(s.find_mice(mice.direction, mice.mechanism) == mice for mice in expected) @@ -114,20 +116,20 @@ def test_find_mice_empty(s): @pytest.mark.parametrize(mice_parameter_string, mice_scenarios) -def test_core_cause_or_effect(cut, direction, expected): - if direction == Direction.PAST: - core_ce = subsystem[cut].core_cause - elif direction == Direction.FUTURE: - core_ce = subsystem[cut].core_effect - assert core_ce(expected.mechanism) == expected +def test_mic_or_mie(cut, direction, expected): + if direction == Direction.CAUSE: 
+ mice = subsystem[cut].mic + elif direction == Direction.EFFECT: + mice = subsystem[cut].mie + assert mice(expected.mechanism) == expected phi_max_scenarios = [ [ - (cut, past.mechanism, min(past.phi, future.phi)) - for past, future in zip(expected_mice[cut][Direction.PAST], - expected_mice[cut][Direction.FUTURE])] - for cut in cuts + (cut, cause.mechanism, min(cause.phi, effect.phi)) + for cause, effect in zip(expected_mice[cut][Direction.CAUSE], + expected_mice[cut][Direction.EFFECT]) + ] for cut in cuts ] # Flatten singly-nested list of scenarios. phi_max_scenarios = list(chain(*phi_max_scenarios)) diff --git a/test/test_subsystem_small_phi.py b/test/test_subsystem_small_phi.py index bc5e4c709..f509bfeb5 100644 --- a/test/test_subsystem_small_phi.py +++ b/test/test_subsystem_small_phi.py @@ -1,14 +1,12 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -from pprint import pprint - import numpy as np import pytest import example_networks from pyphi import Direction, constants -from pyphi.models import Mip, Part +from pyphi.models import Part, RepertoireIrreducibilityAnalysis s = example_networks.s() @@ -19,7 +17,7 @@ # Test scenario structure: # # ( -# direction of MIP (Direction.PAST or Direction.FUTURE), +# direction of RIA (Direction.CAUSE or Direction.EFFECT), # subsystem, cut, # mechanism, # purview, @@ -36,12 +34,12 @@ # } scenarios = [ -# Past {{{ +# Previous {{{ # ~~~~~~~~ # No cut {{{ # ---------- ( - Direction.PAST, + Direction.CAUSE, s, None, (0,), (0,), @@ -50,7 +48,7 @@ Part(mechanism=(0,), purview=())): np.array([0.5, 0.5]).reshape(2, 1, 1, order="F") }, - 'unpartitioned_repertoire': + 'repertoire': np.array([0.5, 0.5]).reshape(2, 1, 1, order="F"), 'phi': 0.0} ), @@ -58,7 +56,7 @@ # With cut {{{ # ------------ ( - Direction.PAST, + Direction.CAUSE, s, (0, (1, 2)), (1,), (2,), @@ -67,18 +65,18 @@ Part(mechanism=(1,), purview=())): np.array([0.5, 0.5]).reshape(1, 1, 2, order="F") }, - 'unpartitioned_repertoire': + 'repertoire': np.array([1., 
0.]).reshape(1, 1, 2), 'phi': 0.5} ), # }}} # }}} -# Future {{{ +# Next {{{ # ~~~~~~~~~~ # No cut {{{ # ---------- ( - Direction.FUTURE, + Direction.EFFECT, s, None, (0, 1, 2), (0, 1, 2), @@ -87,42 +85,42 @@ # breaking ties (Part(mechanism=(2,), purview=()), Part(mechanism=(0, 1), purview=(0, 1, 2))): - np.array([0., 0., 0.5, 0.5, 0., 0., 0., 0.]).reshape(2,2,2, - order="F"), + np.array([0., 0., 0.5, 0.5, 0., 0., 0., 0.]).reshape( + 2, 2, 2, order="F"), (Part(mechanism=(), purview=(0,)), Part(mechanism=(0, 1, 2), purview=(1, 2))): - np.array([0., 0., 0.5, 0.5, 0., 0., 0., 0.]).reshape(2,2,2, - order="F"), + np.array([0., 0., 0.5, 0.5, 0., 0., 0., 0.]).reshape( + 2, 2, 2, order="F"), (Part(mechanism=(2,), purview=(0,)), Part(mechanism=(0, 1), purview=(1, 2))): - np.array([0., 0., 0.5, 0.5, 0., 0., 0., 0.]).reshape(2,2,2, - order="F"), + np.array([0., 0., 0.5, 0.5, 0., 0., 0., 0.]).reshape( + 2, 2, 2, order="F"), (Part(mechanism=(0,), purview=()), Part(mechanism=(1, 2), purview=(0, 1, 2))): - np.array([0.5, 0., 0., 0., 0.5, 0., 0., 0.]).reshape(2,2,2, - order="F"), + np.array([0.5, 0., 0., 0., 0.5, 0., 0., 0.]).reshape( + 2, 2, 2, order="F"), (Part(mechanism=(), purview=(1,)), Part(mechanism=(0, 1, 2), purview=(0, 2))): - np.array([0., 0., 0., 0., 0.5, 0., 0.5, 0.]).reshape(2,2,2, - order="F"), + np.array([0., 0., 0., 0., 0.5, 0., 0.5, 0.]).reshape( + 2, 2, 2, order="F"), (Part(mechanism=(2,), purview=(1,)), Part(mechanism=(0, 1), purview=(0, 2))): - np.array([0., 0., 0., 0., 0.5, 0.5, 0., 0.]).reshape(2,2,2, - order="F"), + np.array([0., 0., 0., 0., 0.5, 0.5, 0., 0.]).reshape( + 2, 2, 2, order="F"), (Part(mechanism=(), purview=(2,)), Part(mechanism=(0, 1, 2), purview=(0, 1))): - np.array([0.5, 0., 0., 0., 0.5, 0., 0., 0.]).reshape(2,2,2, - order="F"), + np.array([0.5, 0., 0., 0., 0.5, 0., 0., 0.]).reshape( + 2, 2, 2, order="F"), (Part(mechanism=(0,), purview=(2,)), Part(mechanism=(1, 2), purview=(0, 1))): - np.array([0.5, 0., 0., 0., 0.5, 0., 0., 0.]).reshape(2,2,2, 
- order="F"), + np.array([0.5, 0., 0., 0., 0.5, 0., 0., 0.]).reshape( + 2, 2, 2, order="F"), (Part(mechanism=(2,), purview=(0, 1)), Part(mechanism=(0, 1), purview=(2,))): - np.array([0., 0., 0., 0., 0.5, 0.5, 0., 0.]).reshape(2,2,2, - order="F"), + np.array([0., 0., 0., 0., 0.5, 0.5, 0., 0.]).reshape( + 2, 2, 2, order="F"), }, - 'unpartitioned_repertoire': + 'repertoire': np.array([0., 1., 0., 0., 0., 0., 0., 0.]).reshape(2, 2, 2), 'phi': 0.5} ), @@ -131,7 +129,7 @@ # With cut {{{ # ------------ ( - Direction.FUTURE, + Direction.EFFECT, s, ((1, 2), 0), (2,), (1,), @@ -140,11 +138,11 @@ Part(mechanism=(2,), purview=())): np.array([0.5, 0.5]).reshape(1, 2, 1, order="F") }, - 'unpartitioned_repertoire': + 'repertoire': np.array([1., 0.]).reshape(1, 2, 1), 'phi': 0.5} ), ( - Direction.FUTURE, + Direction.EFFECT, s, ((0, 2), 1), (2,), (0,), @@ -153,11 +151,11 @@ Part(mechanism=(2,), purview=())): np.array([0.25, 0.75]).reshape(2, 1, 1, order="F") }, - 'unpartitioned_repertoire': + 'repertoire': np.array([0.5, 0.5]).reshape(2, 1, 1), 'phi': 0.25} ), ( - Direction.FUTURE, + Direction.EFFECT, s, ((0, 2), 1), (0, 1, 2), (0, 2), @@ -180,11 +178,11 @@ Part(mechanism=(1, 2), purview=(0,))): np.array([0.5, 0., 0.5, 0.]).reshape(2, 1, 2, order="F") }, - 'unpartitioned_repertoire': + 'repertoire': np.array([0., 1., 0., 0.]).reshape(2, 1, 2), 'phi': 0.5} ), ( - Direction.FUTURE, + Direction.EFFECT, s, ((0, 1), 2), (1,), (0,), @@ -193,7 +191,7 @@ Part(mechanism=(1,), purview=())): np.array([0.25, 0.75]).reshape(2, 1, 1, order="F") }, - 'unpartitioned_repertoire': + 'repertoire': np.array([0.5, 0.5]).reshape(2, 1, 1), 'phi': 0.25} ) @@ -210,20 +208,22 @@ def test_find_mip(direction, subsystem, cut, mechanism, purview, expected): # IMPORTANT: Since several different ways of partitioning the system can # yield the same phi value, the partition used in finding the MIP is not - # unique. 
Thus, ``expected['partitions']`` is a dictionary that maps all the - # ways of partitioning the system that yeild the minimal phi value to their - # expected partitioned repertoires. + # unique. Thus, ``expected['partitions']`` is a dictionary that maps all + # the ways of partitioning the system that yield the minimal phi value to + # their expected partitioned repertoires. if expected: # Construct expected list of possible MIPs expected = [ - Mip(direction=direction, + RepertoireIrreducibilityAnalysis( + direction=direction, partition=expected_partition, mechanism=mechanism, purview=purview, - unpartitioned_repertoire=expected['unpartitioned_repertoire'], + repertoire=expected['repertoire'], partitioned_repertoire=expected_partitioned_repertoire, - phi=expected['phi']) + phi=expected['phi'] + ) for expected_partition, expected_partitioned_repertoire in expected['partitions'].items() ] @@ -248,44 +248,44 @@ def test_find_mip(direction, subsystem, cut, mechanism, purview, expected): # ======================== -def test_mip_past(s): +def test_cause_mip(s): mechanism = s.node_indices purview = s.node_indices - mip_past = s.find_mip(Direction.PAST, mechanism, purview) - assert mip_past == s.mip_past(mechanism, purview) + mip_cause = s.find_mip(Direction.CAUSE, mechanism, purview) + assert mip_cause == s.cause_mip(mechanism, purview) -def test_mip_future(s): +def test_effect_mip(s): mechanism = s.node_indices purview = s.node_indices - mip_future = s.find_mip(Direction.FUTURE, mechanism, purview) - assert mip_future == s.mip_future(mechanism, purview) + mip_effect = s.find_mip(Direction.EFFECT, mechanism, purview) + assert mip_effect == s.effect_mip(mechanism, purview) -def test_phi_mip_past(s): +def test_phi_cause_mip(s): mechanism = s.node_indices purview = s.node_indices - assert (s.phi_mip_past(mechanism, purview) == - s.mip_past(mechanism, purview).phi) + assert (s.phi_cause_mip(mechanism, purview) == + s.cause_mip(mechanism, purview).phi) -def 
test_phi_mip_past_reducible(s): +def test_phi_cause_mip_reducible(s): mechanism = (1,) purview = (0,) - assert (0 == s.phi_mip_past(mechanism, purview)) + assert s.phi_cause_mip(mechanism, purview) == 0 -def test_phi_mip_future(s): +def test_phi_effect_mip(s): mechanism = s.node_indices purview = s.node_indices - assert (s.phi_mip_future(mechanism, purview) == - s.mip_future(mechanism, purview).phi) + assert (s.phi_effect_mip(mechanism, purview) == + s.effect_mip(mechanism, purview).phi) -def test_phi_mip_future_reducible(s): +def test_phi_effect_mip_reducible(s): mechanism = (0, 1) purview = (1, ) - assert (0 == s.phi_mip_future(mechanism, purview)) + assert s.phi_effect_mip(mechanism, purview) == 0 def test_phi(s): diff --git a/test/test_tpm.py b/test/test_tpm.py index 408ea102f..0b85bba24 100644 --- a/test/test_tpm.py +++ b/test/test_tpm.py @@ -12,7 +12,7 @@ def test_is_state_by_state(): tpm = np.ones((8, 8)) assert is_state_by_state(tpm) - # State-by-node, N-dimensional + # State-by-node, multidimensional tpm = np.ones((2, 2, 2, 3)) assert not is_state_by_state(tpm) @@ -45,5 +45,6 @@ def test_marginalize_out(s): np.array([[[[0.5, 0.0, 0.5], [1.0, 1.0, 0.5]]]])) + def test_infer_cm(rule152): assert np.array_equal(infer_cm(rule152.tpm), rule152.cm) diff --git a/test/test_utils.py b/test/test_utils.py index 252384dbe..7b9bbb01a 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -21,7 +21,7 @@ def test_all_states(): (1, 1, 1), ] assert list(utils.all_states(3)) == states - assert list(utils.all_states(3, holi=True)) == [ + assert list(utils.all_states(3, big_endian=True)) == [ tuple(reversed(state)) for state in states ] @@ -69,7 +69,7 @@ def test_powerset(): def test_powerset_takes_iterable(): a = iter([0, 1]) - assert list(utils.powerset(a)) ==[(), (0,), (1,), (0, 1)] + assert list(utils.powerset(a)) == [(), (0,), (1,), (0, 1)] def test_np_hashable(): diff --git a/test/test_validate.py b/test/test_validate.py index 6bc3c229b..361eba49d 100644 --- 
a/test/test_validate.py +++ b/test/test_validate.py @@ -9,8 +9,8 @@ def test_validate_direction(): - validate.direction(Direction.PAST) - validate.direction(Direction.FUTURE) + validate.direction(Direction.CAUSE) + validate.direction(Direction.EFFECT) with pytest.raises(ValueError): validate.direction("dogeeeee") @@ -34,10 +34,10 @@ def test_validate_tpm_nonbinary_nodes(): def test_validate_tpm_conditional_independence(): tpm = np.array([ - [1, 0, 0, 0], - [0, .5, .5, 0], - [0, .5, .5, 0], - [0, 0, 0, 1], + [1, 0.0, 0.0, 0], + [0, 0.5, 0.5, 0], + [0, 0.5, 0.5, 0], + [0, 0.0, 0.0, 1], ]) with pytest.raises(ValueError): validate.conditionally_independent(tpm) @@ -48,23 +48,23 @@ def test_validate_tpm_conditional_independence(): validate.tpm(tpm, check_independence=False) -def test_validate_cm_valid(s): - assert validate.connectivity_matrix(s.network.connectivity_matrix) +def test_validate_connectivity_matrix_valid(s): + assert validate.connectivity_matrix(s.network.cm) -def test_validate_cm_not_square(): +def test_validate_connectivity_matrix_not_square(): cm = np.random.binomial(1, 0.5, (4, 5)) with pytest.raises(ValueError): assert validate.connectivity_matrix(cm) -def test_validate_cm_not_2D(): +def test_validate_connectivity_matrix_not_2D(): cm = np.arange(8).reshape(2, 2, 2) with pytest.raises(ValueError): assert validate.connectivity_matrix(cm) -def test_validate_cm_not_binary(): +def test_validate_connectivity_matrix_not_binary(): cm = np.arange(16).reshape(4, 4) with pytest.raises(ValueError): assert validate.connectivity_matrix(cm)