diff --git a/.bumpversion.cfg b/.bumpversion.cfg
deleted file mode 100644
index 1a82d4c0..00000000
--- a/.bumpversion.cfg
+++ /dev/null
@@ -1,20 +0,0 @@
-[bumpversion]
-current_version = 0.1.0
-commit = True
-tag = True
-
-[bumpversion:file:allensdk/ipfx/__init__.py]
-search = __version__ = '{current_version}'
-replace = __version__ = '{new_version}'
-
-[bumpversion:file:setup.py]
-search = version = '{current_version}'
-replace = version = '{new_version}'
-
-[bumpversion:file:.cookiecutter/.cookiecutter.json]
-search = version = "{current_version}"
-replace = version = "{new_version}"
-
-[bumpversion:file:.cookiecutter/.cookiecutter.yaml]
-search = version = {current_version}
-replace = version = {new_version}
diff --git a/.circleci/config.yml b/.circleci/config.yml
index 5ec8a62c..430232ef 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -49,7 +49,7 @@ jobs:
           name: install test dependencies
           command: |
             . venv/bin/activate
-            pip install -r test_requirements.txt
+            pip install -r requirements-test.txt
 
       # run tests!
       - run:
diff --git a/.github/workflows/github-actions-ci.yml b/.github/workflows/github-actions-ci.yml
new file mode 100644
index 00000000..1ba7e434
--- /dev/null
+++ b/.github/workflows/github-actions-ci.yml
@@ -0,0 +1,85 @@
+name: ci/github-actions
+
+on:
+  pull_request:
+    branches: [ master, feature/** ]
+
+  push:
+    branches: [ master, feature/** ]
+
+
+jobs:
+  run-light-tests:
+    env:
+      SKIP_LIMS: true
+      TEST_INHOUSE: false
+      ALLOW_TEST_DOWNLOADS: false
+      TEST_API_ENDPOINT: "http://api.brain-map.org"
+      IPFX_TEST_TIMEOUT: 60
+
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ["3.9", "3.11"]
+
+    steps:
+    - uses: actions/checkout@v3
+    - name: Set up Python ${{ matrix.python-version }}
+      uses: actions/setup-python@v4
+      with:
+        python-version: ${{ matrix.python-version }}
+    - name: Install dependencies
+      run: |
+        sudo apt-get update
+        sudo apt-get install -y hdf5-tools curl
+        python -m pip install --upgrade pip
+        pip install -r requirements.txt
+        pip install allensdk
+    - name: Run tests
+      run: |
+        pip install -r requirements-test.txt
+        mkdir -p test-results
+        git lfs install
+        git config lfs.url 'https://github.com/AllenInstitute/ipfx.git/info/lfs'
+        git lfs env
+        git lfs pull
+        python -m pytest --junitxml=test-results/junit.xml --verbose
+
+
+  onprem-tests:
+    name: on-prem tests
+    runs-on: ["self-hosted"]
+    strategy:
+      matrix:
+        image: ["ipfx_py39:latest", "ipfx_py311:latest"]
+    steps:
+    - uses: actions/checkout@v4
+    - name: run test in docker
+      run: |
+        docker run \
+          --env-file ~/env.list \
+          --mount type=bind,source=$PWD,target=/local1/github_worker,bind-propagation=rshared \
+          -v /data/informatics/module_test_data/:/data/informatics/module_test_data/ \
+          -v /allen/aibs/informatics/module_test_data/:/allen/aibs/informatics/module_test_data/ \
+          -v /allen/programs/celltypes/production/mousecelltypes/:/allen/programs/celltypes/production/mousecelltypes/ \
+          -v /allen/programs/celltypes/workgroups/279/:/allen/programs/celltypes/workgroups/279/ \
+          -v /allen/programs/celltypes/production/humancelltypes/:/allen/programs/celltypes/production/humancelltypes/ \
+          --workdir /local1/github_worker --rm \
+          --user 1001:1001 \
+          ${{ matrix.image }} \
+          /bin/bash -c "python -m venv .venv; \
+            source .venv/bin/activate; \
+            pip install --upgrade pip; \
+            pip install numpy; \
+            pip install -r requirements.txt; \
+            export TEST_API_ENDPOINT=http://api.brain-map.org; \
+            export SKIP_LIMS=false; \
+            export TEST_INHOUSE=true; \
+            export ALLOW_TEST_DOWNLOADS=true; \
+            export IPFX_TEST_TIMEOUT=60; \
+            pip install -r requirements-test.txt; \
+            git config lfs.url 'https://github.com/AllenInstitute/ipfx.git/info/lfs'; \
+            git lfs env; \
+            git lfs pull; \
+            python -m pytest \
+            --junitxml=test-reports/test.xml --verbose"
\ No newline at end of file
diff --git a/.github/workflows/nightly-onprem.yml b/.github/workflows/nightly-onprem.yml
new file mode 100644
index 00000000..02a67f7a
--- /dev/null
+++ b/.github/workflows/nightly-onprem.yml
@@ -0,0 +1,42 @@
+name: IPFX Nightly onprem tests
+on:
+  schedule:
+    - cron: '05 6 * * *'
+
+
+jobs:
+  onprem:
+    name: on-prem tests
+    runs-on: ["self-hosted"]
+    strategy:
+      matrix:
+        image: ["ipfx_py39:latest", "ipfx_py311:latest"]
+        branch: ["master", "feature/**"]
+    steps:
+    - uses: actions/checkout@v4
+      with:
+        ref: ${{ matrix.branch }}
+    - name: run test in docker
+      run: |
+        docker run \
+          --mount type=bind,source=$PWD,target=/local1/github_worker,bind-propagation=rshared \
+          -v /data/informatics/module_test_data/:/data/informatics/module_test_data/ \
+          -v /allen/aibs/informatics/module_test_data/:/allen/aibs/informatics/module_test_data/ \
+          -v /allen/programs/celltypes/production/mousecelltypes/:/allen/programs/celltypes/production/mousecelltypes/ \
+          -v /allen/programs/celltypes/workgroups/279/:/allen/programs/celltypes/workgroups/279/ \
+          -v /allen/programs/celltypes/production/humancelltypes/:/allen/programs/celltypes/production/humancelltypes/ \
+          --workdir /local1/github_worker --rm \
+          --user 1001:1001 \
+          ${{ matrix.image }} \
+          /bin/bash -c "python -m venv .venv; \
+            source .venv/bin/activate; \
+            pip install --upgrade pip; \
+            pip install numpy; \
+            pip install -r requirements.txt; \
+            export TEST_API_ENDPOINT=http://api.brain-map.org; \
+            pip install -r requirements-test.txt; \
+            git config lfs.url 'https://github.com/AllenInstitute/ipfx.git/info/lfs'; \
+            git lfs env; \
+            git lfs pull; \
+            python -m pytest \
+            --junitxml=test-reports/test.xml --verbose"
diff --git a/.gitignore b/.gitignore
index bc645b8c..c29453a6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -78,5 +78,14 @@ sandbox/
 #linters
 .mypy_cache
 
-#virtualenv
-venv
\ No newline at end of file
+# virtualenv / environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# MacOS Finder
+.DS_Store
diff --git a/AUTHORS.rst b/AUTHORS.rst
index 50b31a90..21b36d48 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -12,3 +12,4 @@ Credits
 * Nile Graddis @nilegraddis
 * Sergey Gratiy @sgratiy
 * Yang Yu @gnayuy
+* Sherif Soliman @sheriferson
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f28af2ed..1425bb53 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,9 @@ All notable changes to this project will be documented in this file.
 ### Added
 
 ### Changed
+- Removed Python 3.6 support
+- Updated dependencies and library for Python 3.9 to 3.11 support
+- Moved CI and testing to GitHub Actions
 
 ## [1.0.8] = 2023-06-29
 Changed:
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 9b4e686a..a8036d87 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -96,7 +96,7 @@ visual git plugins, we prefer the following convention for branch naming:
    GH-712/bugfix/auto-reward-key
    GH-9999/feature/parallel-behavior-analysis
    ```
-* Create an environment and install necessary requirements: `requirements.txt` and `test_requirements.txt`
+* Create an environment and install necessary requirements: `requirements.txt` and `requirements-test.txt`
 * Start writing code!
 
 ### Style guidelines
diff --git a/MANIFEST.in b/MANIFEST.in
index 70d7db81..1db431d4 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,6 +1,5 @@
 include requirements.txt
 include requirements-test.txt
-include requirements-dev.txt
 include AUTHORS.rst
 include CHANGELOG.md
 include README.md
diff --git a/README.md b/README.md
index 303169f2..72b7cbf4 100644
--- a/README.md
+++ b/README.md
@@ -22,8 +22,6 @@ We welcome contributions! Please see our [contribution guide](https://github.com
 
 Deprecation Warning
 -------------------
-The 1.0.0 release of ipfx brings some new features, like NWB2 support, along with improvements to our documentation and testing. We will also drop support for
-- NWB1
-- Python 2
-Older versions of ipfx will continue to be available, but may receive only occasional bugfixes and patches.
+The 2.0.0 release of IPFX drops support for Python 3.6, which reached end of life and stopped receiving security updates on December 23, 2021.
+IPFX is now tested on Python 3.9 and higher.
 
diff --git a/docker/README.md b/docker/README.md
new file mode 100644
index 00000000..73ca607d
--- /dev/null
+++ b/docker/README.md
@@ -0,0 +1,21 @@
+# Docker images for on premise testing
+
+This directory contains Dockerfiles for building images for running on-prem tests that require internal Allen Institute resources. On-prem tests use GitHub self-hosted runners that will run tests on docker images built from these Dockerfiles.
+
+Our light and on-prem tests are defined in [our workflow file](../.github/workflows/github-actions-ci.yml "Link to GitHub Actions workflow for light and on-prem tests").
+
+- See [here](https://docs.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners) for more information on self-hosted runners.
+- See [here](https://docs.github.com/en/actions/hosting-your-own-runners/adding-self-hosted-runners) for more information on adding self-hosted runners to a GitHub repository.
+
+## Building images
+
+If you are an Allen Institute developer, you will have instructions on how to access the machine running the IPFX self-hosted runner.
+
+On this machine you can create the Docker image corresponding to the Python versions we test on by running:
+
+```
+cd ipfx/docker/py39
+docker build -t ipfx_py39:latest .
+```
+
+And this should be sufficient for the on-prem tests defined in our GitHub workflow to run.
diff --git a/docker/py311/Dockerfile b/docker/py311/Dockerfile
new file mode 100644
index 00000000..0eaac054
--- /dev/null
+++ b/docker/py311/Dockerfile
@@ -0,0 +1,10 @@
+FROM python:3.11
+
+RUN apt-get update \
+    && apt-get install -y \
+    hdf5-tools \
+    curl \
+    git-lfs \
+    && rm -rf /var/lib/apt/lists/*
+
+RUN curl https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash
\ No newline at end of file
diff --git a/docker/py36/Dockerfile b/docker/py39/Dockerfile
similarity index 54%
rename from docker/py36/Dockerfile
rename to docker/py39/Dockerfile
index 8b3f0b88..c52d2229 100644
--- a/docker/py36/Dockerfile
+++ b/docker/py39/Dockerfile
@@ -1,10 +1,10 @@
-FROM python:3.6
+FROM python:3.9
 
 RUN apt-get update \
     && apt-get install -y \
     hdf5-tools \
-    curl
+    curl \
+    git-lfs \
+    && rm -rf /var/lib/apt/lists/*
 
-RUN curl https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash
-
-RUN apt-get install git-lfs
+RUN curl https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash
\ No newline at end of file
diff --git a/ipfx/attach_metadata/sink/metadata_sink.py b/ipfx/attach_metadata/sink/metadata_sink.py
index 6035b1f5..bb923867 100644
--- a/ipfx/attach_metadata/sink/metadata_sink.py
+++ b/ipfx/attach_metadata/sink/metadata_sink.py
@@ -112,7 +112,7 @@ def _ensure_plural_targets(
             targets = self.targets
         elif isinstance(targets, dict):
             targets = [targets]
-        elif isinstance(targets, collections.Sequence):
+        elif isinstance(targets, collections.abc.Sequence):
             targets = list(targets)
         else:
             raise ValueError(
diff --git a/ipfx/attach_metadata/sink/nwb2_sink.py b/ipfx/attach_metadata/sink/nwb2_sink.py
index a90407b8..2f8f7c07 100644
--- a/ipfx/attach_metadata/sink/nwb2_sink.py
+++ b/ipfx/attach_metadata/sink/nwb2_sink.py
@@ -92,7 +92,6 @@ def _commit_nwb_changes(self):
         recorded until _reload_nwbfile
 
         """
-        set_container_sources(self.nwbfile, self._h5_file.filename)
         self.nwbfile.set_modified(True)
         # Because the NWB schema versions of NWB data produced by MIES are older
         # we do not want to cache the newer schema versions that IPFX is currently using
@@ -236,36 +235,3 @@ def serialize(self, targets: Optional[OneOrMany[Dict[str, Any]]] = None):
                 file_.write(self._data.getvalue())
 
         self._reload_nwbfile()
-
-
-def set_container_sources(
-    container: hdmf.container.AbstractContainer,
-    source: str
-):
-    """Traverse an NWBFile starting at a given container, setting the
-    container_source attribute inplace on each container.
-
-    Parameters
-    ----------
-    container : container_source will be set on this object as well as on
-        each of its applicable children.
-    source : The new value of container source
-    """
-    children = [container]
-    while children:
-        current = children.pop()
-
-        # 💀💀💀
-        # container_source is set on write, but cannot be overrwritten, making
-        # read -> modify -> write elsewhere
-        # pretty tricky!
-        # this is a fragile workaround
-        if hasattr(current, "_AbstractContainer__container_source"):
-            setattr(
-                current,
-                "_AbstractContainer__container_source",
-                source
-            )
-
-        if hasattr(current, "children"):
-            children.extend(current.children)
diff --git a/ipfx/dataset/create.py b/ipfx/dataset/create.py
index f1150786..89295bc6 100644
--- a/ipfx/dataset/create.py
+++ b/ipfx/dataset/create.py
@@ -31,7 +31,12 @@ def is_file_mies(path: str) -> bool:
     with h5py.File(path, "r") as fil:
         if "generated_by" in fil["general"].keys():
             generated_by = dict(fil["general"]["generated_by"][:])
-            return generated_by.get("Package", "None") == "MIES"
+            try:
+                decoded_generation_info = {k.decode(): v.decode() for k, v in generated_by.items()}
+            except (AttributeError, UnicodeDecodeError):
+                decoded_generation_info = generated_by
+
+            return decoded_generation_info.get("Package", "None") == "MIES"
 
     return False
 
diff --git a/ipfx/version.txt b/ipfx/version.txt
index b0f3d96f..359a5b95 100644
--- a/ipfx/version.txt
+++ b/ipfx/version.txt
@@ -1 +1 @@
-1.0.8
+2.0.0
\ No newline at end of file
diff --git a/ipfx/x_to_nwb/ABFConverter.py b/ipfx/x_to_nwb/ABFConverter.py
index 907ad4da..b84a23cc 100644
--- a/ipfx/x_to_nwb/ABFConverter.py
+++ b/ipfx/x_to_nwb/ABFConverter.py
@@ -79,10 +79,10 @@ def __init__(self, inFileOrFolder, outFile, outputFeedbackChannel, compression=T
         nwbFile.add_ic_electrode(electrodes)
 
         for i in self._createStimulusSeries(electrodes):
-            nwbFile.add_stimulus(i)
+            nwbFile.add_stimulus(i, use_sweep_table=True)
 
         for i in self._createAcquiredSeries(electrodes):
-            nwbFile.add_acquisition(i)
+            nwbFile.add_acquisition(i, use_sweep_table=True)
 
         with NWBHDF5IO(outFile, "w") as io:
             io.write(nwbFile, cache_spec=True)
diff --git a/ipfx/x_to_nwb/DatConverter.py b/ipfx/x_to_nwb/DatConverter.py
index 4bd3fe22..cec26233 100644
--- a/ipfx/x_to_nwb/DatConverter.py
+++ b/ipfx/x_to_nwb/DatConverter.py
@@ -71,10 +71,10 @@ def generateList(multipleGroupsPerFile, pul):
             nwbFile.add_ic_electrode(electrodes)
 
             for i in self._createAcquiredSeries(electrodes, elem):
-                nwbFile.add_acquisition(i)
+                nwbFile.add_acquisition(i, use_sweep_table=True)
 
             for i in self._createStimulusSeries(electrodes, elem):
-                nwbFile.add_stimulus(i)
+                nwbFile.add_stimulus(i, use_sweep_table=True)
 
             if multipleGroupsPerFile:
                 outFileFmt = outFile
diff --git a/test_requirements.txt b/requirements-test.txt
similarity index 100%
rename from test_requirements.txt
rename to requirements-test.txt
diff --git a/requirements.txt b/requirements.txt
index 01e6e4c2..e5d8da72 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,17 +1,18 @@
-argschema<2.0.0
 allensdk
+argschema
 dictdiffer
-h5py==2.10.0
-marshmallow==3.0.0rc6
-matplotlib>=1.4.3
+h5py
+marshmallow
+matplotlib
 methodtools
-numpy>=1.15.4,<1.19.0
-pandas>=0.25.1,<=0.25.3
+numpy
+pandas
 pg8000
 pillow
-pyabf<2.3.0
-pynwb>=1.3.2,<2.0.0
-pyYAML<6.0.0
-scipy>=0.15.1
-simplejson>=3.10.0
+pyabf
+pynwb==2.2.0
+pyYAML
+ruamel.yaml<0.18.0
+scipy
+simplejson
 watchdog
diff --git a/run_tests_with_docker.sh b/run_tests_with_docker.sh
index b7dfe2ac..235c3f0a 100644
--- a/run_tests_with_docker.sh
+++ b/run_tests_with_docker.sh
@@ -7,5 +7,5 @@ docker run -v ${PWD}:/root/ipfx \
     /bin/bash --login -c "source activate py27; \
     export TEST_COMPLETE=true; \
     pip install -r requirements.txt; \
-    pip install -r test_requirements.txt; \
+    pip install -r requirements-test.txt; \
     cd tests; py.test || exit 0"
diff --git a/setup.py b/setup.py
index 0e82fa98..12f6e559 100644
--- a/setup.py
+++ b/setup.py
@@ -68,7 +68,8 @@ def run(self):
         "License :: Other/Proprietary License", # Allen Institute Software License
         "Natural Language :: English",
         "Operating System :: OS Independent",
License", # Allen Institute Software License "Natural Language :: English", "Operating System :: OS Independent", - "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.11", "Topic :: Scientific/Engineering :: Bio-Informatics" ], cmdclass={'check_version': CheckVersionCommand} diff --git a/tests/attach_metadata/test_cli.py b/tests/attach_metadata/test_cli.py index 6940507f..094fd906 100644 --- a/tests/attach_metadata/test_cli.py +++ b/tests/attach_metadata/test_cli.py @@ -60,9 +60,11 @@ def simple_nwb(base_path): pynwb.TimeSeries( name="a timeseries", data=[1, 2, 3], + unit='s', starting_time=0.0, rate=1.0 - ) + ), + use_sweep_table=True ) with pynwb.NWBHDF5IO(path=in_nwb_path, mode="w") as writer: writer.write(nwbfile) diff --git a/tests/attach_metadata/test_nwb2_sink.py b/tests/attach_metadata/test_nwb2_sink.py index 3c2e05f8..567c0686 100644 --- a/tests/attach_metadata/test_nwb2_sink.py +++ b/tests/attach_metadata/test_nwb2_sink.py @@ -42,27 +42,12 @@ def nwbfile(): electrode=ice, sweep_number=12 ) - _nwbfile.add_acquisition(series) + _nwbfile.add_acquisition(series, use_sweep_table=True) _nwbfile.subject = pynwb.file.Subject() return _nwbfile -def test_set_container_sources(nwbfile): - ts = pynwb.TimeSeries( - name="a timeseries", - data=[1, 2, 3], - starting_time=0.0, - rate=1.0 - ) - nwbfile.add_acquisition(ts) - - nwb2_sink.set_container_sources(nwbfile, "foo") - assert ts.container_source == "foo" - assert nwbfile.container_source == "foo" - assert nwbfile.subject.container_source == "foo" - - def test_get_single_ic_electrode(nwbfile): sink = nwb2_sink.Nwb2Sink(None) sink.nwbfile = nwbfile diff --git a/tests/conftest.py b/tests/conftest.py index 2dc73f2a..7649bdc4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -87,10 +87,7 @@ def pytest_collection_modifyitems(config, items): """ skip_requires_lims = pytest.mark.skipif( - ( - os.environ.get("SKIP_LIMS", "false") == "true" - or not lq.able_to_connect_to_lims() - ), + os.environ.get("SKIP_LIMS", "false") == "true", reason='This test requires connection to lims' ) if config.getoption("--do-x-nwb-tests"): @@ -117,7 +114,7 @@ def pytest_collection_modifyitems(config, items): ) for item in items: - if 'requires_lims' in item.keywords: + if item.get_closest_marker("requires_lims"): item.add_marker(skip_requires_lims) if "xnwbtest" in item.keywords: diff --git a/tests/data/nwb/Ctgf-T2A-dgCre;Ai14-495723.05.02.01.nwb b/tests/data/nwb/Ctgf-T2A-dgCre;Ai14-495723.05.02.01.nwb old mode 100755 new mode 100644 index 563055c3..dc00cbd3 Binary files a/tests/data/nwb/Ctgf-T2A-dgCre;Ai14-495723.05.02.01.nwb and b/tests/data/nwb/Ctgf-T2A-dgCre;Ai14-495723.05.02.01.nwb differ diff --git a/tests/dataset/test_ephys_nwb_data.py b/tests/dataset/test_ephys_nwb_data.py index f95c9bf8..4cec15d3 100644 --- a/tests/dataset/test_ephys_nwb_data.py +++ b/tests/dataset/test_ephys_nwb_data.py @@ -58,7 +58,7 @@ def nwbfile_to_test(): **stimulus_meta_data ) - nwbfile.add_stimulus(stimulus_series) + nwbfile.add_stimulus(stimulus_series, use_sweep_table=True) response_data = [1, 2, 3, 4, 5] response_meta_data = { @@ -80,7 +80,7 @@ def nwbfile_to_test(): **response_meta_data ) - nwbfile.add_acquisition(acquisition_series) + nwbfile.add_acquisition(acquisition_series, use_sweep_table=True) return nwbfile diff --git a/tests/test_lims_queries.py b/tests/test_lims_queries.py index cef6c5c1..a15fe053 100644 --- a/tests/test_lims_queries.py +++ b/tests/test_lims_queries.py @@ -1,11 +1,7 @@ import 
-
-if not lq.able_to_connect_to_lims():
-    pytest.skip("cannot connect to LIMS", allow_module_level=True)
-
-
+@pytest.mark.requires_lims
 def test_get_specimen_info_from_lims_by_id():
 
     specimen_id = 500844783
@@ -13,7 +9,7 @@ def test_get_specimen_info_from_lims_by_id():
     result = lq.get_specimen_info_from_lims_by_id(specimen_id)
 
     assert result == (u'Vip-IRES-Cre;Ai14(IVSCC)-226110.03.01.01', 500844779, 500844783)
 
-
+@pytest.mark.requires_lims
 def test_get_nwb_path_from_lims():
     ephys_roi_result = 500844779
@@ -21,7 +17,7 @@ def test_get_nwb_path_from_lims():
     result = lq.get_nwb_path_from_lims(ephys_roi_result)
 
     assert result == "/allen/programs/celltypes/production/mousecelltypes/prod589/Ephys_Roi_Result_500844779/500844779.nwb"
 
-
+@pytest.mark.requires_lims
 def test_get_igorh5_path_from_lims():
     ephys_roi_result = 500844779
diff --git a/tests/test_nwb_utils.py b/tests/test_nwb_utils.py
new file mode 100644
index 00000000..3c59d189
--- /dev/null
+++ b/tests/test_nwb_utils.py
@@ -0,0 +1,17 @@
+from ipfx.dataset.create import is_file_mies, create_ephys_data_set
+
+TEST_MIES_FILE_PATH = 'tests/data/nwb/Ctgf-T2A-dgCre;Ai14-495723.05.02.01.nwb'
+TEST_NON_MIES_FILE_PATH = 'tests/data/2018_03_20_0005.nwb'
+
+def test_is_file_mies():
+    assert is_file_mies(TEST_MIES_FILE_PATH) == True
+
+def test_is_file_mies_with_non_mies_file():
+    """
+    Tests the case where the file is not detected as a MIES file.
+    The file is read using the ipfx.dataset.hbg_nwb_data.HBGNWBData class.
+    """
+    assert is_file_mies(TEST_NON_MIES_FILE_PATH) == False
+    nwb_data = create_ephys_data_set(TEST_NON_MIES_FILE_PATH)
+    assert nwb_data is not None
+    assert len(nwb_data.ontology.stimuli) > 0
\ No newline at end of file
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index 7d674a67..00000000
--- a/tox.ini
+++ /dev/null
@@ -1,10 +0,0 @@
-[tox]
-envlist = python2.7, python3.6
-
-[testenv]
-
-deps =
-    pytest
-
-commands = pytest
-