From d7cc8f1c1c296d24589df50ec1bbe6f7a657914f Mon Sep 17 00:00:00 2001 From: Stefan Jansen Date: Tue, 7 Sep 2021 23:10:57 -0400 Subject: [PATCH] CI fix tests (#8) - GHA update - remove nbs test - add pre-commit config - add README badges --- .github/dependabot.yml | 14 +++++ .github/workflows/build_wheels.yml | 6 +- .github/workflows/conda_package.yml | 61 +++++++++++-------- .github/workflows/unit_tests.yml | 4 +- .pre-commit-config.yaml | 20 ++++++ README.md | 11 +++- conda/recipe/meta.yaml | 2 +- docs/deploy.py | 10 +-- .../round_trip_tear_sheet_example.ipynb | 55 +++++++++-------- pyfolio/tests/test_nbs.py | 21 ------- pyfolio/tests/test_perf_attrib.py | 39 +++++++----- pyfolio/tests/test_pos.py | 6 +- pyproject.toml | 7 +++ setup.cfg | 8 +-- tox.ini | 17 +++--- 15 files changed, 163 insertions(+), 118 deletions(-) create mode 100644 .github/dependabot.yml create mode 100644 .pre-commit-config.yaml delete mode 100755 pyfolio/tests/test_nbs.py diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..da38e79f --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,14 @@ +version: 2 +updates: + # Maintain dependencies for GitHub Actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + ignore: + # Official actions have moving tags like v1 + # that are used, so they don't need updates here + - dependency-name: "actions/checkout" + - dependency-name: "actions/upload-artifact" + - dependency-name: "actions/download-artifact" + - dependency-name: "actions/setup-python" diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 260ab904..db122d2d 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -11,7 +11,6 @@ on: types: - published - jobs: dist: runs-on: ${{ matrix.os }} @@ -44,15 +43,13 @@ jobs: upload_pypi: needs: [ dist ] runs-on: ubuntu-latest - # upload to (Test)-PyPI on push with tag starting with v; diff by commit - # if: startsWith(github.ref, 'refs/tags') steps: - uses: actions/download-artifact@v2 with: name: artifact path: dist - name: publish to testpypi - uses: pypa/gh-action-pypi-publish@v1.4.1 + uses: pypa/gh-action-pypi-publish@master if: ${{ github.event.inputs.target }} == 'TESTPYPI' with: user: __token__ @@ -61,7 +58,6 @@ jobs: - name: publish to pypi uses: pypa/gh-action-pypi-publish@master if: ${{ github.event.inputs.target }} == 'PYPI' || (github.event_name == 'release' && github.event.action == 'published') - # if: ${{ github.event.inputs.target }} == 'PYPI' with: user: __token__ password: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/conda_package.yml b/.github/workflows/conda_package.yml index a905fd0b..685df806 100644 --- a/.github/workflows/conda_package.yml +++ b/.github/workflows/conda_package.yml @@ -1,4 +1,4 @@ -name: conda +name: Anaconda on: workflow_dispatch @@ -16,9 +16,7 @@ jobs: fail-fast: false matrix: os: [ macos-latest, windows-latest, ubuntu-latest ] -# os: [ ubuntu-latest ] - python: [ '3.7', '3.8'] -# python: [ '3.9' ] + python: [ '3.7', '3.8', '3.9'] steps: - name: Checkout pyfolio-reloaded @@ -29,21 +27,37 @@ jobs: with: miniconda-version: latest auto-update-conda: true + channel-priority: true + mamba-version: "*" python-version: ${{ matrix.python }} activate-environment: recipe - channels: ml4t, conda-forge, defaults, anaconda + channels: conda-forge, defaults, anaconda + + - name: create uploader + # address broken client under py3.9 + if: ${{ matrix.python == '3.9' }} + run: conda 
create -n up python=3.7 anaconda-client - name: conda build for ${{ matrix.os }} run: | conda activate recipe - conda install -n recipe conda-build conda-verify anaconda-client - conda-build --output-folder . --python ${{ matrix.python }} conda/recipe/ + mamba install -n recipe boa conda-verify anaconda-client + conda mambabuild --output-folder . --python ${{ matrix.python }} conda.recipe - - name: store macos result + - name: activate uploader + # address broken client under py3.9 + if: ${{ matrix.python == '3.9' }} + run: conda activate up + + - name: store windows result uses: actions/upload-artifact@v2 - if: ${{ matrix.os == 'macos-latest' }} + if: ${{ matrix.os == 'windows-latest' }} with: - path: osx-64/*.tar.bz2 + path: win-64/*.tar.bz2 + + - name: upload windows + if: ${{ matrix.os == 'windows-latest' }} + run: anaconda upload -l main -u ml4t win-64/*.tar.bz2 - name: store linux result uses: actions/upload-artifact@v2 @@ -51,23 +65,16 @@ jobs: with: path: linux-64/*.tar.bz2 - - name: store windows result + - name: upload linux + if: ${{ matrix.os == 'ubuntu-latest' }} + run: anaconda upload -l main -u ml4t linux-64/*.tar.bz2 + + - name: store macos result uses: actions/upload-artifact@v2 - if: ${{ matrix.os == 'windows-latest' }} + if: ${{ matrix.os == 'macos-latest' }} with: - path: win-64/*.tar.bz2 + path: osx-64/*.tar.bz2 - - name: upload ${{ matrix.os }} result to anaconcda - if: ${{ matrix.python != '3.9'}} - env: - OS: ${{ matrix.os }} - run: | - if [ "$OS" == "ubuntu-latest" ] ; then - anaconda upload -l main -u ml4t linux-64/*.tar.bz2 - else - if [ "$OS" == "macos-latest" ] ; then - anaconda upload -l main -u ml4t osx-64/*.tar.bz2 - else - anaconda upload -l main -u ml4t win-64/*.tar.bz2 - fi - fi + - name: upload macos + if: ${{ matrix.os == 'macos-latest' }} + run: anaconda upload -l main -u ml4t osx-64/*.tar.bz2 diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index 74c003fa..72182cae 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -36,6 +36,6 @@ jobs: run: | flake8 - - name: Unittests with tox & nose + - name: Tests with tox & pytest run: | - tox -p auto -q + tox diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..1ce51389 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,20 @@ +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.4.0 + hooks: + - id: check-yaml + - id: check-merge-conflict + - id: end-of-file-fixer + - id: trailing-whitespace + +- repo: https://gitlab.com/pycqa/flake8 + rev: 3.9.1 + hooks: + - id: flake8 + +- repo: https://github.com/psf/black + rev: 20.8b1 + hooks: + - id: black + +exclude: '^conda/recipe/meta.yaml$' diff --git a/README.md b/README.md index a60c2250..7d1ef46b 100644 --- a/README.md +++ b/README.md @@ -4,9 +4,14 @@

- -[![Join the chat at https://gitter.im/stefan-jansen/pyfolio-reloaded](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/stefan-jansen/pyfolio-reloaded?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -[![PyPI Wheels](https://github.com/stefan-jansen/pyfolio-reloaded/actions/workflows/distribution.yml/badge.svg)](https://github.com/stefan-jansen/pyfolio-reloaded/actions/workflows/distribution.yml) +![PyPI](https://img.shields.io/pypi/v/pyfolio-reloaded) +[![Tests](https://github.com/stefan-jansen/pyfolio-reloaded/actions/workflows/unit_tests.yml/badge.svg)](https://github.com/stefan-jansen/pyfolio-reloaded/actions/workflows/unit_tests.yml) +[![conda](https://github.com/stefan-jansen/pyfolio-reloaded/actions/workflows/conda_package.yml/badge.svg)](https://github.com/stefan-jansen/pyfolio-reloaded/actions/workflows/conda_package.yml) +[![PyPI](https://github.com/stefan-jansen/pyfolio-reloaded/actions/workflows/build_wheels.yml/badge.svg)](https://github.com/stefan-jansen/pyfolio-reloaded/actions/workflows/build_wheels.yml) +[![Coverage Status](https://coveralls.io/repos/github/stefan-jansen/pyfolio-reloaded/badge.svg?branch=main)](https://coveralls.io/github/stefan-jansen/pyfolio-reloaded?branch=main) +![GitHub issues](https://img.shields.io/github/issues/stefan-jansen/pyfolio-reloaded) +![Discourse users](https://img.shields.io/discourse/users?server=https%3A%2F%2Fexchange.ml4trading.io%2F) +![Twitter Follow](https://img.shields.io/twitter/follow/ml4trading?style=social) pyfolio is a Python library for performance and risk analysis of financial portfolios that works well with the [Zipline](https://zipline.ml4trading.io/) open source backtesting library. 
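For readers who have not used the library, the README line above pairs naturally with a short usage sketch. This is a minimal, illustrative example, not part of the patch: it assumes a pandas Series of daily strategy returns named `returns`; in practice such a series would usually come from a Zipline backtest via `pf.utils.extract_rets_pos_txn_from_zipline`.

import numpy as np
import pandas as pd
import pyfolio as pf

# Hypothetical daily returns standing in for a real backtest result.
returns = pd.Series(
    np.random.default_rng(42).normal(0.0005, 0.01, 252),
    index=pd.date_range("2020-01-06", periods=252, freq="B"),
)

# Build the standard returns tear sheet of summary statistics and plots.
pf.create_returns_tear_sheet(returns)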
diff --git a/conda/recipe/meta.yaml b/conda/recipe/meta.yaml index 5c1f9273..fba97f4a 100644 --- a/conda/recipe/meta.yaml +++ b/conda/recipe/meta.yaml @@ -29,7 +29,7 @@ requirements: - scipy >=0.14.0 - scikit-learn>=0.16.1 - seaborn >=0.7.1 - - empyrical-reloaded >=0.5.6 + - empyrical-reloaded >=0.5.7 # pending update test: imports: diff --git a/docs/deploy.py b/docs/deploy.py index 4f3edfef..e83536d4 100644 --- a/docs/deploy.py +++ b/docs/deploy.py @@ -9,8 +9,8 @@ from subprocess import check_call HERE = Path(__file__).resolve(strict=True).parent -ALPHALENS_ROOT = HERE.parent -TEMP_LOCATION = "/tmp/alphalens-doc" +PYFOLIO_ROOT = HERE.parent +TEMP_LOCATION = "/tmp/pyfolio-doc" TEMP_LOCATION_GLOB = TEMP_LOCATION + "/*" @@ -49,8 +49,8 @@ def main(): print("Copying built files to temp location.") move("build/html", TEMP_LOCATION) - print("Moving to '%s'" % ALPHALENS_ROOT) - os.chdir(ALPHALENS_ROOT) + print("Moving to '%s'" % PYFOLIO_ROOT) + os.chdir(PYFOLIO_ROOT) print("Checking out gh-pages branch.") check_call( @@ -77,7 +77,7 @@ def main(): os.chdir(old_dir) print() - print("Updated documentation branch in directory %s" % ALPHALENS_ROOT) + print("Updated documentation branch in directory %s" % PYFOLIO_ROOT) print("If you are happy with these changes, commit and push to gh-pages.") diff --git a/pyfolio/examples/round_trip_tear_sheet_example.ipynb b/pyfolio/examples/round_trip_tear_sheet_example.ipynb index 771c3109..46b2c033 100644 --- a/pyfolio/examples/round_trip_tear_sheet_example.ipynb +++ b/pyfolio/examples/round_trip_tear_sheet_example.ipynb @@ -27,8 +27,13 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, + "execution_count": 1, + "metadata": { + "ExecuteTime": { + "end_time": "2021-09-07T02:43:48.874652Z", + "start_time": "2021-09-07T02:43:48.864891Z" + } + }, "outputs": [], "source": [ "# silence warnings\n", @@ -38,11 +43,11 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 2, "metadata": { "ExecuteTime": { - "end_time": "2021-04-20T00:31:32.084691Z", - "start_time": "2021-04-20T00:31:30.976425Z" + "end_time": "2021-09-07T02:43:50.546591Z", + "start_time": "2021-09-07T02:43:48.989776Z" } }, "outputs": [], @@ -63,11 +68,11 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "metadata": { "ExecuteTime": { - "end_time": "2021-04-20T00:31:32.410509Z", - "start_time": "2021-04-20T00:31:32.335185Z" + "end_time": "2021-09-07T02:43:51.697971Z", + "start_time": "2021-09-07T02:43:51.616535Z" } }, "outputs": [], @@ -89,11 +94,11 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": { "ExecuteTime": { - "end_time": "2021-04-20T00:31:33.084992Z", - "start_time": "2021-04-20T00:31:33.077720Z" + "end_time": "2021-09-07T02:43:52.439793Z", + "start_time": "2021-09-07T02:43:52.437219Z" } }, "outputs": [], @@ -120,11 +125,11 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "metadata": { "ExecuteTime": { - "end_time": "2021-04-20T00:32:01.944657Z", - "start_time": "2021-04-20T00:31:34.193583Z" + "end_time": "2021-09-07T02:44:23.168045Z", + "start_time": "2021-09-07T02:43:54.942707Z" }, "scrolled": false }, @@ -615,11 +620,11 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "metadata": { "ExecuteTime": { - "end_time": "2021-04-20T00:32:07.044586Z", - "start_time": "2021-04-20T00:32:01.945747Z" + "end_time": "2021-09-07T02:44:28.339232Z", + "start_time": "2021-09-07T02:44:23.171046Z" } }, "outputs": [], @@ -630,11 +635,11 @@ }, { "cell_type": 
"code", - "execution_count": 6, + "execution_count": 7, "metadata": { "ExecuteTime": { - "end_time": "2021-04-20T00:32:07.054464Z", - "start_time": "2021-04-20T00:32:07.045701Z" + "end_time": "2021-09-07T02:44:28.358198Z", + "start_time": "2021-09-07T02:44:28.340552Z" } }, "outputs": [ @@ -745,7 +750,7 @@ "4 0.112198 AMD 2 days -0.001249 " ] }, - "execution_count": 6, + "execution_count": 7, "metadata": {}, "output_type": "execute_result" } @@ -756,11 +761,11 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 8, "metadata": { "ExecuteTime": { - "end_time": "2021-04-20T00:32:07.141707Z", - "start_time": "2021-04-20T00:32:07.055460Z" + "end_time": "2021-09-07T02:44:28.443008Z", + "start_time": "2021-09-07T02:44:28.359105Z" } }, "outputs": [ @@ -1165,4 +1170,4 @@ }, "nbformat": 4, "nbformat_minor": 1 -} \ No newline at end of file +} diff --git a/pyfolio/tests/test_nbs.py b/pyfolio/tests/test_nbs.py deleted file mode 100755 index 514ca5fd..00000000 --- a/pyfolio/tests/test_nbs.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python -""" -simple example script for running notebooks and reporting exceptions. -Usage: `checkipnb.py foo.ipynb [bar.ipynb [...]]` -Each cell is submitted to the kernel, and checked for errors. -""" - -from pathlib import Path -from runipy.notebook_runner import NotebookRunner -from pyfolio.ipycompat import read as read_notebook - -EXAMPLES_PATH = Path(__file__).resolve().parent.parent / "examples" - - -def test_nbs(): - for ipynb in EXAMPLES_PATH.glob("*.ipynb"): - print(ipynb) - with open(ipynb) as f: - nb = read_notebook(f, "json") - nb_runner = NotebookRunner(nb) - nb_runner.run_notebook(skip_exceptions=False) diff --git a/pyfolio/tests/test_perf_attrib.py b/pyfolio/tests/test_perf_attrib.py index e8e9e5d7..34886ea7 100644 --- a/pyfolio/tests/test_perf_attrib.py +++ b/pyfolio/tests/test_perf_attrib.py @@ -446,14 +446,17 @@ def test_missing_stocks_and_dates(self): factor_returns, factor_loadings_missing_stocks, ) - - self.assertEqual(len(w), 1) + # avoids test failure due to DeprecationWarning for pandas>=1.0, <1.1 + w_ = [warn for warn in w if issubclass(warn.category, UserWarning)] + self.assertEqual(len(w_), 1) self.assertIn( "The following assets were missing factor loadings: " "['TLT']", - str(w[-1].message), + str(w_[-1].message), + ) + self.assertIn( + "Ratio of assets missing: 0.333", str(w_[-1].message) ) - self.assertIn("Ratio of assets missing: 0.333", str(w[-1].message)) # missing dates should raise a warning missing_dates = ["2017-01-01", "2017-01-05"] @@ -465,12 +468,12 @@ def test_missing_stocks_and_dates(self): factor_returns, factor_loadings_missing_dates, ) - - self.assertEqual(len(w), 2) + w_ = [warn for warn in w if issubclass(warn.category, UserWarning)] + self.assertEqual(len(w_), 2) self.assertIn( "Could not find factor loadings for " "{} dates".format(len(missing_dates)), - str(w[-1].message), + str(w_[-1].message), ) for date in missing_dates: @@ -484,12 +487,12 @@ def test_missing_stocks_and_dates(self): factor_returns.drop(pd.DatetimeIndex(missing_dates)), factor_loadings_missing_dates, ) - - self.assertEqual(len(w), 3) + w_ = [warn for warn in w if issubclass(warn.category, UserWarning)] + self.assertEqual(len(w_), 3) self.assertIn( "Could not find factor loadings for " "{} dates".format(len(missing_dates)), - str(w[-1].message), + str(w_[-1].message), ) for date in missing_dates: @@ -507,19 +510,21 @@ def test_missing_stocks_and_dates(self): factor_returns, factor_loadings_missing_both, ) - - 
self.assertEqual(len(w), 5)
+        w_ = [warn for warn in w if issubclass(warn.category, UserWarning)]
+        self.assertEqual(len(w_), 5)
 
         self.assertIn(
             "The following assets were missing factor loadings: "
             "['TLT']",
-            str(w[-2].message),
+            str(w_[-2].message),
+        )
+        self.assertIn(
+            "Ratio of assets missing: 0.333", str(w_[-2].message)
         )
-        self.assertIn("Ratio of assets missing: 0.333", str(w[-2].message))
 
         self.assertIn(
             "Could not find factor loadings for "
             "{} dates".format(len(missing_dates)),
-            str(w[-1].message),
+            str(w_[-1].message),
         )
 
         for date in missing_dates:
             self.assertNotIn(date, exposures.index)
@@ -564,8 +569,10 @@ def test_high_turnover_warning(self):
                 factor_loadings,
                 transactions=transactions,
             )
+        # avoids test failure due to DeprecationWarning for pandas>=1.0, <1.1
+        w = [warn for warn in w if issubclass(warn.category, UserWarning)]
 
         self.assertEqual(len(w), 1)
         self.assertIn(
             "This algorithm has relatively high turnover of its positions.",
             str(w[-1].message),
diff --git a/pyfolio/tests/test_pos.py b/pyfolio/tests/test_pos.py
index 9b985b9b..824d6def 100644
--- a/pyfolio/tests/test_pos.py
+++ b/pyfolio/tests/test_pos.py
@@ -155,10 +155,12 @@ def test_sector_exposure(
             assert_frame_equal(
                 result_sector_exposure, expected_sector_exposure
             )
+            # avoids test failure due to DeprecationWarning for pandas>=1.0, <1.1
+            w_ = [warn for warn in w if issubclass(warn.category, UserWarning)]
             if warning_expected:
-                self.assertEqual(len(w), 1)
+                self.assertEqual(len(w_), 1)
             else:
-                self.assertEqual(len(w), 0)
+                self.assertEqual(len(w_), 0)
 
     @parameterized.expand(
         [
diff --git a/pyproject.toml b/pyproject.toml
index 11b5fb27..c41db53e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -18,3 +18,10 @@ extend-exclude = '''
     docs/source/conf.py
 \)
 '''
+
+[tool.pytest.ini_options]
+minversion = "6.0"
+addopts = "-q"
+testpaths = [
+    "pyfolio/tests"
+]
diff --git a/setup.cfg b/setup.cfg
index 7c8af73d..af8f4d27 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -58,7 +58,7 @@ install_requires =
     scipy>=0.14.0
     scikit-learn>=0.16.1
     seaborn>=0.7.1
-    empyrical-reloaded>=0.5.0
+    empyrical-reloaded>=0.5.8
 
 [options.extras_require]
 all =
@@ -69,10 +69,10 @@ all =
 
 test =
     tox>=2.3.1
     coverage>=4.0.3
-    nose>=1.3.7
+    coveralls==3.0.1
+    pytest>=6.2
+    pytest-cov>=2.12
     parameterized>=0.6.1
-    nose-ignore-docstring>=0.2
-    nose-timer>=0.5.0
     flake8>=3.9.1
     black
diff --git a/tox.ini b/tox.ini
index 5b2c9d7f..08c7482c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,7 +1,11 @@
 [tox]
-envlist = py{37,38,39}-pandas{10,11,12}
+envlist = py{37,38}-pandas{10,11,12}, py39-pandas{11,12}
 isolated_build = True
 skip_missing_interpreters = True
+requires = setuptools >=42.0.0
+           pip >=21.0
+           wheel >0.36.0
+           tox-gh-actions
 
 [gh-actions]
 python =
@@ -27,16 +31,15 @@ setenv =
     COVERAGE_FILE=.coverage.{envname}
 
 deps =
-    coverage
-    nose
+    pytest
     parameterized
-    nose-ignore-docstring
-    nose-timer
+    pytest-cov
+    coverage
     flake8
     black
 
 commands =
-    py{37,38,39}-pandas10: pip install -vv pandas>=1.0.0,<1.1.0
+    py{37,38}-pandas10: pip install -vv pandas>=1.0.0,<1.1.0
     py{37,38,39}-pandas11: pip install -vv pandas>=1.1.0,<1.2.0
     py{37,38,39}-pandas12: pip install -vv pandas>=1.2.0
-    nosetests pyfolio.tests
+    pytest --cov=pyfolio pyfolio/tests
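A recurring pattern in the test changes above is filtering the recorded warnings down to UserWarning before asserting on their count, so that DeprecationWarnings emitted by pandas >=1.0,<1.1 cannot break the assertions. A minimal, self-contained sketch of that pattern follows; `noisy_call` is a hypothetical stand-in for the code under test, not a pyfolio function.

import warnings


def noisy_call():
    # Stand-in that emits both an unrelated DeprecationWarning (as pandas
    # 1.0.x does) and the UserWarning the test actually cares about.
    warnings.warn("this pandas API is deprecated", DeprecationWarning)
    warnings.warn(
        "The following assets were missing factor loadings: ['TLT']", UserWarning
    )


with warnings.catch_warnings(record=True) as w:
    warnings.simplefilter("always")
    noisy_call()

# Keep only the warnings the assertions target; unrelated DeprecationWarnings
# no longer change the count and cannot cause spurious test failures.
w_ = [warn for warn in w if issubclass(warn.category, UserWarning)]
assert len(w_) == 1
assert "missing factor loadings" in str(w_[-1].message)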