add 3.11 test #190

Merged: 14 commits, Jan 6, 2023
35 changes: 23 additions & 12 deletions .circleci/config.yml
@@ -29,9 +29,11 @@ commands:
default: ""
steps:
- restore_cache:
- key: deps-v5-<<parameters.python_version>>-<<parameters.pandas_version>>-<<parameters.extras>>-<<parameters.include_dev_dependencies>>-{{ .Branch }}-{{ checksum "requirements.txt" }}-{{ checksum "setup.py" }}
+ key: deps-v6-<<parameters.python_version>>-<<parameters.pandas_version>>-<<parameters.extras>>-<<parameters.include_dev_dependencies>>-{{ .Branch }}-{{ checksum "requirements.txt" }}-{{ checksum "setup.py" }}
- run:
name: Install python deps in venv
+ environment:
+ PYENV_VERSION: <<parameters.python_version>>
command: |
if [ -f venv/bin/activate ]
then
@@ -60,7 +62,7 @@ commands:
fi
fi
- save_cache:
- key: deps-v5-<<parameters.python_version>>-<<parameters.pandas_version>>-<<parameters.extras>>-<<parameters.include_dev_dependencies>>-{{ .Branch }}-{{ checksum "requirements.txt" }}-{{ checksum "setup.py" }}
+ key: deps-v6-<<parameters.python_version>>-<<parameters.pandas_version>>-<<parameters.extras>>-<<parameters.include_dev_dependencies>>-{{ .Branch }}-{{ checksum "requirements.txt" }}-{{ checksum "setup.py" }}
paths:
- "venv"
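Two changes land in this hunk: the install step now exports PYENV_VERSION, which the pyenv shims in CircleCI's Python images consult when resolving python, and the cache key prefix is bumped from deps-v5 to deps-v6, presumably so that virtualenvs cached under the old circleci/python images (swapped for cimg/python below) are never restored into the new ones. A rough Python sketch of how such a composite key behaves (illustrative only, not CircleCI's implementation):

    import hashlib

    def cache_key(prefix, python_version, pandas_version, files):
        # Mirror the template above: fixed prefix, job parameters,
        # then one checksum per dependency-defining file.
        parts = [prefix, python_version, pandas_version]
        for path in files:
            with open(path, "rb") as f:
                parts.append(hashlib.sha256(f.read()).hexdigest()[:12])
        return "-".join(parts)

    # Bumping the prefix changes every key at once, so no stale venv can
    # ever match, while editing requirements.txt or setup.py invalidates
    # only the keys whose checksums changed.
    old = cache_key("deps-v5", "3.11", "==1.5.2", ["requirements.txt", "setup.py"])
    new = cache_key("deps-v6", "3.11", "==1.5.2", ["requirements.txt", "setup.py"])
    assert old != new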
wait_for_db:
@@ -108,7 +110,7 @@ jobs:
default: false
description: "Enforce coverage not slipping"
docker:
- - image: circleci/python:<<parameters.python_version>>
+ - image: cimg/python:<<parameters.python_version>>
steps:
- checkout
- add_ssh_keys:
@@ -176,7 +178,7 @@ jobs:
type: string
description: "Command to run in records-mover venv"
docker:
- - image: circleci/python:<<parameters.python_version>>
+ - image: cimg/python:<<parameters.python_version>>
- image: jbfavre/vertica:8.1.1-16_centos-7
environment:
DATABASE_NAME: docker
@@ -280,7 +282,7 @@ jobs:
default: true

docker:
- - image: circleci/python:<<parameters.python_version>>
+ - image: cimg/python:<<parameters.python_version>>
steps:
- checkout
- add_ssh_keys:
@@ -311,7 +313,7 @@ jobs:
# This is set by default in the CircleCI environment
unset SCRATCH_GCS_URL
fi
- with-db <<parameters.db_name>> nosetests --with-xunit --xunit-file=../../../../test-reports/itest/junit.xml .
+ with-db <<parameters.db_name>> pytest --junitxml=../../../../test-reports/itest/junit.xml .
- store_test_results:
path: test-reports
- store_artifacts:
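The nosetests invocations in this config are converted to pytest throughout the PR. Pytest emits JUnit XML natively through --junitxml, and the coverage options move to the pytest-cov plugin added in setup.py; the translation, as a reference sketch:

    # nose -> pytest command-line translation used in this PR (reference sketch)
    NOSE_TO_PYTEST = {
        "--with-xunit --xunit-file=PATH": "--junitxml=PATH",
        "--with-coverage --cover-package=PKG": "--cov=PKG",  # pytest-cov
        "--cover-html": "--cov-report=html",                 # pytest-cov
        "--cover-xml": "--cov-report=xml",                   # pytest-cov
    }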
description: "Version of python to test against"
default: '3.9'
docker:
- - image: circleci/python:<<parameters.python_version>>
+ - image: cimg/python:<<parameters.python_version>>
steps:
- checkout
- installvenv:
Expand Down Expand Up @@ -366,7 +368,7 @@ jobs:
twine upload -r pypi dist/*
cli-extra-test:
docker:
- - image: circleci/python:3.9
+ - image: cimg/python:3.9
steps:
- checkout
- installvenv:
@@ -427,6 +429,15 @@ workflows:
filters:
tags:
only: /v\d+\.\d+\.\d+(-[\w]+)?/
+ - test:
+ name: test-3.11
+ extras: '[unittest,typecheck]'
+ python_version: "3.11"
+ pandas_version: "==1.5.2"
+ coverage: true
+ filters:
+ tags:
+ only: /v\d+\.\d+\.\d+(-[\w]+)?/
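The new test-3.11 workflow entry pins pandas to 1.5.2 rather than inheriting the default, presumably because the pandas 1.5 line is the first to support CPython 3.11, so the old 1.1.5 pin cannot even install there. A sketch of the constraint this encodes (the packaging import and the exact version cutoff are assumptions, not taken from the PR):

    import sys

    import pandas as pd
    from packaging.version import Version

    if sys.version_info >= (3, 11):
        # Older pandas predates 3.11; the job's ==1.5.2 pin satisfies this.
        assert Version(pd.__version__) >= Version("1.5"), "pandas too old for 3.11"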
# - integration_test_with_dbs:
# name: vertica-no-s3-itest
# extras: '[vertica,itest]'
# # scratch bucket config.
# #
# unset SCRATCH_S3_URL AWS_SESSION_TOKEN AWS_SECRET_ACCESS_KEY AWS_ACCESS_KEY_ID
- # with-db dockerized-vertica nosetests --with-xunit --xunit-file=../../../../test-reports/itest/junit.xml .
+ # with-db dockerized-vertica pytest --junitxml=../../../../test-reports/itest/junit.xml .
# requires:
# - redshift-s3-itest
# filters:
export RECORDS_MOVER_SESSION_TYPE=env
mkdir -p test-reports/itest
cd tests/integration/records/single_db
- with-db dockerized-postgres nosetests --with-xunit --xunit-file=../../../../test-reports/itest/junit.xml .
+ with-db dockerized-postgres pytest --junitxml=../../../../test-reports/itest/junit.xml .
requires:
- redshift-s3-itest
filters:
# export RECORDS_MOVER_SESSION_TYPE=env
# mkdir -p test-reports/itest
# cd tests/integration/records/single_db
- # with-db dockerized-mysql nosetests --with-xunit --xunit-file=../../../../test-reports/itest/junit.xml .
+ # with-db dockerized-mysql pytest --junitxml=../../../../test-reports/itest/junit.xml .
# requires:
# - redshift-s3-itest
# filters:
export RECORDS_MOVER_SESSION_TYPE=env
mkdir -p test-reports/itest
cd tests/integration/records/single_db
- with-db dockerized-vertica nosetests --with-xunit --xunit-file=../../../../test-reports/itest/junit.xml .
+ with-db dockerized-vertica pytest --junitxml=../../../../test-reports/itest/junit.xml .
requires:
- redshift-s3-itest
filters:
6 changes: 3 additions & 3 deletions .gitignore
@@ -39,7 +39,7 @@ htmlcov/
.tox/
.coverage*
.cache
- nosetests.xml
+ pytest.xml
coverage.xml
*,cover
/test-reports/
# PyBuilder
target/

- # nosetest
+ # pytest
/cover
/typecover
/.coverage
/coverage.xml
- /nosetests.xml
+ /pytest.xml
/test-reports

.DS_Store
12 changes: 6 additions & 6 deletions Makefile
@@ -25,28 +25,28 @@ citypecoverage: typecoverage
@test -z "$$(git status --porcelain metrics/mypy_high_water_mark)"

unit:
- ENV=test nosetests --cover-package=records_mover --cover-erase --with-coverage --with-xunit --cover-html --cover-xml --cover-inclusive tests/unit
+ ENV=test pytest --cov=records_mover tests/unit
mv .coverage .coverage-unit

component:
- ENV=test nosetests --cover-package=records_mover --with-coverage --with-xunit --cover-html --cover-xml --cover-inclusive tests/component
+ ENV=test pytest --cov=records_mover tests/component
mv .coverage .coverage-component

test: unit component
- coverage combine .coverage-unit .coverage-component # https://stackoverflow.com/questions/7352319/nosetests-combined-coverage
+ coverage combine .coverage-unit .coverage-component # https://stackoverflow.com/questions/7352319/pytest-combined-coverage
coverage html --directory=cover
coverage xml

ciunit:
- ENV=test nosetests --cover-package=records_mover --cover-erase --with-coverage --with-xunit --cover-html --cover-xml --cover-inclusive --xunit-file=test-reports/junit.xml tests/unit
+ ENV=test pytest --cov=records_mover tests/unit
mv .coverage .coverage-unit

cicomponent:
- ENV=test nosetests --cover-package=records_mover --with-coverage --with-xunit --cover-html --cover-xml --cover-inclusive --xunit-file=test-reports/junit.xml tests/component
+ ENV=test pytest --cov=records_mover tests/component
mv .coverage .coverage-component

citest: test-reports ciunit cicomponent
- coverage combine .coverage-unit .coverage-component # https://stackoverflow.com/questions/7352319/nosetests-combined-coverage
+ coverage combine .coverage-unit .coverage-component # https://stackoverflow.com/questions/7352319/pytest-combined-coverage
coverage html --directory=cover
coverage xml
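The unit and component targets park their data files as .coverage-unit and .coverage-component so that the test and citest targets can merge them. What the combine-and-report steps do, expressed through the coverage.py API rather than the coverage CLI the Makefile actually shells out to (a sketch):

    from coverage import Coverage

    cov = Coverage()
    # Merge the renamed data files from the unit and component runs.
    cov.combine([".coverage-unit", ".coverage-component"])
    cov.save()
    cov.html_report(directory="cover")  # same output dir as the Makefile
    cov.xml_report()                    # writes coverage.xml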

2 changes: 1 addition & 1 deletion requirements.txt
@@ -6,7 +6,7 @@ flake8
# https://github.com/pallets/markupsafe/issues/284
markupsafe==2.0.1
# https://github.com/pandas-dev/pandas/pull/45749
- pandas==1.1.5
+ # pandas
google-cloud-storage
boto3

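Commenting out the pin lets each job in the version matrix resolve a pandas compatible with its interpreter. If a split pin were ever wanted instead, PEP 508 environment markers could express it declaratively; a hypothetical sketch, not something this PR does:

    # Hypothetical version-conditional pins using PEP 508 markers:
    conditional_pandas = [
        'pandas>=1.5.2; python_version >= "3.11"',
        'pandas==1.1.5; python_version < "3.11"',
    ]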
2 changes: 1 addition & 1 deletion setup.cfg
@@ -80,7 +80,7 @@ ignore_missing_imports = True
[mypy-pyarrow.*]
ignore_missing_imports = True

- [mypy-nose.*]
+ [mypy-pytest.*]
ignore_missing_imports = True

[mypy-airflow.hooks.*]
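The renamed section keeps mypy from failing on missing stubs for the new test dependency, just as the old one did for nose. Recent pytest releases ship their own type information, so this override may well be redundant, but it is harmless. The per-import equivalent, for comparison (a sketch):

    # Per-import alternative to the setup.cfg override:
    import pytest  # type: ignore[import]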
11 changes: 6 additions & 5 deletions setup.py
@@ -120,16 +120,17 @@ def initialize_options(self) -> None:
'grpcio<2.0dev,>=1.29.0',
]

- nose_dependencies = [
- 'nose'
+ pytest_dependencies = [

[Review comment] This file, one of the 3 largest in the project, increased in size to 368 lines. The total size of those files is now 3410 lines (target: 1137). Is this file complex enough to refactor?

+ 'pytest',
+ 'pytest-cov'
]

itest_dependencies = [
'jsonschema', # needed for directory_validator.py
'pytz',
'wheel', # needed to support legacy 'setup.py install'
] + (
- nose_dependencies +
+ pytest_dependencies +
# needed for records_database_fixture retrying drop/creates on
# BigQuery
google_api_client_dependencies
@@ -192,7 +193,7 @@ def initialize_options(self) -> None:
]

pandas_dependencies = [
- 'pandas==1.1.5',
+ 'pandas',
]

mysql_dependencies = [
@@ -255,7 +256,7 @@ def initialize_options(self) -> None:
'coverage',
'mock',
] + (
- nose_dependencies +
+ pytest_dependencies +
cli_dependencies_base +
airflow_dependencies +
gsheet_dependencies +
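With this change, pytest_dependencies feeds both the itest and unittest extras, so installs like pip install 'records-mover[unittest]' pull in pytest and pytest-cov. A condensed sketch of the wiring (the names mirror the diff; the surrounding setup() call is abbreviated, not the file's full contents):

    from setuptools import setup

    pytest_dependencies = ['pytest', 'pytest-cov']

    setup(
        name='records-mover',
        extras_require={
            # Abbreviated: the real file composes several more lists here.
            'itest': ['jsonschema', 'pytz', 'wheel'] + pytest_dependencies,
            'unittest': ['coverage', 'mock'] + pytest_dependencies,
        },
    )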
3 changes: 1 addition & 2 deletions tests/component/records/schema/field/test_dtype.py
@@ -1,4 +1,3 @@
- from nose.tools import assert_equal
from mock import patch
from records_mover.records.schema.field import RecordsSchemaField
from records_mover.records.schema.field.constraints import (
@@ -29,7 +28,7 @@ def check_dtype(field_type, constraints, expectation):
representations=None,
)
out = field.cast_series_type(pd.Series(1, dtype=np.int8))
- assert_equal(out.dtype, expectation)
+ assert out.dtype == expectation


def test_to_pandas_dtype_integer_no_nullable():
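The only change the nose removal forces on this test file is swapping assert_equal for a bare assert: pytest rewrites assert statements at collection time, so a failing comparison still reports both operands. A self-contained illustration of the migrated style (a hypothetical test, not one from this file):

    import numpy as np
    import pandas as pd

    def test_int8_dtype_survives():
        out = pd.Series([1], dtype=np.int8)
        # A plain assert replaces nose.tools.assert_equal; on failure,
        # pytest prints both out.dtype and the expected dtype.
        assert out.dtype == np.int8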
32 changes: 16 additions & 16 deletions tests/integration/itest
@@ -210,91 +210,91 @@ def run_test(args, target, parser):
with dockerized_dbs():
if (args.docker):
docker_compose_run(['with-db', 'dockerized-vertica',
- 'nosetests', '--xunit-file=nosetests.xml', '.'],
+ 'pytest', '--junitxml=pytest.xml', '.'],
cwd="/usr/src/app/tests/integration/records/single_db")

else:
with local_dockerized_dbfacts():
subprocess.check_call(['with-db', 'dockerized-vertica',
- 'nosetests', '--xunit-file=nosetests.xml', '.'],
+ 'pytest', '--junitxml=pytest.xml', '.'],
cwd=f"{script_dir}/records/single_db")
elif (target == 'vertica-s3'):
with dockerized_dbs(), set_s3_scratch_bucket():
if (args.docker):
docker_compose_run(['with-db', 'dockerized-vertica',
- 'nosetests', '--xunit-file=nosetests.xml', '.'],
+ 'pytest', '--junitxml=pytest.xml', '.'],
prefixes=["with-aws-creds", "circleci"],
cwd="/usr/src/app/tests/integration/records/single_db")
else:
with local_dockerized_dbfacts():
subprocess.check_call(["with-db", "dockerized-vertica",
"with-aws-creds", "circleci",
- "nosetests", "--xunit-file=nosetests.xml", "."],
+ "pytest", "--junitxml=pytest.xml", "."],
cwd=f"{script_dir}/records/single_db")
elif (target == 'mysql'):
with dockerized_dbs():
if (args.docker):
docker_compose_run(['with-db', 'dockerized-mysql',
- 'nosetests', '--xunit-file=nosetests.xml', '.'],
+ 'pytest', '--junitxml=pytest.xml', '.'],
prefixes=["with-aws-creds", "circleci"],
cwd="/usr/src/app/tests/integration/records/single_db")
else:
with local_dockerized_dbfacts():
subprocess.check_call(["with-db", "dockerized-mysql",
- "nosetests", "--xunit-file=nosetests.xml", "."],
+ "pytest", "--junitxml=pytest.xml", "."],
cwd=f"{script_dir}/records/single_db")
elif (target == 'postgres'):
with dockerized_dbs():
if (args.docker):
docker_compose_run(['with-db', 'dockerized-postgres',
- 'nosetests', '--xunit-file=nosetests.xml', '.'],
+ 'pytest', '--junitxml=pytest.xml', '.'],
prefixes=["with-aws-creds", "circleci"],
cwd="/usr/src/app/tests/integration/records/single_db")
else:
with local_dockerized_dbfacts():
subprocess.check_call(["with-db", "dockerized-postgres",
- "nosetests", "--xunit-file=nosetests.xml", "."],
+ "pytest", "--junitxml=pytest.xml", "."],
cwd=f"{script_dir}/records/single_db")
elif (target == 'redshift-s3'):
with set_s3_scratch_bucket():
if (args.docker):
docker_compose_run(['with-db', 'demo-itest',
- 'nosetests', '--xunit-file=nosetests.xml', '.'],
+ 'pytest', '--junitxml=pytest.xml', '.'],
prefixes=["with-aws-creds", "circleci"],
cwd="/usr/src/app/tests/integration/records/single_db")
else:
subprocess.check_call(['with-db', 'demo-itest',
"with-aws-creds", "circleci",
- 'nosetests', '--xunit-file=nosetests.xml', '.'],
+ 'pytest', '--junitxml=pytest.xml', '.'],
cwd=f"{script_dir}/records/single_db")
elif (target == 'redshift-no-s3'):
if (args.docker):
docker_compose_run(['with-db', 'demo-itest',
- 'nosetests', '--xunit-file=nosetests.xml', '.'],
+ 'pytest', '--junitxml=pytest.xml', '.'],
prefixes=["with-aws-creds", "circleci"],
cwd="/usr/src/app/tests/integration/records/single_db")
else:
subprocess.check_call(['with-db', 'demo-itest',
- 'nosetests', '--xunit-file=nosetests.xml', '.'],
+ 'pytest', '--junitxml=pytest.xml', '.'],
cwd=f"{script_dir}/records/single_db")
elif (target == 'bigquery-no-gcs'):
if (args.docker):
docker_compose_run(['with-db', 'bltoolsdevbq-bq_itest',
- 'nosetests', '--xunit-file=nosetests.xml', '.'],
+ 'pytest', '--junitxml=pytest.xml', '.'],
cwd="/usr/src/app/tests/integration/records/single_db")
else:
subprocess.check_call(['with-db', 'bltoolsdevbq-bq_itest',
- 'nosetests', '--xunit-file=nosetests.xml', '.'],
+ 'pytest', '--junitxml=pytest.xml', '.'],
cwd=f"{script_dir}/records/single_db")
elif (target == 'bigquery-gcs'):
with set_gcs_scratch_bucket():
if (args.docker):
docker_compose_run(['with-db', 'bltoolsdevbq-bq_itest',
- 'nosetests', '--xunit-file=nosetests.xml', '.'],
+ 'pytest', '--junitxml=pytest.xml', '.'],
cwd="/usr/src/app/tests/integration/records/single_db")
else:
subprocess.check_call(['with-db', 'bltoolsdevbq-bq_itest',
- 'nosetests', '--xunit-file=nosetests.xml', '.'],
+ 'pytest', '--junitxml=pytest.xml', '.'],
cwd=f"{script_dir}/records/single_db")
elif (target == 'table2table'):
with set_s3_scratch_bucket(), set_gcs_scratch_bucket(), dockerized_dbs():
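Every branch of run_test above repeats one shape: wrap pytest in with-db (plus optional credential helpers) and run it either inside docker-compose or from the host. A condensed sketch of that pattern; docker_compose_run stands in for the script's own helper, and the compose service name is an assumption:

    import subprocess

    def docker_compose_run(cmd, cwd):
        # Stand-in for the script's helper; 'records-mover' service name assumed.
        subprocess.check_call(['docker-compose', 'run', '-w', cwd,
                               'records-mover', *cmd])

    def run_single_db_suite(db_name, in_docker, prefixes=(), script_dir='.'):
        cmd = ['with-db', db_name, *prefixes,
               'pytest', '--junitxml=pytest.xml', '.']
        if in_docker:
            # Inside the container the checkout lives under /usr/src/app.
            docker_compose_run(cmd, cwd='/usr/src/app/tests/integration/records/single_db')
        else:
            subprocess.check_call(cmd, cwd=f'{script_dir}/records/single_db')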