fix: update minimum dependency versions
This PR updates the minimum dependency versions to match the oldest versions I found to be actually runnable. It also updates the tests to use constraint files so that at least one test session runs against these minimum versions.
tswast committed Sep 21, 2020
1 parent 6160fee commit 85b9125
Showing 14 changed files with 125 additions and 67 deletions.
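Taken together, the noxfile.py changes below boil down to one pattern: compute the path to a per-interpreter constraints file and pass it to every pip install with -c. The following is a minimal sketch assembled from the diff, not the actual noxfile; the session name, Python versions, and test path are illustrative. Note that pip's -c/--constraint flag only pins the versions of packages that are being installed anyway, so the empty constraints files added for Python 3.6-3.8 simply leave those sessions unconstrained.

import pathlib

import nox

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()


@nox.session(python=["3.5", "3.8"])
def unit(session):
    # Every install passes the per-interpreter constraints file, so the
    # session for the oldest supported interpreter runs against the declared
    # minimum dependency versions while the newest stays unpinned.
    constraints_path = str(
        CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
    )
    session.install("mock", "pytest", "pytest-cov", "-c", constraints_path)
    session.install("-e", ".[all]", "-c", constraints_path)
    session.run("pytest", "tests/unit")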
74 changes: 49 additions & 25 deletions noxfile.py
@@ -14,6 +14,7 @@

from __future__ import absolute_import

import pathlib
import os
import shutil

@@ -22,6 +23,7 @@

BLACK_VERSION = "black==19.10b0"
BLACK_PATHS = ("docs", "google", "samples", "tests", "noxfile.py", "setup.py")
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()


def default(session):
@@ -32,27 +34,33 @@ def default(session):
Python corresponding to the ``nox`` binary on the ``PATH`` can
run the tests.
"""
constraints_path = str(
CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
)

# Install all test dependencies, then install local packages in-place.
session.install(
"mock", "pytest", "google-cloud-testutils", "pytest-cov", "freezegun"
"mock",
"pytest",
"google-cloud-testutils",
"pytest-cov",
"freezegun",
"-c",
constraints_path,
)
session.install("grpcio")

# fastparquet is not included in .[all] because, in general, it's redundant
# with pyarrow. We still want to run some unit tests with fastparquet
# serialization, though.
session.install("-e", ".[all,fastparquet]")

# IPython does not support Python 2 after version 5.x
if session.python == "2.7":
session.install("ipython==5.5")
# The [all] extra is not installable on Python 2.7.
session.install("-e", ".[pandas]", "-c", constraints_path)
elif session.python == "3.5":
session.install("-e", ".[all]", "-c", constraints_path)
else:
session.install("ipython")
# fastparquet is not included in .[all] because, in general, it's
# redundant with pyarrow. We still want to run some unit tests with
# fastparquet serialization, though.
session.install("-e", ".[all,fastparquet]", "-c", constraints_path)

# opentelemetry was not added to [all] because opentelemetry does not support Python 2.
# Exporter does not need to be in nox thus it has been added to README documentation
if session.python != "2.7":
session.install("-e", ".[opentelemetry]")
session.install("ipython", "-c", constraints_path)

# Run py.test against the unit tests.
session.run(
@@ -79,6 +87,10 @@ def unit(session):
def system(session):
"""Run the system test suite."""

constraints_path = str(
CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
)

# Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
@@ -88,18 +100,21 @@ def system(session):
session.skip("Credentials must be set via environment variable.")

# Use pre-release gRPC for system tests.
session.install("--pre", "grpcio")
session.install("--pre", "grpcio", "-c", constraints_path)

# Install all test dependencies, then install local packages in place.
session.install("mock", "pytest", "psutil", "google-cloud-testutils")
session.install("google-cloud-storage")
session.install("-e", ".[all]")
session.install(
"mock", "pytest", "psutil", "google-cloud-testutils", "-c", constraints_path
)
session.install("google-cloud-storage", "-c", constraints_path)

# IPython does not support Python 2 after version 5.x
if session.python == "2.7":
session.install("ipython==5.5")
# The [all] extra is not installable on Python 2.7.
session.install("-e", ".[pandas]", "-c", constraints_path)
else:
session.install("ipython")
session.install("-e", ".[all]", "-c", constraints_path)

session.install("ipython", "-c", constraints_path)

# Run py.test against the system tests.
session.run(
@@ -111,15 +126,24 @@
def snippets(session):
"""Run the snippets test suite."""

constraints_path = str(
CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
)

# Sanity check: Only run snippets tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable.")

# Install all test dependencies, then install local packages in place.
session.install("mock", "pytest", "google-cloud-testutils")
session.install("google-cloud-storage")
session.install("grpcio")
session.install("-e", ".[all]")
session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path)
session.install("google-cloud-storage", "-c", constraints_path)
session.install("grpcio", "-c", constraints_path)

if session.python == "2.7":
# The [all] extra is not installable on Python 2.7.
session.install("-e", ".[pandas]", "-c", constraints_path)
else:
session.install("-e", ".[all]", "-c", constraints_path)

# Run py.test against the snippets tests.
# Skip tests in samples/snippets, as those are run in a different session
8 changes: 4 additions & 4 deletions samples/snippets/jupyter_tutorial_test.py
@@ -11,12 +11,12 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import IPython
from IPython.terminal import interactiveshell
from IPython.testing import tools
import matplotlib
import pytest

IPython = pytest.importorskip("IPython")
interactiveshell = pytest.importorskip("IPython.terminal.interactiveshell")
tools = pytest.importorskip("IPython.testing.tools")
matplotlib = pytest.importorskip("matplotlib")

# Ignore semicolon lint warning because semicolons are used in notebooks
# flake8: noqa E703
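The jupyter_tutorial_test.py change above replaces plain imports with pytest.importorskip, which returns the imported module on success and skips every test in the file when the dependency is missing. A self-contained sketch of the same idiom (the test name and assertion are illustrative, not from the diff):

import pytest

# Skip the whole module if matplotlib is absent; on success the call returns
# the module object, so it can be used exactly like a normal import.
matplotlib = pytest.importorskip("matplotlib")


def test_non_interactive_backend_can_be_selected():
    matplotlib.use("Agg")  # a headless backend, as notebook tests typically need
    assert "agg" in matplotlib.get_backend().lower()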
4 changes: 3 additions & 1 deletion samples/tests/test_query_to_arrow.py
@@ -12,10 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import pyarrow
import pytest

from .. import query_to_arrow

pyarrow = pytest.importorskip("pyarrow")


def test_query_to_arrow(capsys,):

22 changes: 10 additions & 12 deletions setup.py
@@ -32,7 +32,7 @@
'enum34; python_version < "3.4"',
"google-api-core >= 1.21.0, < 2.0dev",
"google-cloud-core >= 1.4.1, < 2.0dev",
"google-resumable-media >= 0.5.0, < 2.0dev",
"google-resumable-media >= 0.6.0, < 2.0dev",
"six >=1.13.0,< 2.0.0dev",
]
extras = {
@@ -41,18 +41,18 @@
# Due to an issue in pip's dependency resolver, the `grpc` extra is not
# installed, even though `google-cloud-bigquery-storage` specifies it
# as `google-api-core[grpc]`. We thus need to explicitly specify it here.
# See: https://github.com/googleapis/python-bigquery/issues/83
"grpcio >= 1.8.2, < 2.0dev",
"pyarrow >= 1.0.0, < 2.0dev; python_version >= '3.5'",
# See: https://github.com/googleapis/python-bigquery/issues/83 The
# grpc.Channel.close() method isn't added until 1.32.0.
# https://github.com/grpc/grpc/pull/15254
"grpcio >= 1.32.0, < 2.0dev",
"pyarrow >= 1.0.0, < 2.0dev",
],
"pandas": ["pandas>=0.17.1"],
# Exclude PyArrow dependency from Windows Python 2.7.
"pandas": ["pandas>=0.23.0"],
"pyarrow": [
"pyarrow >= 1.0.0, < 2.0dev; python_version >= '3.5'",
# Pyarrow >= 0.17.0 is not compatible with Python 2 anymore.
"pyarrow < 0.17.0; python_version < '3.0' and platform_system != 'Windows'",
# pyarrow 1.0.0 is required for the use of timestamp_as_object keyword.
"pyarrow >= 1.0.0, < 2.0dev",
],
"tqdm": ["tqdm >= 4.0.0, <5.0.0dev"],
"tqdm": ["tqdm >= 4.7.4, <5.0.0dev"],
"fastparquet": [
"fastparquet",
"python-snappy",
@@ -77,8 +77,6 @@
# creates a dependency on pre-release versions of numpy. See:
# https://github.com/googleapis/google-cloud-python/issues/8549
"fastparquet",
# Skip opentelemetry because the library is not compatible with Python 2.
"opentelemetry",
):
continue
all_extras.extend(extras[extra])
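For context on the last setup.py hunk: the loop builds an [all] extra out of the other extras while skipping any extra that cannot be installed everywhere. A standalone sketch of that pattern, with a deliberately trimmed-down extras dict for illustration:

# Build an "all" extra from the other extras, mirroring the setup.py loop above.
extras = {
    "pandas": ["pandas>=0.23.0"],
    "tqdm": ["tqdm >= 4.7.4, <5.0.0dev"],
    "fastparquet": ["fastparquet", "python-snappy"],
}

all_extras = []
for extra in extras:
    if extra in (
        # fastparquet is excluded from [all] because it pulls in pre-release
        # numpy builds (see the comment in the diff above).
        "fastparquet",
    ):
        continue
    all_extras.extend(extras[extra])

extras["all"] = all_extras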
8 changes: 8 additions & 0 deletions testing/constraints-2.7.txt
@@ -0,0 +1,8 @@
google-api-core==1.21.0
google-cloud-core==1.4.1
google-cloud-storage==1.30.0
google-resumable-media==0.6.0
ipython==5.5
pandas==0.23.0
six==1.13.0
tqdm==4.7.4
12 changes: 12 additions & 0 deletions testing/constraints-3.5.txt
@@ -0,0 +1,12 @@
google-api-core==1.21.0
google-cloud-bigquery-storage==1.0.0
google-cloud-core==1.4.1
google-resumable-media==0.6.0
google-cloud-storage==1.30.0
grpcio==1.32.0
ipython==5.5
# pandas 0.23.0 is the first version to work with pyarrow to_pandas.
pandas==0.23.0
pyarrow==1.0.0
six==1.13.0
tqdm==4.7.4
Empty file added testing/constraints-3.6.txt
Empty file added testing/constraints-3.7.txt
Empty file added testing/constraints-3.8.txt
19 changes: 14 additions & 5 deletions tests/system.py
@@ -64,6 +64,7 @@

from google.api_core.exceptions import PreconditionFailed
from google.api_core.exceptions import BadRequest
from google.api_core.exceptions import ClientError
from google.api_core.exceptions import Conflict
from google.api_core.exceptions import Forbidden
from google.api_core.exceptions import GoogleAPICallError
@@ -130,9 +131,17 @@
)

PANDAS_MINIMUM_VERSION = pkg_resources.parse_version("1.0.0")
PANDAS_INSTALLED_VERSION = pkg_resources.get_distribution("pandas").parsed_version
PYARROW_MINIMUM_VERSION = pkg_resources.parse_version("0.17.0")
PYARROW_INSTALLED_VERSION = pkg_resources.get_distribution("pyarrow").parsed_version

if pandas:
PANDAS_INSTALLED_VERSION = pkg_resources.get_distribution("pandas").parsed_version
else:
PANDAS_INSTALLED_VERSION = None

if pyarrow:
PYARROW_INSTALLED_VERSION = pkg_resources.get_distribution("pyarrow").parsed_version
else:
PYARROW_INSTALLED_VERSION = None


def _has_rows(result):
@@ -1312,9 +1321,9 @@ def test_load_table_from_file_w_explicit_location(self):
self.assertEqual("EU", load_job.location)

# Cannot cancel the job from the US.
with self.assertRaises(NotFound):
with self.assertRaises(ClientError):
client.cancel_job(job_id, location="US")
with self.assertRaises(NotFound):
with self.assertRaises(ClientError):
load_job_us.cancel()

# Can list the table rows.
@@ -2897,7 +2906,7 @@ def test_bigquery_magic():
LIMIT 10
"""
with io.capture_output() as captured:
result = ip.run_cell_magic("bigquery", "", sql)
result = ip.run_cell_magic("bigquery", "--use_rest_api", sql)

conn_count_end = len(current_process.connections())

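The tests/system.py hunk above guards the pkg_resources lookups so the module still imports when pandas or pyarrow is not installed. Those constants are typically consumed by version-gated skips; the test class below is a hypothetical illustration of that use, not code from the diff:

import unittest

import pkg_resources

try:
    import pandas
except ImportError:  # pragma: NO COVER
    pandas = None

PANDAS_MINIMUM_VERSION = pkg_resources.parse_version("1.0.0")
PANDAS_INSTALLED_VERSION = (
    pkg_resources.get_distribution("pandas").parsed_version if pandas else None
)


@unittest.skipIf(
    PANDAS_INSTALLED_VERSION is None
    or PANDAS_INSTALLED_VERSION < PANDAS_MINIMUM_VERSION,
    "Requires pandas >= 1.0.0",
)
class PandasFeatureTest(unittest.TestCase):
    def test_na_sentinel_exists(self):
        # pandas.NA was introduced in pandas 1.0, so this only runs when the
        # installed version meets the minimum above.
        self.assertTrue(hasattr(pandas, "NA"))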
2 changes: 1 addition & 1 deletion tests/unit/test_client.py
@@ -56,7 +56,7 @@
pyarrow = None

import google.api_core.exceptions
from google.api_core.gapic_v1 import client_info
from google.api_core import client_info
import google.cloud._helpers
from google.cloud import bigquery_v2
from google.cloud.bigquery.dataset import DatasetReference
14 changes: 11 additions & 3 deletions tests/unit/test_dbapi_connection.py
@@ -41,9 +41,11 @@ def _mock_client(self):
return mock_client

def _mock_bqstorage_client(self):
from google.cloud.bigquery_storage_v1 import client

mock_client = mock.create_autospec(client.BigQueryReadClient)
if bigquery_storage_v1 is None:
return None
mock_client = mock.create_autospec(
bigquery_storage_v1.client.BigQueryReadClient
)
mock_client.transport = mock.Mock(spec=["channel"])
mock_client.transport.channel = mock.Mock(spec=["close"])
return mock_client
@@ -127,6 +129,9 @@ def test_raises_error_if_closed(self):
):
getattr(connection, method)()

@unittest.skipIf(
bigquery_storage_v1 is None, "Requires `google-cloud-bigquery-storage`"
)
def test_close_closes_all_created_bigquery_clients(self):
client = self._mock_client()
bqstorage_client = self._mock_bqstorage_client()
@@ -147,6 +152,9 @@ def test_close_closes_all_created_bigquery_clients(self):
self.assertTrue(client.close.called)
self.assertTrue(bqstorage_client.transport.channel.close.called)

@unittest.skipIf(
bigquery_storage_v1 is None, "Requires `google-cloud-bigquery-storage`"
)
def test_close_does_not_close_bigquery_clients_passed_to_it(self):
client = self._mock_client()
bqstorage_client = self._mock_bqstorage_client()
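The test_dbapi_connection.py change above makes the mock factory return None when google-cloud-bigquery-storage is missing and gates the dependent tests with unittest.skipIf. A self-contained sketch of that idiom (the helper, test class, and assertions are hypothetical):

import unittest
from unittest import mock

try:
    from google.cloud import bigquery_storage_v1
except ImportError:  # pragma: NO COVER
    bigquery_storage_v1 = None


def make_mock_bqstorage_client():
    # Return None rather than raising, so this helper stays importable even
    # when the optional dependency is not installed.
    if bigquery_storage_v1 is None:
        return None
    client = mock.create_autospec(bigquery_storage_v1.client.BigQueryReadClient)
    client.transport = mock.Mock(spec=["channel"])
    return client


@unittest.skipIf(
    bigquery_storage_v1 is None, "Requires `google-cloud-bigquery-storage`"
)
class BQStorageHelperTest(unittest.TestCase):
    def test_helper_returns_usable_client(self):
        client = make_mock_bqstorage_client()
        self.assertIsNotNone(client)
        # The spec restricts the transport mock to the attribute tests rely on.
        self.assertTrue(hasattr(client.transport, "channel"))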
17 changes: 5 additions & 12 deletions tests/unit/test_magics.py
@@ -25,21 +25,10 @@
import pandas
except ImportError: # pragma: NO COVER
pandas = None
try:
import IPython
from IPython.utils import io
from IPython.testing import tools
from IPython.terminal import interactiveshell
except ImportError: # pragma: NO COVER
IPython = None

from google.api_core import exceptions
import google.auth.credentials

try:
from google.cloud import bigquery_storage_v1
except ImportError: # pragma: NO COVER
bigquery_storage_v1 = None
from google.cloud import bigquery
from google.cloud.bigquery import job
from google.cloud.bigquery import table
@@ -48,7 +37,11 @@
from test_utils.imports import maybe_fail_import


pytestmark = pytest.mark.skipif(IPython is None, reason="Requires `ipython`")
IPython = pytest.importorskip("IPython")
io = pytest.importorskip("IPython.utils.io")
tools = pytest.importorskip("IPython.testing.tools")
interactiveshell = pytest.importorskip("IPython.terminal.interactiveshell")
bigquery_storage_v1 = pytest.importorskip("google.cloud.bigquery_storage_v1")


@pytest.fixture(scope="session")
12 changes: 8 additions & 4 deletions tests/unit/test_table.py
@@ -2472,7 +2472,10 @@ def test_to_dataframe_no_tqdm_no_progress_bar(self):
with warnings.catch_warnings(record=True) as warned:
df = row_iterator.to_dataframe(create_bqstorage_client=False)

self.assertEqual(len(warned), 0)
user_warnings = [
warning for warning in warned if warning.category is UserWarning
]
self.assertEqual(len(user_warnings), 0)
self.assertEqual(len(df), 4)

@unittest.skipIf(pandas is None, "Requires `pandas`")
@@ -2499,9 +2502,10 @@ def test_to_dataframe_no_tqdm(self):
progress_bar_type="tqdm", create_bqstorage_client=False,
)

self.assertEqual(len(warned), 1)
for warning in warned:
self.assertIs(warning.category, UserWarning)
user_warnings = [
warning for warning in warned if warning.category is UserWarning
]
self.assertEqual(len(user_warnings), 1)

# Even though the progress bar won't show, downloading the dataframe
# should still work.
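The test_table.py change filters the captured warnings by category rather than asserting on the raw count, presumably so warnings of other categories raised by dependencies do not break the test. A tiny self-contained illustration of the same filtering (the warning messages are made up):

import warnings


def download_with_warnings():
    # Stand-in for library code that can emit unrelated warnings alongside
    # the UserWarning the test actually cares about.
    warnings.warn("internal dependency detail", DeprecationWarning)
    warnings.warn("no tqdm installed, skipping progress bar", UserWarning)


with warnings.catch_warnings(record=True) as warned:
    warnings.simplefilter("always")
    download_with_warnings()

user_warnings = [w for w in warned if w.category is UserWarning]
assert len(user_warnings) == 1  # the DeprecationWarning is not counted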
