Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

⚠️ Time to say goodbye to py37 #24091

Merged
merged 10 commits into from
Jun 28, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions .github/conda/meta.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@ requirements:
- pip
- numpy >=1.17
- dataclasses
- importlib_metadata
Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I am not quite sure what this file is for — it's under `.github/conda`… (?)

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think it automates the releases on conda. Basically, it no longer needs `dataclasses` (that was a backport for Python 3.7), and it won't need `importlib_metadata` either once the minimum supported version is Python 3.8, since `importlib.metadata` is in the standard library from 3.8 onward.

- huggingface_hub
- packaging
- filelock
Expand All @@ -31,7 +30,6 @@ requirements:
- python
- numpy >=1.17
- dataclasses
- importlib_metadata
- huggingface_hub
- packaging
- filelock
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/stale.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v4
with:
python-version: 3.7
python-version: 3.8

- name: Install requirements
run: |
Expand Down
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ You will need basic `git` proficiency to contribute to
manual. Type `git --help` in a shell and enjoy! If you prefer books, [Pro
Git](https://git-scm.com/book/en/v2) is a very good reference.

You'll need **[Python 3.7]((https://github.com/huggingface/transformers/blob/main/setup.py#L426))** or above to contribute to 🤗 Transformers. Follow the steps below to start contributing:
You'll need **[Python 3.8](https://github.com/huggingface/transformers/blob/main/setup.py#L426)** or above to contribute to 🤗 Transformers. Follow the steps below to start contributing:

1. Fork the [repository](https://github.com/huggingface/transformers) by
clicking on the **[Fork](https://github.com/huggingface/transformers/fork)** button on the repository's page. This creates a copy of the code
Expand Down
6 changes: 2 additions & 4 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@
"pytest>=7.2.0",
"pytest-timeout",
"pytest-xdist",
"python>=3.7.0",
"python>=3.8.0",
"ray[tune]",
"regex!=2019.12.17",
"requests",
Expand Down Expand Up @@ -413,7 +413,6 @@ def run(self):

# when modifying the following list, make sure to update src/transformers/dependency_versions_check.py
install_requires = [
deps["importlib_metadata"] + ";python_version<'3.8'", # importlib_metadata for Python versions that don't have it
Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

done

deps["filelock"], # filesystem locks, e.g., to prevent parallel downloads
deps["huggingface-hub"],
deps["numpy"],
Expand Down Expand Up @@ -444,7 +443,7 @@ def run(self):
zip_safe=False,
extras_require=extras,
entry_points={"console_scripts": ["transformers-cli=transformers.commands.transformers_cli:main"]},
python_requires=">=3.7.0",
python_requires=">=3.8.0",
install_requires=install_requires,
classifiers=[
"Development Status :: 5 - Production/Stable",
Expand All @@ -454,7 +453,6 @@ def run(self):
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
Expand Down
4 changes: 0 additions & 4 deletions src/transformers/dependency_versions_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys

from .dependency_versions_table import deps
from .utils.versions import require_version, require_version_core
Expand All @@ -38,9 +37,6 @@
"pyyaml",
]

if sys.version_info < (3, 8):
pkgs_to_check_at_runtime.append("importlib_metadata")

for pkg in pkgs_to_check_at_runtime:
if pkg in deps:
if pkg == "tokenizers":
Expand Down
2 changes: 1 addition & 1 deletion src/transformers/dependency_versions_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@
"pytest": "pytest>=7.2.0",
"pytest-timeout": "pytest-timeout",
"pytest-xdist": "pytest-xdist",
"python": "python>=3.7.0",
"python": "python>=3.8.0",
"ray[tune]": "ray[tune]",
"regex": "regex!=2019.12.17",
"requests": "requests",
Expand Down
10 changes: 1 addition & 9 deletions src/transformers/hf_argparser.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,19 +21,11 @@
from enum import Enum
from inspect import isclass
from pathlib import Path
from typing import Any, Callable, Dict, Iterable, List, NewType, Optional, Tuple, Union, get_type_hints
from typing import Any, Callable, Dict, Iterable, List, Literal, NewType, Optional, Tuple, Union, get_type_hints

import yaml


try:
# For Python versions <3.8, Literal is not in typing: https://peps.python.org/pep-0586/
from typing import Literal
except ImportError:
# For Python 3.7
from typing_extensions import Literal


DataClass = NewType("DataClass", Any)
DataClassType = NewType("DataClassType", Any)

Expand Down
14 changes: 6 additions & 8 deletions src/transformers/integrations.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
Integrations with other Python libraries.
"""
import functools
import importlib.metadata
import importlib.util
import json
import numbers
Expand All @@ -31,7 +32,6 @@

from . import __version__ as version
from .utils import flatten_dict, is_datasets_available, is_pandas_available, is_torch_available, logging
from .utils.versions import importlib_metadata


logger = logging.get_logger(__name__)
Expand Down Expand Up @@ -59,13 +59,13 @@
)
if TYPE_CHECKING and _has_neptune:
try:
_neptune_version = importlib_metadata.version("neptune")
_neptune_version = importlib.metadata.version("neptune")
logger.info(f"Neptune version {_neptune_version} available.")
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
try:
_neptune_version = importlib_metadata.version("neptune-client")
_neptune_version = importlib.metadata.version("neptune-client")
logger.info(f"Neptune-client version {_neptune_version} available.")
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
_has_neptune = False

from .trainer_callback import ProgressCallback, TrainerCallback # noqa: E402
Expand Down Expand Up @@ -367,10 +367,8 @@ def dynamic_modules_import_trainable(*args, **kwargs):
def run_hp_search_sigopt(trainer, n_trials: int, direction: str, **kwargs) -> BestRun:
import sigopt

from transformers.utils.versions import importlib_metadata

if trainer.args.process_index == 0:
if importlib_metadata.version("sigopt") >= "8.0.0":
if importlib.metadata.version("sigopt") >= "8.0.0":
sigopt.set_project("huggingface")

experiment = sigopt.create_experiment(
Expand Down
11 changes: 6 additions & 5 deletions src/transformers/modeling_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
# limitations under the License.
import collections
import gc
import importlib.metadata
import inspect
import json
import os
Expand Down Expand Up @@ -73,7 +74,7 @@
replace_return_docstrings,
)
from .utils.hub import convert_file_size_to_int, get_checkpoint_shard_files
from .utils.import_utils import ENV_VARS_TRUE_VALUES, importlib_metadata, is_sagemaker_mp_enabled
from .utils.import_utils import ENV_VARS_TRUE_VALUES, is_sagemaker_mp_enabled
from .utils.quantization_config import BitsAndBytesConfig
from .utils.versions import require_version_core

Expand Down Expand Up @@ -2202,7 +2203,7 @@ def from_pretrained(
use_safetensors = False

if is_bitsandbytes_available():
is_8bit_serializable = version.parse(importlib_metadata.version("bitsandbytes")) > version.parse("0.37.2")
is_8bit_serializable = version.parse(importlib.metadata.version("bitsandbytes")) > version.parse("0.37.2")
else:
is_8bit_serializable = False

Expand Down Expand Up @@ -2737,7 +2738,7 @@ def from_pretrained(

modules_to_not_convert.extend(keys_on_cpu)

supports_4bit = version.parse(importlib_metadata.version("bitsandbytes")) >= version.parse("0.39.0")
supports_4bit = version.parse(importlib.metadata.version("bitsandbytes")) >= version.parse("0.39.0")

if load_in_4bit and not supports_4bit:
raise ValueError(
Expand All @@ -2750,7 +2751,7 @@ def from_pretrained(
)
# training in 8-bit is only available in 0.37.0+
model._is_quantized_training_enabled = version.parse(
importlib_metadata.version("bitsandbytes")
importlib.metadata.version("bitsandbytes")
) >= version.parse("0.37.0")

model.config.quantization_config = quantization_config
Expand Down Expand Up @@ -2785,7 +2786,7 @@ def from_pretrained(
target_dtype = torch_dtype

if load_in_4bit:
if version.parse(importlib_metadata.version("accelerate")) > version.parse("0.19.0"):
if version.parse(importlib.metadata.version("accelerate")) > version.parse("0.19.0"):
from accelerate.utils import CustomDtype

target_dtype = CustomDtype.INT4
Expand Down
5 changes: 3 additions & 2 deletions src/transformers/utils/bitsandbytes.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
import importlib.metadata
import warnings
from copy import deepcopy

from packaging import version

from ..utils import logging
from .import_utils import importlib_metadata, is_accelerate_available, is_bitsandbytes_available
from .import_utils import is_accelerate_available, is_bitsandbytes_available


if is_bitsandbytes_available():
Expand Down Expand Up @@ -73,7 +74,7 @@ class `Int8Params` from `bitsandbytes`.
elif isinstance(value, torch.Tensor):
new_value = value.to("cpu")
if value.dtype == torch.int8:
is_8bit_serializable = version.parse(importlib_metadata.version("bitsandbytes")) > version.parse(
is_8bit_serializable = version.parse(importlib.metadata.version("bitsandbytes")) > version.parse(
"0.37.2"
)
if not is_8bit_serializable:
Expand Down
32 changes: 16 additions & 16 deletions src/transformers/utils/import_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
Import utilities: Utilities related to imports and our lazy inits.
"""

import importlib.metadata
import importlib.util
import json
import os
Expand All @@ -31,7 +32,6 @@
from packaging import version

from . import logging
from .versions import importlib_metadata


logger = logging.get_logger(__name__) # pylint: disable=invalid-name
Expand All @@ -44,9 +44,9 @@ def _is_package_available(pkg_name: str, return_version: bool = False) -> Union[
package_version = "N/A"
if package_exists:
try:
package_version = importlib_metadata.version(pkg_name)
package_version = importlib.metadata.version(pkg_name)
package_exists = True
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
package_exists = False
logger.debug(f"Detected {pkg_name} version {package_version}")
if return_version:
Expand All @@ -71,7 +71,7 @@ def _is_package_available(pkg_name: str, return_version: bool = False) -> Union[
_accelerate_available, _accelerate_version = _is_package_available("accelerate", return_version=True)
_apex_available = _is_package_available("apex")
_bitsandbytes_available = _is_package_available("bitsandbytes")
# `importlib_metadata.version` doesn't work with `bs4` but `beautifulsoup4`. For `importlib.util.find_spec`, reversed.
# `importlib.metadata.version` doesn't work with `bs4` but `beautifulsoup4`. For `importlib.util.find_spec`, reversed.
_bs4_available = importlib.util.find_spec("bs4") is not None
_coloredlogs_available = _is_package_available("coloredlogs")
_datasets_available = _is_package_available("datasets")
Expand All @@ -80,13 +80,13 @@ def _is_package_available(pkg_name: str, return_version: bool = False) -> Union[
# We need to check both `faiss` and `faiss-cpu`.
_faiss_available = importlib.util.find_spec("faiss") is not None
try:
_faiss_version = importlib_metadata.version("faiss")
_faiss_version = importlib.metadata.version("faiss")
logger.debug(f"Successfully imported faiss version {_faiss_version}")
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
try:
_faiss_version = importlib_metadata.version("faiss-cpu")
_faiss_version = importlib.metadata.version("faiss-cpu")
logger.debug(f"Successfully imported faiss version {_faiss_version}")
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
_faiss_available = False
_ftfy_available = _is_package_available("ftfy")
_ipex_available, _ipex_version = _is_package_available("intel_extension_for_pytorch", return_version=True)
Expand Down Expand Up @@ -115,8 +115,8 @@ def _is_package_available(pkg_name: str, return_version: bool = False) -> Union[
_sklearn_available = importlib.util.find_spec("sklearn") is not None
if _sklearn_available:
try:
importlib_metadata.version("scikit-learn")
except importlib_metadata.PackageNotFoundError:
importlib.metadata.version("scikit-learn")
except importlib.metadata.PackageNotFoundError:
_sklearn_available = False
_smdistributed_available = importlib.util.find_spec("smdistributed") is not None
_soundfile_available = _is_package_available("soundfile")
Expand Down Expand Up @@ -168,9 +168,9 @@ def _is_package_available(pkg_name: str, return_version: bool = False) -> Union[
# For the metadata, we have to look for both tensorflow and tensorflow-cpu
for pkg in candidates:
try:
_tf_version = importlib_metadata.version(pkg)
_tf_version = importlib.metadata.version(pkg)
break
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
pass
_tf_available = _tf_version is not None
if _tf_available:
Expand All @@ -189,9 +189,9 @@ def _is_package_available(pkg_name: str, return_version: bool = False) -> Union[
or importlib.util.find_spec("oneccl_bindings_for_pytorch") is not None
)
try:
ccl_version = importlib_metadata.version("oneccl_bind_pt")
ccl_version = importlib.metadata.version("oneccl_bind_pt")
logger.debug(f"Detected oneccl_bind_pt version {ccl_version}")
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
_is_ccl_available = False


Expand Down Expand Up @@ -530,8 +530,8 @@ def is_vision_available():
_pil_available = importlib.util.find_spec("PIL") is not None
if _pil_available:
try:
package_version = importlib_metadata.version("Pillow")
except importlib_metadata.PackageNotFoundError:
package_version = importlib.metadata.version("Pillow")
except importlib.metadata.PackageNotFoundError:
return False
logger.debug(f"Detected PIL version {package_version}")
return _pil_available
Expand Down
4 changes: 2 additions & 2 deletions src/transformers/utils/quantization_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import importlib.metadata
import json
import os
from dataclasses import dataclass
Expand All @@ -23,7 +24,6 @@
from packaging import version

from ..utils import is_torch_available, logging
from ..utils.import_utils import importlib_metadata


if is_torch_available():
Expand Down Expand Up @@ -141,7 +141,7 @@ def post_init(self):
if not isinstance(self.bnb_4bit_use_double_quant, bool):
raise ValueError("bnb_4bit_use_double_quant must be a boolean")

if self.load_in_4bit and not version.parse(importlib_metadata.version("bitsandbytes")) >= version.parse(
if self.load_in_4bit and not version.parse(importlib.metadata.version("bitsandbytes")) >= version.parse(
"0.39.0"
):
raise ValueError(
Expand Down
Loading