From 4bdab80c1880dcf54d778184bcc5b921656e5992 Mon Sep 17 00:00:00 2001
From: riteshghorse
Date: Mon, 24 Jul 2023 11:17:43 -0400
Subject: [PATCH] pylints

---
 .../python/apache_beam/ml/inference/huggingface_inference.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/sdks/python/apache_beam/ml/inference/huggingface_inference.py b/sdks/python/apache_beam/ml/inference/huggingface_inference.py
index 19b8ed02fa9d..35c3a1686c70 100644
--- a/sdks/python/apache_beam/ml/inference/huggingface_inference.py
+++ b/sdks/python/apache_beam/ml/inference/huggingface_inference.py
@@ -33,7 +33,6 @@
 from apache_beam.ml.inference import utils
 from apache_beam.ml.inference.base import ModelHandler
 from apache_beam.ml.inference.base import PredictionResult
-from apache_beam.ml.inference.base import PredictionT
 from apache_beam.ml.inference.pytorch_inference import _convert_to_device
 from transformers import AutoModel
 from transformers import TFAutoModel
@@ -166,7 +165,7 @@ def __init__(
       framework: str,
       device: str = "CPU",
       *,
-      inference_fn: Optional[Callable[..., Iterable[PredictionT]]] = None,
+      inference_fn: Optional[Callable[..., Iterable[PredictionResult]]] = None,
       load_model_args: Optional[Dict[str, Any]] = None,
       inference_args: Optional[Dict[str, Any]] = None,
       min_batch_size: Optional[int] = None,
@@ -345,7 +344,7 @@ def __init__(
       model_class: Union[AutoModel, TFAutoModel],
       device: str = "CPU",
       *,
-      inference_fn: Optional[Callable[..., Iterable[PredictionT]]] = None,
+      inference_fn: Optional[Callable[..., Iterable[PredictionResult]]] = None,
       load_model_args: Optional[Dict[str, Any]] = None,
       inference_args: Optional[Dict[str, Any]] = None,
       min_batch_size: Optional[int] = None,
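
Note on the annotation this patch corrects: the handlers' `inference_fn` parameter is expected to yield `PredictionResult` objects, which is why the unused `PredictionT` import can be dropped. The sketch below is a hypothetical example, not part of the patch, of a custom inference function that satisfies the corrected annotation; the parameter order and the handler name `HuggingFaceModelHandlerTensor` mentioned in the trailing comment are assumptions based on the module this diff touches.

# Hypothetical sketch only -- not part of the patch. It shows a callable whose
# return type matches the corrected annotation, Iterable[PredictionResult].
# The parameter order is an assumption mirroring the module's default
# inference functions.
from typing import Any, Dict, Iterable, Optional, Sequence

from apache_beam.ml.inference.base import PredictionResult


def custom_inference_fn(
    batch: Sequence[Any],
    model: Any,
    device: str,
    inference_args: Optional[Dict[str, Any]] = None,
    model_id: Optional[str] = None,
) -> Iterable[PredictionResult]:
  # Run the loaded model on the whole batch (assumes the model is callable),
  # then pair every input with its prediction and tag it with the model id.
  predictions = model(batch, **(inference_args or {}))
  return [
      PredictionResult(example, inference, model_id)
      for example, inference in zip(batch, predictions)
  ]


# Such a callable would be passed through the keyword shown in the diff, e.g.
#   HuggingFaceModelHandlerTensor(..., inference_fn=custom_inference_fn)
# (handler name assumed from the module this patch modifies).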