diff --git a/neural_compressor/common/utils/logger.py b/neural_compressor/common/utils/logger.py
index a7f0b06009f..ffb420679b5 100644
--- a/neural_compressor/common/utils/logger.py
+++ b/neural_compressor/common/utils/logger.py
@@ -17,6 +17,7 @@
 """Logger: handles logging functionalities."""
 
+import functools
 import logging
 import os
 
@@ -137,6 +138,12 @@ def warning(msg, *args, **kwargs):
         else:
             Logger().get_logger().warning(msg, *args, **kwargs)
 
+    @functools.lru_cache(None)
+    def warning_once(msg, *args, **kwargs):
+        """Output log with the warning level only once."""
+        Logger.warning("Below warning will be shown only once:")
+        Logger.warning(msg, *args, **kwargs)
+
 
 level = Logger().get_logger().level
 level_name = logging.getLevelName(level)
diff --git a/neural_compressor/torch/algorithms/weight_only/modules.py b/neural_compressor/torch/algorithms/weight_only/modules.py
index ff27f579aa9..a89d65ae816 100644
--- a/neural_compressor/torch/algorithms/weight_only/modules.py
+++ b/neural_compressor/torch/algorithms/weight_only/modules.py
@@ -25,7 +25,7 @@
 from torch.autograd import Function
 from torch.nn import functional as F
 
-from neural_compressor.torch.utils import accelerator, logger
+from neural_compressor.torch.utils import accelerator, can_pack_with_numba, logger
 
 from .utility import quant_tensor
 
@@ -453,15 +453,7 @@ def pack_array_with_numba(
         # Try to pack with numba to accelerate the packing process.
         # If numba is not available or the packing method is not supported,
         # fall back to the torch implementation.
-        try:
-            import numba
-
-            numba.config.THREADING_LAYER = "safe"
-        except ImportError:
-            logger.warning("To accelerate packing, please install numba with `pip install numba tbb`.")
-            return self.pack_tensor_with_torch(torch.from_numpy(raw_array)).cpu().numpy()
-        except Exception as e:
-            logger.warning(f"Import numba failed with error: {e}, fallback to torch implementation.")
+        if not can_pack_with_numba():
             return self.pack_tensor_with_torch(torch.from_numpy(raw_array)).cpu().numpy()
 
         from neural_compressor.torch.utils.bit_packer import bit_packers
diff --git a/neural_compressor/torch/utils/environ.py b/neural_compressor/torch/utils/environ.py
index 0ee2bd23cee..9a506cf78fa 100644
--- a/neural_compressor/torch/utils/environ.py
+++ b/neural_compressor/torch/utils/environ.py
@@ -20,6 +20,8 @@
 import torch
 from packaging.version import Version
 
+from neural_compressor.common.utils import logger
+
 
 ################ Check imported sys.module first to decide behavior #################
 def is_ipex_imported() -> bool:
@@ -160,3 +162,66 @@ def new_func(*args, **kwargs):
         return output
 
     return new_func
+
+
+def can_pack_with_numba():  # pragma: no cover
+    """Check if Numba and TBB are available for packing.
+
+    To pack a tensor with Numba, both Numba and TBB are required, and TBB should be configured correctly.
+    """
+    if not is_numba_available():
+        logger.warning_once("Numba is not installed, please install it with `pip install numba`.")
+        return False
+    if not is_tbb_available():
+        return False
+    return True
+
+
+def is_numba_available():  # pragma: no cover
+    """Check if Numba is available."""
+    try:
+        import numba
+
+        return True
+    except ImportError:
+        return False
+
+
+def _is_tbb_installed():  # pragma: no cover
+    import importlib.metadata
+
+    try:
+        importlib.metadata.version("tbb")
+        return True
+    except importlib.metadata.PackageNotFoundError:
+        return False
+
+
+def _is_tbb_configured():  # pragma: no cover
+    try:
+        from numba.np.ufunc.parallel import _check_tbb_version_compatible
+
+        # check if TBB is present and compatible
+        _check_tbb_version_compatible()
+
+        return True
+    except ImportError as e:
+        logger.warning_once(f"TBB not available: {e}")
+        return False
+
+
+def is_tbb_available():  # pragma: no cover
+    """Check if TBB is available."""
+    if not _is_tbb_installed():
+        logger.warning_once("TBB is not installed, please install it with `pip install tbb`.")
+        return False
+    if not _is_tbb_configured():
+        logger.warning_once(
+            (
+                "TBB is installed but not configured correctly. \n"
+                "Please add the TBB library path to `LD_LIBRARY_PATH`, "
+                "for example: `export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib/`."
+            )
+        )
+        return False
+    return True
diff --git a/test/3x/common/test_logger.py b/test/3x/common/test_logger.py
index 69da729017a..a3366762b00 100644
--- a/test/3x/common/test_logger.py
+++ b/test/3x/common/test_logger.py
@@ -8,6 +8,7 @@
 """
 
 import unittest
+from unittest.mock import patch
 
 from neural_compressor.common.utils import Logger
 
@@ -72,6 +73,20 @@ def test_logger(self):
         # info(msg)
         # warning(msg)
 
+    @patch.object(Logger, "warning")
+    def test_warning_once(self, mock_method):
+
+        warning_message = "test warning message"
+        # First call
+        Logger.warning_once(warning_message)
+        mock_method.assert_called_with(warning_message)
+        # Second and third calls
+        Logger.warning_once(warning_message)
+        Logger.warning_once(warning_message)
+        # Call `warning_once` 3 times, but `warning` should only be called twice:
+        # once for the help message and once for the warning message itself.
+        assert mock_method.call_count == 2, "Expected warning to be called twice."
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/test/3x/torch/utils/test_torch_utility.py b/test/3x/torch/utils/test_torch_utility.py
index b84db61ff7a..e3bff570fc2 100644
--- a/test/3x/torch/utils/test_torch_utility.py
+++ b/test/3x/torch/utils/test_torch_utility.py
@@ -1,6 +1,9 @@
+from unittest.mock import patch
+
 import pytest
 import torch
 
+import neural_compressor.torch.utils.environ as inc_torch_env
 from neural_compressor.torch.utils.utility import get_double_quant_config_dict
 
 
@@ -77,3 +80,21 @@ def test_get_model_info(self):
     def test_double_quant_config_dict(self, double_quant_type):
         config_dict = get_double_quant_config_dict(double_quant_type)
         assert isinstance(config_dict, dict), "The returned object should be a dict."
+
+
+class TestPackingWithNumba:
+
+    @patch.object(inc_torch_env, "_is_tbb_installed", lambda: False)
+    def test_tbb_not_installed(self):
+        assert inc_torch_env.is_tbb_available() is False, "`is_tbb_available` should return False."
+        assert inc_torch_env.can_pack_with_numba() is False, "`can_pack_with_numba` should return False."
+
+    @patch.object(inc_torch_env, "_is_tbb_installed", lambda: True)
+    @patch.object(inc_torch_env, "_is_tbb_configured", lambda: False)
+    def test_tbb_installed_but_not_configured_right(self):
+        assert inc_torch_env.is_tbb_available() is False, "`is_tbb_available` should return False."
+        assert inc_torch_env.can_pack_with_numba() is False, "`can_pack_with_numba` should return False."
+
+    @patch.object(inc_torch_env, "is_numba_available", lambda: False)
+    def test_numba_not_installed(self):
+        assert inc_torch_env.can_pack_with_numba() is False, "`can_pack_with_numba` should return False."
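
A minimal usage sketch of the `Logger.warning_once` helper introduced above, assuming `neural_compressor` is importable; the example message strings are illustrative, not part of the patch. Because the method is wrapped in `functools.lru_cache`, repeated calls with an identical message are dropped after the first emission.

from neural_compressor.common.utils import Logger

for _ in range(3):
    # Only the first call reaches the underlying logger; the cached wrapper
    # silently swallows the two identical follow-up calls.
    Logger.warning_once("Deprecated option detected, please migrate to the new config.")

# A different message is a new cache key, so it is emitted once as well.
Logger.warning_once("Another one-time warning.")

One consequence of the `lru_cache` approach is that all arguments must be hashable, so `warning_once` is intended for plain string messages, unlike `warning`, which also accepts dict payloads.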
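Similarly, the new capability check can be probed on its own. The sketch below mirrors the fallback logic now used in `pack_array_with_numba`; the `print` calls are hypothetical placeholders standing in for the real packing paths.

from neural_compressor.torch.utils import can_pack_with_numba

if can_pack_with_numba():
    # Numba is importable and a compatible TBB threading layer was found.
    print("Packing will be accelerated with Numba.")
else:
    # A warning_once message has already explained the reason;
    # callers fall back to the pure torch packing implementation.
    print("Falling back to the torch packing implementation.")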