Enhance auto-tune module (#1608)
Signed-off-by: Kaihui-intel <[email protected]>
Signed-off-by: yiliu30 <[email protected]>
Signed-off-by: chensuyue <[email protected]>
Co-authored-by: Kaihui-intel <[email protected]>
Co-authored-by: chensuyue <[email protected]>
3 people authored Feb 7, 2024
1 parent 191383e commit ac47d9b
Showing 15 changed files with 345 additions and 98 deletions.
3 changes: 2 additions & 1 deletion .azure-pipelines/scripts/codeScan/pylint/pylint.sh
@@ -39,7 +39,8 @@ pip install torch \
prettytable \
psutil \
py-cpuinfo \
pyyaml
pyyaml \
pydantic \

if [ "${scan_module}" = "neural_solution" ]; then
cd /neural-compressor
2 changes: 2 additions & 0 deletions neural_compressor/common/__init__.py
@@ -20,6 +20,7 @@
set_resume_from,
set_workspace,
set_tensorboard,
dump_elapsed_time,
)
from neural_compressor.common.base_config import options

@@ -33,4 +34,5 @@
"set_random_seed",
"set_resume_from",
"set_tensorboard",
"dump_elapsed_time",
]
54 changes: 44 additions & 10 deletions neural_compressor/common/base_config.py
@@ -17,6 +17,7 @@

from __future__ import annotations

import inspect
import json
import re
from abc import ABC, abstractmethod
@@ -25,6 +26,7 @@
from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union

from neural_compressor.common import Logger
from neural_compressor.common.tuning_param import TuningParam
from neural_compressor.common.utils import (
BASE_CONFIG,
COMPOSABLE_CONFIG,
@@ -295,6 +297,15 @@ def __add__(self, other: BaseConfig) -> BaseConfig:
else:
return ComposableConfig(configs=[self, other])

@staticmethod
def get_the_default_value_of_param(config: BaseConfig, param: str) -> Any:
# Get the signature of the __init__ method
signature = inspect.signature(config.__init__)

# Get the parameters and their default values
parameters = signature.parameters
return parameters.get(param).default

def expand(self) -> List[BaseConfig]:
"""Expand the config.
@@ -331,19 +342,42 @@ def expand(self) -> List[BaseConfig]:
"""
config_list: List[BaseConfig] = []
params_list = self.params_list
params_dict = OrderedDict()
config = self
tuning_param_list = []
not_tuning_param_pair = {} # key is the param name, value is the user specified value
for param in params_list:
param_val = getattr(config, param)
# TODO (Yi) to handle param_val itself is a list
if isinstance(param_val, list):
params_dict[param] = param_val
# Create `TuningParam` for each param
# There are two cases:
# 1. The param is a string.
# 2. The param is a `TuningParam` instance.
if isinstance(param, str):
default_param = self.get_the_default_value_of_param(config, param)
tuning_param = TuningParam(name=param, tunable_type=List[type(default_param)])
elif isinstance(param, TuningParam):
tuning_param = param
else:
params_dict[param] = [param_val]
for params_values in product(*params_dict.values()):
new_config = self.__class__(**dict(zip(params_list, params_values)))
config_list.append(new_config)
logger.info(f"Expanded the {self.__class__.name} and got {len(config_list)} configs.")
raise ValueError(f"Unsupported param type: {param}")
# Assign the options to the `TuningParam` instance
param_val = getattr(config, tuning_param.name)
if param_val is not None:
if tuning_param.is_tunable(param_val):
tuning_param.options = param_val
tuning_param_list.append(tuning_param)
else:
not_tuning_param_pair[tuning_param.name] = param_val
logger.debug("Tuning param list: %s", tuning_param_list)
logger.debug("Not tuning param pair: %s", not_tuning_param_pair)
if len(tuning_param_list) == 0:
config_list = [config]
else:
tuning_param_name_lst = [tuning_param.name for tuning_param in tuning_param_list]
for params_values in product(*[tuning_param.options for tuning_param in tuning_param_list]):
tuning_param_pair = dict(zip(tuning_param_name_lst, params_values))
tmp_params_dict = {**not_tuning_param_pair, **tuning_param_pair}
new_config = self.__class__(**tmp_params_dict)
logger.info(new_config.to_dict())
config_list.append(new_config)
logger.info("Expanded the %s and got %d configs.", self.__class__.name, len(config_list))
return config_list

def _get_op_name_op_type_config(self):
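Aside: the rewritten expand() above treats list-valued options as tunable (one TuningParam each), keeps scalar options fixed, and enumerates the Cartesian product of the tunable options. A minimal standalone sketch of that enumeration, using a hypothetical option dict instead of a real BaseConfig subclass:

import itertools

# Hypothetical user options: scalar values stay fixed, list values are tunable.
user_options = {"weight_dtype": "int4", "weight_group_size": [32, 64], "weight_sym": [True, False]}

tunable = {name: val for name, val in user_options.items() if isinstance(val, list)}
fixed = {name: val for name, val in user_options.items() if not isinstance(val, list)}

# Merge each combination of tunable values back with the fixed values,
# mirroring the {**not_tuning_param_pair, **tuning_param_pair} step above.
expanded = [
    {**fixed, **dict(zip(tunable.keys(), values))}
    for values in itertools.product(*tunable.values())
]
print(len(expanded))  # 4: (32, True), (32, False), (64, True), (64, False)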
140 changes: 99 additions & 41 deletions neural_compressor/common/base_tuning.py
@@ -16,7 +16,7 @@
import copy
import inspect
import uuid
from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
from typing import Any, Callable, Dict, Generator, Iterator, List, Optional, Sized, Tuple, Union

from neural_compressor.common import Logger
from neural_compressor.common.base_config import BaseConfig, ComposableConfig
@@ -31,6 +31,10 @@
"TuningMonitor",
"TuningLogger",
"init_tuning",
"Sampler",
"SequentialSampler",
"default_sampler",
"ConfigSet",
]


@@ -123,36 +127,103 @@ def self_check(self) -> None:
evaluator = Evaluator()


class Sampler:
# TODO Separate sorting functionality of `ConfigLoader` into `Sampler` in the follow-up PR.
pass
class ConfigSet:

def __init__(self, config_list: List[BaseConfig]) -> None:
self.config_list = config_list

class ConfigLoader:
def __init__(self, config_set, sampler: Sampler) -> None:
self.config_set = config_set
self.sampler = sampler
def __getitem__(self, index) -> BaseConfig:
assert 0 <= index < len(self.config_list), f"Index {index} out of range."
return self.config_list[index]

@staticmethod
def parse_quant_config(quant_config: BaseConfig) -> List[BaseConfig]:
if isinstance(quant_config, ComposableConfig):
result = []
for q_config in quant_config.config_list:
result += q_config.expand()
return result
def __len__(self) -> int:
return len(self.config_list)

@classmethod
def _from_single_config(cls, config: BaseConfig) -> List[BaseConfig]:
config_list = []
config_list = config.expand()
return config_list

@classmethod
def _from_list_of_configs(cls, fwk_configs: List[BaseConfig]) -> List[BaseConfig]:
config_list = []
for config in fwk_configs:
config_list += cls._from_single_config(config)
return config_list

@classmethod
def generate_config_list(cls, fwk_configs: Union[BaseConfig, List[BaseConfig]]):
# There are several cases for the input `fwk_configs`:
# 1. fwk_configs is a single config
# 2. fwk_configs is a list of configs
# For a single config, we need to check if it can be expanded or not.
config_list = []
if isinstance(fwk_configs, BaseConfig):
config_list = cls._from_single_config(fwk_configs)
elif isinstance(fwk_configs, List):
config_list = cls._from_list_of_configs(fwk_configs)
else:
return quant_config.expand()
raise NotImplementedError(f"Unsupported type {type(fwk_configs)} for fwk_configs.")
return config_list

@classmethod
def from_fwk_configs(cls, fwk_configs: Union[BaseConfig, List[BaseConfig]]) -> "ConfigSet":
"""Create a ConfigSet object from a single config or a list of configs.
Args:
fwk_configs: A single config or a list of configs.
Examples:
1) single config: RTNConfig(weight_group_size=32)
2) single expandable config: RTNConfig(weight_group_size=[32, 64])
3) mixed 1) and 2): [RTNConfig(weight_group_size=32), RTNConfig(weight_group_size=[32, 64])]
Returns:
ConfigSet: A ConfigSet object.
"""
config_list = cls.generate_config_list(fwk_configs)
return cls(config_list)
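A usage sketch of the docstring cases above; the ConfigSet API is the one added in this file, while the RTNConfig import path is an assumption:

from neural_compressor.common.base_tuning import ConfigSet
from neural_compressor.torch.quantization import RTNConfig  # assumed import path

# Case 3): one fixed config plus one expandable config.
config_set = ConfigSet.from_fwk_configs(
    [RTNConfig(weight_group_size=32), RTNConfig(weight_group_size=[32, 64])]
)
print(len(config_set))          # 3 entries: 1 + 2
print(config_set[0].to_dict())  # each entry is a fully specified config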


class Sampler:
def __init__(self, config_source: Optional[ConfigSet]) -> None:
pass

def __iter__(self) -> Iterator[BaseConfig]:
"""Iterate over indices of config set elements."""
raise NotImplementedError

def parse_quant_configs(self) -> List[BaseConfig]:
# TODO (Yi) separate this functionality into `Sampler` in the next PR
quant_config_list = []
for quant_config in self.config_set:
quant_config_list.extend(ConfigLoader.parse_quant_config(quant_config))
return quant_config_list

class SequentialSampler(Sampler):
"""Samples elements sequentially, always in the same order.
Args:
config_source (_ConfigSet): config set to sample from
"""

config_source: Sized

def __init__(self, config_source: Sized) -> None:
self.config_source = config_source

def __iter__(self) -> Iterator[int]:
return iter(range(len(self.config_source)))

def __len__(self) -> int:
return len(self.config_source)


default_sampler = SequentialSampler


class ConfigLoader:
def __init__(self, config_set: ConfigSet, sampler: Sampler = default_sampler) -> None:
self.config_set = ConfigSet.from_fwk_configs(config_set)
self._sampler = sampler(self.config_set)

def __iter__(self) -> Generator[BaseConfig, Any, None]:
for config in self.parse_quant_configs():
yield config
for index in self._sampler:
yield self.config_set[index]
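With this split, ConfigLoader wraps whatever it is given in a ConfigSet and yields configs in the order the sampler emits indices (SequentialSampler by default). A small usage sketch, again assuming the RTNConfig import path:

from neural_compressor.common.base_tuning import ConfigLoader, SequentialSampler
from neural_compressor.torch.quantization import RTNConfig  # assumed import path

loader = ConfigLoader(config_set=RTNConfig(weight_group_size=[32, 64]), sampler=SequentialSampler)
for trial_index, config in enumerate(loader):
    # Each iteration yields one fully specified config, in sampler order.
    print(trial_index, config.to_dict())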


class TuningLogger:
@@ -211,12 +282,14 @@ class TuningConfig:
Args:
config_set: quantization configs. Default value is empty.
timeout: Tuning timeout (seconds). Default value is 0 which means early stop.
A single config or a list of configs. More details can
be found in the `from_fwk_configs` method of the `ConfigSet` class.
max_trials: Max tuning times. Default value is 100. Combine with timeout field to decide when to exit.
tolerable_loss: This float indicates how much metric loss we can accept. \
The metric loss is relative, it can be both positive and negative. Default is 0.01.
Examples:
# TODO: to refine it
from neural_compressor import TuningConfig
tune_config = TuningConfig(
config_set=[config1, config2, ...],
@@ -239,28 +312,13 @@
# The best tuning config is config2, because of the following:
# 1. Not achieving the set goal. (config_metric < fp32_baseline * (1 - tolerable_loss))
# 2. Reached maximum tuning times.
# Case 3: Timeout
tune_config = TuningConfig(
config_set=[config1, config2, ...],
timeout=10, # seconds
max_trials=3,
tolerable_loss=0.01
)
config1_tuning_time, config2_tuning_time, config3_tuning_time, ... = 4, 5, 6, ... # seconds
fp32_baseline = 100
config1_metric, config2_metric, config3_metric, ... = 98, 98, 97, ...
# Tuning result of case 3:
# The best tuning config is config2, due to timeout, the third trial was forced to exit.
"""

def __init__(
self, config_set=None, timeout=0, max_trials=100, sampler: Sampler = None, tolerable_loss=0.01
self, config_set=None, max_trials=100, sampler: Sampler = default_sampler, tolerable_loss=0.01
) -> None:
"""Init a TuneCriterion object."""
self.config_set = config_set
self.timeout = timeout
self.max_trials = max_trials
self.sampler = sampler
self.tolerable_loss = tolerable_loss
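Note that the constructor above no longer accepts a `timeout` argument. A minimal construction sketch against the new signature, using the import shown in the docstring; the RTNConfig import path and values are illustrative:

from neural_compressor import TuningConfig
from neural_compressor.torch.quantization import RTNConfig  # assumed import path

tune_config = TuningConfig(
    config_set=RTNConfig(weight_group_size=[32, 64]),  # expandable config -> 2 trials
    max_trials=2,
    tolerable_loss=0.01,
)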
(Diffs for the remaining 11 changed files are not shown here.)
