Fix logger (#1583)
Signed-off-by: yiliu30 <[email protected]>
Signed-off-by: chensuyue <[email protected]>
yiliu30 authored Jan 31, 2024
1 parent fb61428 commit 83bc779
Showing 12 changed files with 175 additions and 161 deletions.
2 changes: 1 addition & 1 deletion .azure-pipelines/scripts/ut/3x/run_3x_pt.sh
@@ -51,7 +51,7 @@ fi

# check pytest issue
if [ -s run_pytest.sh ]; then
-if [ $(grep -c '== FAILURES ==' ${ut_log_name}) != 0 ] || [ $(grep -c '== ERRORS ==' ${ut_log_name}) != 0 ] || [ $(grep -c 'passed,' ${ut_log_name}) == 0 ]; then
+if [ $(grep -c '== FAILURES ==' ${ut_log_name}) != 0 ] || [ $(grep -c '== ERRORS ==' ${ut_log_name}) != 0 ] || [ $(grep -c ' passed ' ${ut_log_name}) == 0 ]; then
echo "Find errors in pytest case, please check the output..."
echo "Please search for '== FAILURES ==' or '== ERRORS =='"
ut_status="failed"
15 changes: 5 additions & 10 deletions docs/source/adaptor.md
@@ -44,22 +44,17 @@ For example, a user can implement an `Abc` adaptor like below:
```python
@adaptor_registry
class AbcAdaptor(Adaptor):
-    def __init__(self, framework_specific_info):
-        ...
+    def __init__(self, framework_specific_info): ...

-    def quantize(self, tune_cfg, model, dataloader, q_func=None):
-        ...
+    def quantize(self, tune_cfg, model, dataloader, q_func=None): ...

    def evaluate(
        self, model, dataloader, postprocess=None, metric=None, measurer=None, iteration=-1, tensorboard=False
-    ):
-        ...
+    ): ...

-    def query_fw_capability(self, model):
-        ...
+    def query_fw_capability(self, model): ...

-    def query_fused_patterns(self, model):
-        ...
+    def query_fused_patterns(self, model): ...
```

* `quantize` function is used to perform quantization for post-training quantization and quantization-aware training. Quantization processing includes calibration and conversion processing for post-training quantization, while for quantization-aware training, it includes training and conversion processing.
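As a rough illustration of that split (a sketch, not code from this commit or the document): imports and registration are omitted as in the example above, and `_run_calibration` / `_convert` are hypothetical helper names used only for illustration.

```python
# Hypothetical sketch of how an adaptor's quantize() might branch between the two
# modes; _run_calibration and _convert are placeholder helpers, not real API.
class AbcAdaptor(Adaptor):
    def quantize(self, tune_cfg, model, dataloader, q_func=None):
        if q_func is not None:
            # Quantization-aware training: run the user-supplied training loop first.
            q_func(model)
        else:
            # Post-training quantization: feed calibration data through the model.
            self._run_calibration(model, dataloader, tune_cfg)
        # Both modes finish with a conversion step that produces the quantized model.
        return self._convert(model, tune_cfg)
```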
18 changes: 6 additions & 12 deletions docs/source/migration.md
@@ -41,8 +41,7 @@ val_dataloader = torch.utils.data.Dataloader(
)


-def eval_func(model):
-    ...
+def eval_func(model): ...


# Quantization code
@@ -115,8 +114,7 @@ val_dataloader = torch.utils.data.Dataloader(
)


-def eval_func(model):
-    ...
+def eval_func(model): ...


# Quantization code
@@ -147,12 +145,10 @@ model = AutoModelForSequenceClassification.from_pretrained(model_name_or_path)
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path)


-def eval_func(model):
-    ...
+def eval_func(model): ...


-def train_func(model):
-    ...
+def train_func(model): ...


trainer = Trainer(...)
@@ -213,12 +209,10 @@ model = AutoModelForSequenceClassification.from_pretrained(model_name_or_path)
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path)


-def eval_func(model):
-    ...
+def eval_func(model): ...


-def train_func(model):
-    ...
+def train_func(model): ...


trainer = Trainer(...)
1 change: 1 addition & 0 deletions docs/source/pruning.md
@@ -342,6 +342,7 @@ The following section exemplifies how to use hooks in user pass-in training func
on_after_optimizer_step() # Update weights' criteria, mask weights
on_train_end() # End of pruner, print sparse information
"""

from neural_compressor.training import prepare_compression, WeightPruningConfig

config = WeightPruningConfig(configs)
3 changes: 1 addition & 2 deletions docs/source/quantization.md
@@ -319,8 +319,7 @@ criterion = ...


# Quantization code
-def train_func(model):
-    ...
+def train_func(model): ...


from neural_compressor import QuantizationAwareTrainingConfig
3 changes: 1 addition & 2 deletions docs/source/tuning_strategies.md
@@ -507,8 +507,7 @@ For example, user can implement an `Abc` strategy like below:
```python
@strategy_registry
class AbcTuneStrategy(TuneStrategy):
-    def __init__(self, model, conf, q_dataloader, q_func=None, eval_dataloader=None, eval_func=None, dicts=None):
-        ...
+    def __init__(self, model, conf, q_dataloader, q_func=None, eval_dataloader=None, eval_func=None, dicts=None): ...

    def next_tune_cfg(self):
        # generate the next tuning config
26 changes: 5 additions & 21 deletions neural_compressor/common/__init__.py
@@ -14,37 +14,21 @@

from neural_compressor.common.utils import (
level,
log,
info,
DEBUG,
debug,
warn,
warning,
error,
fatal,
logger,
Logger,
set_random_seed,
set_workspace,
set_resume_from,
set_workspace,
set_tensorboard,
Logger,
logger,
)
from neural_compressor.common.base_config import options


__all__ = [
"level",
"log",
"info",
"DEBUG",
"debug",
"warn",
"warning",
"error",
"fatal",
"options",
"Logger",
"level",
"logger",
"Logger",
"set_workspace",
"set_random_seed",
"set_resume_from",
154 changes: 76 additions & 78 deletions neural_compressor/common/utils/logger.py
@@ -19,6 +19,28 @@
import logging
import os

__all__ = [
"level",
"Logger", # TODO: not expose it
"logger",
]


def _pretty_dict(value, indent=0):
"""Make the logger dict pretty."""
prefix = "\n" + " " * (indent + 4)
if isinstance(value, dict):
items = [prefix + repr(key) + ": " + _pretty_dict(value[key], indent + 4) for key in value]
return "{%s}" % (",".join(items) + "\n" + " " * indent)
elif isinstance(value, list):
items = [prefix + _pretty_dict(item, indent + 4) for item in value]
return "[%s]" % (",".join(items) + "\n" + " " * indent)
elif isinstance(value, tuple):
items = [prefix + _pretty_dict(item, indent + 4) for item in value]
return "(%s)" % (",".join(items) + "\n" + " " * indent)
else:
return repr(value)


class Logger(object):
"""Logger class."""
@@ -50,85 +72,61 @@ def get_logger(self):
"""Get the logger."""
return self._logger


def _pretty_dict(value, indent=0):
"""Make the logger dict pretty."""
prefix = "\n" + " " * (indent + 4)
if isinstance(value, dict):
items = [prefix + repr(key) + ": " + _pretty_dict(value[key], indent + 4) for key in value]
return "{%s}" % (",".join(items) + "\n" + " " * indent)
elif isinstance(value, list):
items = [prefix + _pretty_dict(item, indent + 4) for item in value]
return "[%s]" % (",".join(items) + "\n" + " " * indent)
elif isinstance(value, tuple):
items = [prefix + _pretty_dict(item, indent + 4) for item in value]
return "(%s)" % (",".join(items) + "\n" + " " * indent)
else:
return repr(value)
@staticmethod
def log(level, msg, *args, **kwargs):
"""Output log with the level as a parameter."""
if isinstance(msg, dict):
for _, line in enumerate(_pretty_dict(msg).split("\n")):
Logger().get_logger().log(level, line, *args, **kwargs, stacklevel=2)
else:
Logger().get_logger().log(level, msg, *args, **kwargs, stacklevel=2)

@staticmethod
def debug(msg, *args, **kwargs):
"""Output log with the debug level."""
if isinstance(msg, dict):
for _, line in enumerate(_pretty_dict(msg).split("\n")):
Logger().get_logger().debug(line, *args, **kwargs, stacklevel=2)
else:
Logger().get_logger().debug(msg, *args, **kwargs, stacklevel=2)

@staticmethod
def error(msg, *args, **kwargs):
"""Output log with the error level."""
if isinstance(msg, dict):
for _, line in enumerate(_pretty_dict(msg).split("\n")):
Logger().get_logger().error(line, *args, **kwargs, stacklevel=2)
else:
Logger().get_logger().error(msg, *args, **kwargs, stacklevel=2)

@staticmethod
def fatal(msg, *args, **kwargs):
"""Output log with the fatal level."""
if isinstance(msg, dict):
for _, line in enumerate(_pretty_dict(msg).split("\n")):
Logger().get_logger().fatal(line, *args, **kwargs, stacklevel=2)
else:
Logger().get_logger().fatal(msg, *args, **kwargs, stacklevel=2)

@staticmethod
def info(msg, *args, **kwargs):
"""Output log with the info level."""
if isinstance(msg, dict):
for _, line in enumerate(_pretty_dict(msg).split("\n")):
Logger().get_logger().info(line, *args, **kwargs, stacklevel=2)
else:
Logger().get_logger().info(msg, *args, **kwargs, stacklevel=2)

@staticmethod
def warning(msg, *args, **kwargs):
"""Output log with the warning level (Alias of the method warn)."""
if isinstance(msg, dict):
for _, line in enumerate(_pretty_dict(msg).split("\n")):
Logger().get_logger().warning(line, *args, **kwargs, stacklevel=2)
else:
Logger().get_logger().warning(msg, *args, **kwargs, stacklevel=2)


level = Logger().get_logger().level
DEBUG = logging.DEBUG


def log(level, msg, *args, **kwargs):
"""Output log with the level as a parameter."""
if isinstance(msg, dict):
for _, line in enumerate(_pretty_dict(msg).split("\n")):
Logger().get_logger().log(level, line, *args, **kwargs)
else:
Logger().get_logger().log(level, msg, *args, **kwargs)


def debug(msg, *args, **kwargs):
"""Output log with the debug level."""
if isinstance(msg, dict):
for _, line in enumerate(_pretty_dict(msg).split("\n")):
Logger().get_logger().debug(line, *args, **kwargs)
else:
Logger().get_logger().debug(msg, *args, **kwargs)


def error(msg, *args, **kwargs):
"""Output log with the error level."""
if isinstance(msg, dict):
for _, line in enumerate(_pretty_dict(msg).split("\n")):
Logger().get_logger().error(line, *args, **kwargs)
else:
Logger().get_logger().error(msg, *args, **kwargs)


def fatal(msg, *args, **kwargs):
"""Output log with the fatal level."""
if isinstance(msg, dict):
for _, line in enumerate(_pretty_dict(msg).split("\n")):
Logger().get_logger().fatal(line, *args, **kwargs)
else:
Logger().get_logger().fatal(msg, *args, **kwargs)


def info(msg, *args, **kwargs):
"""Output log with the info level."""
if isinstance(msg, dict):
for _, line in enumerate(_pretty_dict(msg).split("\n")):
Logger().get_logger().info(line, *args, **kwargs)
else:
Logger().get_logger().info(msg, *args, **kwargs)


def warn(msg, *args, **kwargs):
"""Output log with the warning level."""
if isinstance(msg, dict):
for _, line in enumerate(_pretty_dict(msg).split("\n")):
Logger().get_logger().warning(line, *args, **kwargs)
else:
Logger().get_logger().warning(msg, *args, **kwargs)


def warning(msg, *args, **kwargs):
"""Output log with the warning level (Alias of the method warn)."""
if isinstance(msg, dict):
for _, line in enumerate(_pretty_dict(msg).split("\n")):
Logger().get_logger().warning(line, *args, **kwargs)
else:
Logger().get_logger().warning(msg, *args, **kwargs)
logger = Logger
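The net effect of the refactor above is that the removed module-level helpers (`log`, `info`, `debug`, `warn`, `warning`, `error`, `fatal`) are folded into `Logger` as static methods that forward to the wrapped `logging.Logger` with `stacklevel=2`, so emitted records report the caller's location rather than logger.py, and `logger` is now simply an alias for the `Logger` class. A minimal usage sketch, assuming the `neural_compressor.common` import path shown in the other changed files; the messages themselves are illustrative only:

```python
from neural_compressor.common import logger  # `logger` is the Logger class itself

logger.info("Start quantization")                        # plain string message
logger.warning("Fallback to FP32 for unsupported ops")

# Dict messages are expanded by _pretty_dict and logged line by line.
logger.debug({"approach": "static", "weight_bits": 8})
```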
1 change: 1 addition & 0 deletions neural_compressor/compression/pruner/README.md
@@ -343,6 +343,7 @@ The following section exemplifies how to use hooks in user pass-in training func
on_after_optimizer_step() # Update weights' criteria, mask weights
on_train_end() # End of pruner, print sparse information
"""

from neural_compressor.training import prepare_compression, WeightPruningConfig

config = WeightPruningConfig(configs)
2 changes: 1 addition & 1 deletion neural_compressor/torch/quantization/modules.py
@@ -25,7 +25,7 @@
from torch.autograd import Function
from torch.nn import functional as F

-from neural_compressor.common import DEBUG, level, logger
+from neural_compressor.common import logger
from neural_compressor.torch.algorithms.weight_only import quant_tensor


