Commit af10495
Signed-off-by: Cheng, Zixuan <[email protected]>
1 parent 55d618f
Showing 28 changed files with 1,994 additions and 0 deletions.
1 change: 1 addition & 0 deletions
examples/pytorch/image_recognition/3d-unet/quantization/ptq/fx/.dockerignore
@@ -0,0 +1 @@
build/
1 change: 1 addition & 0 deletions
examples/pytorch/image_recognition/3d-unet/quantization/ptq/fx/.gitignore
@@ -0,0 +1 @@
build/
204 changes: 204 additions & 0 deletions
examples/pytorch/image_recognition/3d-unet/quantization/ptq/fx/Makefile
@@ -0,0 +1,204 @@
# Copyright (c) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

SHELL := /bin/bash

MAKEFILE_NAME := $(lastword $(MAKEFILE_LIST))
UNAME := $(shell whoami)
UID := $(shell id -u `whoami`)
GROUPNAME := $(shell id -gn `whoami`)
GROUPID := $(shell id -g `whoami`)

HOST_VOL ?= ${PWD}
CONTAINER_VOL ?= /workspace

BUILD_DIR := build
ifndef DOWNLOAD_DATA_DIR
export DOWNLOAD_DATA_DIR := $(HOST_VOL)/$(BUILD_DIR)/MICCAI_BraTS_2019_Data_Training
endif
RAW_DATA_DIR := $(BUILD_DIR)/raw_data
PREPROCESSED_DATA_DIR := $(BUILD_DIR)/preprocessed_data
POSTPROCESSED_DATA_DIR := $(BUILD_DIR)/postprocessed_data
MODEL_DIR := $(BUILD_DIR)/model
RESULT_DIR := $(BUILD_DIR)/result
MLPERF_CONF := $(BUILD_DIR)/mlperf.conf
PYTORCH_MODEL := $(RESULT_DIR)/fold_1.zip
ONNX_MODEL := $(MODEL_DIR)/224_224_160.onnx
ONNX_DYNAMIC_BS_MODEL := $(MODEL_DIR)/224_224_160_dynamic_bs.onnx
TF_MODEL := $(MODEL_DIR)/224_224_160.pb
OPENVINO_MODEL := $(MODEL_DIR)/brats_model_checkpoint_final_fold1_H224_W224_D160_C4.bin
OPENVINO_MODEL_METADATA := $(MODEL_DIR)/brats_model_checkpoint_final_fold1_H224_W224_D160_C4.xml

# Env variables needed by nnUnet
export nnUNet_raw_data_base=$(RAW_DATA_DIR)
export nnUNet_preprocessed=$(PREPROCESSED_DATA_DIR)
export RESULTS_FOLDER=$(RESULT_DIR)

.PHONY: setup
setup: check_download_data_dir create_directories
	@echo "Running basic setup..."
	@if [ ! -e $(MLPERF_CONF) ]; then \
		cp ../../../mlperf.conf $(MLPERF_CONF); \
	fi
	@$(MAKE) -f $(MAKEFILE_NAME) init_submodule
	@$(MAKE) -f $(MAKEFILE_NAME) download_model

.PHONY: check_download_data_dir
check_download_data_dir:
	@if [ ! -e $(DOWNLOAD_DATA_DIR) ]; then \
		echo "Please set environment variable DOWNLOAD_DATA_DIR to <path/to/MICCAI_BraTS_2019_Data_Training>" && false ; \
	fi

.PHONY: create_directories
create_directories:
	@if [ ! -e $(BUILD_DIR) ]; then \
		mkdir $(BUILD_DIR); \
	fi
	@if [ ! -e $(MODEL_DIR) ]; then \
		mkdir $(MODEL_DIR); \
	fi
	@if [ ! -e $(RESULT_DIR) ]; then \
		mkdir $(RESULT_DIR); \
	fi

.PHONY: init_submodule
init_submodule:
	@echo "Initialize nnUnet submodule.."
	#@git submodule update --init nnUnet

.PHONY: download_model
download_model:
	@echo "Download models..."
	@$(MAKE) -f $(MAKEFILE_NAME) download_pytorch_model
	@$(MAKE) -f $(MAKEFILE_NAME) download_onnx_model
	@$(MAKE) -f $(MAKEFILE_NAME) download_tf_model
	@$(MAKE) -f $(MAKEFILE_NAME) download_openvino_model

.PHONY: download_pytorch_model
download_pytorch_model: create_directories
	@echo "Downloading PyTorch model from Zenodo..."
	@if [ ! -e $(PYTORCH_MODEL) ]; then \
		wget -O $(PYTORCH_MODEL) https://zenodo.org/record/3904106/files/fold_1.zip?download=1 \
		&& cd $(RESULT_DIR) && unzip -o fold_1.zip; \
	fi

.PHONY: download_onnx_model
download_onnx_model: create_directories
	@echo "Downloading ONNX model from Zenodo..."
	@if [ ! -e $(ONNX_MODEL) ]; then \
		wget -O $(ONNX_MODEL) https://zenodo.org/record/3928973/files/224_224_160.onnx?download=1; \
	fi
	@if [ ! -e $(ONNX_DYNAMIC_BS_MODEL) ]; then \
		wget -O $(ONNX_DYNAMIC_BS_MODEL) https://zenodo.org/record/3928973/files/224_224_160_dyanmic_bs.onnx?download=1; \
	fi

.PHONY: download_tf_model
download_tf_model: create_directories
	@echo "Downloading TF model from Zenodo..."
	@if [ ! -e $(TF_MODEL) ]; then \
		wget -O $(TF_MODEL) https://zenodo.org/record/3928991/files/224_224_160.pb?download=1; \
	fi

.PHONY: download_openvino_model
download_openvino_model: create_directories
	@echo "Downloading OpenVINO model from Zenodo..."
	@if [ ! -e $(OPENVINO_MODEL) ]; then \
		wget -O $(OPENVINO_MODEL) https://zenodo.org/record/3929002/files/brats_model_checkpoint_final_fold1_H224_W224_D160_C4.bin?download=1; \
	fi
	@if [ ! -e $(OPENVINO_MODEL_METADATA) ]; then \
		wget -O $(OPENVINO_MODEL_METADATA) https://zenodo.org/record/3929002/files/brats_model_checkpoint_final_fold1_H224_W224_D160_C4.xml?download=1; \
	fi

.PHONY: convert_onnx_model
convert_onnx_model: download_pytorch_model
	@echo "Converting PyTorch model to ONNX model..."
	@if [ ! -e $(ONNX_MODEL) ]; then \
		python3 unet_pytorch_to_onnx.py; \
	fi

.PHONY: convert_tf_model
convert_tf_model: convert_onnx_model
	@echo "Converting ONNX model to TF model..."
	@if [ ! -e $(TF_MODEL) ]; then \
		python3 unet_onnx_to_tf.py; \
	fi

.PHONY: preprocess_data
preprocess_data: create_directories
	@echo "Restructuring raw data to $(RAW_DATA_DIR)..."
	@if [ ! -e $(RAW_DATA_DIR) ]; then \
		mkdir $(RAW_DATA_DIR); \
	fi
	@python3 Task043_BraTS_2019.py --downloaded_data_dir $(DOWNLOAD_DATA_DIR)
	@echo "Preprocessing and saving preprocessed data to $(PREPROCESSED_DATA_DIR)..."
	@if [ ! -e $(PREPROCESSED_DATA_DIR) ]; then \
		mkdir $(PREPROCESSED_DATA_DIR); \
	fi
	@python3 preprocess.py

.PHONY: mkdir_postprocessed_data
mkdir_postprocessed_data:
	@if [ ! -e $(POSTPROCESSED_DATA_DIR) ]; then \
		mkdir $(POSTPROCESSED_DATA_DIR); \
	fi

.PHONY: run_pytorch_performance
run_pytorch_performance:
	@python3 run.py --backend=pytorch

.PHONY: run_pytorch_accuracy
run_pytorch_accuracy: mkdir_postprocessed_data
	@python3 run.py --backend=pytorch --accuracy

.PHONY: run_pytorch_NC_tuning
run_pytorch_NC_tuning: mkdir_postprocessed_data
	@python3 run.py --backend=pytorch --accuracy --tune --mlperf_conf=./mlperf.conf

.PHONY: run_onnxruntime_performance
run_onnxruntime_performance:
	@python3 run.py --backend=onnxruntime --model=build/model/224_224_160.onnx

.PHONY: run_onnxruntime_accuracy
run_onnxruntime_accuracy: mkdir_postprocessed_data
	@python3 run.py --backend=onnxruntime --model=build/model/224_224_160.onnx --accuracy

.PHONY: run_tf_performance
run_tf_performance:
	@python3 run.py --backend=tf --model=build/model/224_224_160.pb

.PHONY: run_tf_accuracy
run_tf_accuracy: mkdir_postprocessed_data
	@python3 run.py --backend=tf --model=build/model/224_224_160.pb --accuracy

.PHONY: evaluate
evaluate:
	@python3 accuracy-brats.py

.PHONY: clean
clean:
	@rm -rf build
76 changes: 76 additions & 0 deletions
examples/pytorch/image_recognition/3d-unet/quantization/ptq/fx/README.md
@@ -0,0 +1,76 @@
Step-by-Step
============

This example demonstrates the steps to reproduce quantization and benchmarking results with Intel® Neural Compressor.

The 3D-Unet source code comes from [mlperf](https://github.com/mlcommons/inference/tree/v1.0.1/vision/medical_imaging/3d-unet), commit SHA **b7e8f0da170a421161410d18e5d2a05d75d6bccf**; the [nnUnet](https://github.com/MIC-DKFZ/nnUNet) commit SHA is **b38c69b345b2f60cd0d053039669e8f988b0c0af**. Users can diff those sources against this example to see which changes were made to integrate with Intel® Neural Compressor.

The model performs the [BraTS 2019](https://www.med.upenn.edu/cbica/brats2019/data.html) brain tumor segmentation task.

# Prerequisite
## 1. Environment
Python 3.6 or a later version is recommended.
All dependent packages are listed in requirements.txt; install them as follows.
```shell
cd examples/pytorch/image_recognition/3d-unet/quantization/ptq/fx
pip install -r requirements.txt
```
## 2. Preprocess Dataset
```shell
# download BraTS 2019 from https://www.med.upenn.edu/cbica/brats2019/data.html
export DOWNLOAD_DATA_DIR=<path/to/MICCAI_BraTS_2019_Data_Training> # point to location of downloaded BraTS 2019 Training dataset.

# install dependency required by data preprocessing script
git clone https://github.com/MIC-DKFZ/nnUNet.git --recursive
cd nnUNet/
git checkout b38c69b345b2f60cd0d053039669e8f988b0c0af
# replace sklearn in the older version with scikit-learn
sed -i 's/sklearn/scikit-learn/g' setup.py
python setup.py install
cd ..

# download pytorch model
make download_pytorch_model

# generate preprocessed data
make preprocess_data

# create postprocess dir
make mkdir_postprocessed_data

# generate calibration preprocessed data
python preprocess.py --preprocessed_data_dir=./build/calib_preprocess/ --validation_fold_file=./brats_cal_images_list.txt

# install mlperf loadgen required by tuning script
git clone https://github.com/mlcommons/inference.git --recursive
cd inference
git checkout b7e8f0da170a421161410d18e5d2a05d75d6bccf
cd loadgen
pip install absl-py
python setup.py install
cd ../..
```

# Run
## 1. Quantization

```shell
make run_pytorch_NC_tuning
```

or

```shell
python run.py --model_dir=build/result/nnUNet/3d_fullres/Task043_BraTS2019/nnUNetTrainerV2__nnUNetPlansv2.mlperf.1 --backend=pytorch --accuracy --preprocessed_data_dir=build/preprocessed_data/ --mlperf_conf=./mlperf.conf --tune
```
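
The `--tune` flag above routes `run.py` through Intel® Neural Compressor post-training quantization. For orientation only, below is a minimal sketch of that kind of PTQ flow written against what is assumed to be the Neural Compressor 2.x API; the toy network, random calibration data, dummy `eval_func`, and output path are illustrative placeholders, not code from this example.

```python
# Hedged sketch of Neural Compressor post-training static quantization (assumed 2.x API).
import torch
from torch.utils.data import DataLoader, TensorDataset
from neural_compressor import PostTrainingQuantConfig
from neural_compressor.quantization import fit

# Stand-in for the 3D-UNet: a tiny Conv3d block over 4-channel volumes.
model = torch.nn.Sequential(torch.nn.Conv3d(4, 8, kernel_size=3), torch.nn.ReLU())

# Placeholder calibration data; the real example feeds preprocessed BraTS volumes.
calib_set = TensorDataset(torch.randn(4, 4, 32, 32, 32), torch.zeros(4))
calib_loader = DataLoader(calib_set, batch_size=1)

def eval_func(model_to_eval):
    # The real example computes mean Dice on the validation fold; a dummy score is returned here.
    with torch.no_grad():
        model_to_eval(torch.randn(1, 4, 32, 32, 32))
    return 1.0

conf = PostTrainingQuantConfig(approach="static")
q_model = fit(model=model, conf=conf, calib_dataloader=calib_loader, eval_func=eval_func)
q_model.save("./saved_int8_model")  # illustrative output directory
```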
## 2. Benchmark
```bash
# int8
sh run_benchmark.sh --int8=true --input_model=build/result/nnUNet/3d_fullres/Task043_BraTS2019/nnUNetTrainerV2__nnUNetPlansv2.mlperf.1 --dataset_location=build/preprocessed_data/
# fp32
sh run_benchmark.sh --input_model=build/result/nnUNet/3d_fullres/Task043_BraTS2019/nnUNetTrainerV2__nnUNetPlansv2.mlperf.1 --dataset_location=build/preprocessed_data/
```
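
For the int8 path, the benchmark has to reload the model produced by the tuning step. The snippet below is a hedged sketch of how a Neural Compressor-tuned PyTorch model is typically reloaded; the `./saved_results` directory name and the stand-in network are assumptions, not paths taken from this example's scripts.

```python
# Hedged sketch: reload an int8 model saved by a Neural Compressor tuning run.
import torch
from neural_compressor.utils.pytorch import load

# Stand-in for the original fp32 3D-UNet definition.
fp32_model = torch.nn.Sequential(torch.nn.Conv3d(4, 8, kernel_size=3), torch.nn.ReLU())

# Directory written by the tuning run (name assumed for illustration).
int8_model = load("./saved_results", fp32_model)

int8_model.eval()
with torch.no_grad():
    output = int8_model(torch.randn(1, 4, 32, 32, 32))
print(output.shape)
```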
## 3. Model Baseline
| model | framework | accuracy | dataset | model link | model source | precision |
| - | - | - | - | - | - | - |
| 3D-Unet | PyTorch | **mean = 0.85300** (whole tumor = 0.9141, tumor core = 0.8679, enhancing tumor = 0.7770) | [Fold 1](folds/fold1_validation.txt) of [BraTS 2019](https://www.med.upenn.edu/cbica/brats2019/data.html) Training Dataset | [from zenodo](https://zenodo.org/record/3904106) | Trained in PyTorch using code from [nnUnet](https://github.com/MIC-DKFZ/nnUNet) on [Fold 0](folds/fold0_validation.txt), [Fold 2](folds/fold2_validation.txt), [Fold 3](folds/fold3_validation.txt), and [Fold 4](folds/fold4_validation.txt) of [BraTS 2019](https://www.med.upenn.edu/cbica/brats2019/data.html) Training Dataset. | fp32 |

The reported mean Dice is the average of the three per-class scores: (0.9141 + 0.8679 + 0.7770) / 3 = 0.8530.