Skip to content

Commit dc9328c

Browse files
authored
Introduce INC 3.0 quantization API and port torch RTN into 3.0 (#1380)
Signed-off-by: yiliu30 <yi4.liu@intel.com>
Signed-off-by: chensuyue <suyue.chen@intel.com>
1 parent da3442d commit dc9328c

23 files changed

+1038
-5
lines changed

.azure-pipelines/model-test.yml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,10 @@ pr:
1414
- .azure-pipelines/model-test.yml
1515
- .azure-pipelines/scripts/models
1616
- examples/tensorflow/oob_models/quantization/ptq
17+
exclude:
18+
- test
19+
- neural_compressor/common
20+
- neural_compressor/torch
1721

1822
pool: MODEL_PERF_TEST_TF
1923

.azure-pipelines/scripts/install_nc.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,6 @@
22

33
cd /neural-compressor
44
python -m pip install --no-cache-dir -r requirements.txt
5-
python setup.py sdist bdist_wheel
5+
python setup.py bdist_wheel
66
pip install dist/neural_compressor*.whl
77
pip list
Lines changed: 130 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,130 @@
1+
source /neural-compressor/.azure-pipelines/scripts/change_color.sh
2+
3+
pip install coverage
4+
export COVERAGE_RCFILE=/neural-compressor/.azure-pipelines/scripts/ut/coverage.${1}
5+
coverage_log="/neural-compressor/log_dir/coverage_log"
6+
coverage_log_base="/neural-compressor/log_dir/coverage_log_base"
7+
coverage_compare="/neural-compressor/log_dir/coverage_compare.html"
8+
cd /neural-compressor/log_dir
9+
10+
$BOLD_YELLOW && echo "collect coverage for PR branch" && $RESET
11+
cp ut_coverage_3x/.coverage /neural-compressor/
12+
mkdir -p coverage_PR
13+
cd /neural-compressor
14+
coverage report -m --rcfile=${COVERAGE_RCFILE} | tee ${coverage_log}
15+
coverage html -d log_dir/coverage_PR/htmlcov --rcfile=${COVERAGE_RCFILE}
16+
coverage xml -o log_dir/coverage_PR/coverage.xml --rcfile=${COVERAGE_RCFILE}
17+
ls -l log_dir/coverage_PR/htmlcov
18+
19+
20+
$BOLD_YELLOW && echo "collect coverage for baseline" && $RESET
21+
cd /neural-compressor
22+
git config --global --add safe.directory /neural-compressor
23+
git fetch
24+
git checkout master
25+
echo y | pip uninstall neural-compressor
26+
cd /neural-compressor/.azure-pipelines/scripts && bash install_nc.sh
27+
28+
coverage erase
29+
cd /neural-compressor/log_dir
30+
mkdir -p coverage_base
31+
rm -rf /neural-compressor/.coverage || true
32+
cp ut_coverage_3x_baseline/.coverage /neural-compressor
33+
34+
cd /neural-compressor
35+
coverage report -m --rcfile=${COVERAGE_RCFILE} | tee ${coverage_log_base}
36+
coverage html -d log_dir/coverage_base/htmlcov --rcfile=${COVERAGE_RCFILE}
37+
coverage xml -o log_dir/coverage_base/coverage.xml --rcfile=${COVERAGE_RCFILE}
38+
ls -l log_dir/coverage_base/htmlcov
39+
40+
get_coverage_data() {
41+
# Input argument
42+
local coverage_xml="$1"
43+
44+
# Get coverage data
45+
local coverage_data=$(python3 -c "import xml.etree.ElementTree as ET; root = ET.parse('$coverage_xml').getroot(); print(ET.tostring(root).decode())")
46+
if [[ -z "$coverage_data" ]]; then
47+
echo "Failed to get coverage data from $coverage_xml."
48+
exit 1
49+
fi
50+
51+
# Get lines coverage
52+
local lines_covered=$(echo "$coverage_data" | grep -o 'lines-covered="[0-9]*"' | cut -d '"' -f 2)
53+
local lines_valid=$(echo "$coverage_data" | grep -o 'lines-valid="[0-9]*"' | cut -d '"' -f 2)
54+
if [ $lines_valid == 0 ]; then
55+
local lines_coverage=0
56+
else
57+
local lines_coverage=$(awk "BEGIN {printf \"%.3f\", 100 * $lines_covered / $lines_valid}")
58+
fi
59+
60+
# Get branches coverage
61+
local branches_covered=$(echo "$coverage_data" | grep -o 'branches-covered="[0-9]*"' | cut -d '"' -f 2)
62+
local branches_valid=$(echo "$coverage_data" | grep -o 'branches-valid="[0-9]*"' | cut -d '"' -f 2)
63+
if [ $branches_valid == 0 ]; then
64+
local branches_coverage=0
65+
else
66+
local branches_coverage=$(awk "BEGIN {printf \"%.3f\", 100 * $branches_covered/$branches_valid}")
67+
fi
68+
69+
# Return values
70+
echo "$lines_covered $lines_valid $lines_coverage $branches_covered $branches_valid $branches_coverage"
71+
}
72+
73+
$BOLD_YELLOW && echo "compare coverage" && $RESET
74+
75+
coverage_PR_xml="log_dir/coverage_PR/coverage.xml"
76+
coverage_PR_data=$(get_coverage_data $coverage_PR_xml)
77+
read lines_PR_covered lines_PR_valid coverage_PR_lines_rate branches_PR_covered branches_PR_valid coverage_PR_branches_rate <<<"$coverage_PR_data"
78+
79+
coverage_base_xml="log_dir/coverage_base/coverage.xml"
80+
coverage_base_data=$(get_coverage_data $coverage_base_xml)
81+
read lines_base_covered lines_base_valid coverage_base_lines_rate branches_base_covered branches_base_valid coverage_base_branches_rate <<<"$coverage_base_data"
82+
83+
$BOLD_BLUE && echo "PR lines coverage: $lines_PR_covered/$lines_PR_valid ($coverage_PR_lines_rate%)" && $RESET
84+
$BOLD_BLUE && echo "PR branches coverage: $branches_PR_covered/$branches_PR_valid ($coverage_PR_branches_rate%)" && $RESET
85+
$BOLD_BLUE && echo "BASE lines coverage: $lines_base_covered/$lines_base_valid ($coverage_base_lines_rate%)" && $RESET
86+
$BOLD_BLUE && echo "BASE branches coverage: $branches_base_covered/$branches_base_valid ($coverage_base_branches_rate%)" && $RESET
87+
88+
$BOLD_YELLOW && echo "clear upload path" && $RESET
89+
rm -fr log_dir/coverage_PR/.coverage*
90+
rm -fr log_dir/coverage_base/.coverage*
91+
rm -fr log_dir/ut-coverage-*
92+
93+
# Declare an array to hold failed items
94+
declare -a fail_items=()
95+
96+
if (( $(bc -l <<< "${coverage_PR_lines_rate}+0.05 < ${coverage_base_lines_rate}") )); then
97+
fail_items+=("lines")
98+
fi
99+
if (( $(bc -l <<< "${coverage_PR_branches_rate}+0.05 < ${coverage_base_branches_rate}") )); then
100+
fail_items+=("branches")
101+
fi
102+
103+
if [[ ${#fail_items[@]} -ne 0 ]]; then
104+
fail_items_str=$(
105+
IFS=', '
106+
echo "${fail_items[*]}"
107+
)
108+
for item in "${fail_items[@]}"; do
109+
case "$item" in
110+
lines)
111+
decrease=$(echo $(printf "%.3f" $(echo "$coverage_PR_lines_rate - $coverage_base_lines_rate" | bc -l)))
112+
;;
113+
branches)
114+
decrease=$(echo $(printf "%.3f" $(echo "$coverage_PR_branches_rate - $coverage_base_branches_rate" | bc -l)))
115+
;;
116+
*)
117+
echo "Unknown item: $item"
118+
continue
119+
;;
120+
esac
121+
$BOLD_RED && echo "Unit Test failed with ${item} coverage decrease ${decrease}%" && $RESET
122+
done
123+
$BOLD_RED && echo "compare coverage to give detail info" && $RESET
124+
bash /neural-compressor/.azure-pipelines/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "FAILED" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate}
125+
exit 1
126+
else
127+
$BOLD_GREEN && echo "Unit Test success with coverage lines: ${coverage_PR_lines_rate}%, branches: ${coverage_PR_branches_rate}%" && $RESET
128+
$BOLD_GREEN && echo "compare coverage to give detail info" && $RESET
129+
bash /neural-compressor/.azure-pipelines/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "SUCCESS" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate}
130+
fi

.azure-pipelines/scripts/ut/coverage.file

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,8 @@ omit =
1515
*/neural_compressor/adaptor/tf_utils/quantize_graph/qdq/fuse_qdq_in.py
1616
*/neural_compressor/adaptor/tf_utils/graph_rewriter/int8/freeze_value.py
1717
*/neural_compressor/template/*
18+
*/neural_compressor/common/*
19+
*/neural_compressor/torch/*
1820
exclude_lines =
1921
pragma: no cover
2022
raise NotImplementedError
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
[run]
2+
branch = True
3+
4+
[report]
5+
include =
6+
*/neural_compressor/common/*
7+
*/neural_compressor/torch/*
8+
exclude_lines =
9+
pragma: no cover
10+
raise NotImplementedError
11+
raise TypeError
12+
if self.device == "gpu":
13+
if device == "gpu":
14+
except ImportError:
15+
except Exception as e:
Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
#!/bin/bash
2+
python -c "import neural_compressor as nc;print(nc.version.__version__)"
3+
test_case="run 3x Torch"
4+
echo "${test_case}"
5+
6+
# install requirements
7+
echo "set up UT env..."
8+
pip install -r /neural-compressor/requirements_pt.txt
9+
pip install coverage
10+
pip install pytest
11+
pip list
12+
13+
export COVERAGE_RCFILE=/neural-compressor/.azure-pipelines/scripts/ut/coverage.pt
14+
lpot_path=$(python -c 'import neural_compressor; import os; print(os.path.dirname(neural_compressor.__file__))')
15+
cd /neural-compressor/test || exit 1
16+
find ./3x/torch/* -name "test*.py" | sed 's,\.\/,coverage run --source='"${lpot_path}"' --append ,g' | sed 's/$/ --verbose/'> run.sh
17+
18+
LOG_DIR=/neural-compressor/log_dir
19+
mkdir -p ${LOG_DIR}
20+
ut_log_name=${LOG_DIR}/ut_3x_pt.log
21+
22+
echo "cat run.sh..."
23+
sort run.sh -o run.sh
24+
cat run.sh | tee ${ut_log_name}
25+
echo "------UT start-------"
26+
bash -x run.sh 2>&1 | tee -a ${ut_log_name}
27+
cp .coverage ${LOG_DIR}/.coverage
28+
29+
echo "------UT end -------"
30+
31+
if [ $(grep -c "FAILED" ${ut_log_name}) != 0 ] || [ $(grep -c "core dumped" ${ut_log_name}) != 0 ] || [ $(grep -c "ModuleNotFoundError:" ${ut_log_name}) != 0 ] || [ $(grep -c "OK" ${ut_log_name}) == 0 ];then
32+
echo "Find errors in UT test, please check the output..."
33+
exit 1
34+
fi
35+
echo "UT finished successfully! "

.azure-pipelines/scripts/ut/run_basic_others.sh

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ sed -i '/ mixed_precision/d' run.sh
2525
sed -i '/ distillation\//d' run.sh
2626
sed -i '/ scheduler\//d' run.sh
2727
sed -i '/ nas\//d' run.sh
28+
sed -i '/ 3x\//d' run.sh
2829

2930
echo "copy model for dynas..."
3031
mkdir -p .torch/ofa_nets || true

.azure-pipelines/ut-3x-pt.yml

Lines changed: 108 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,108 @@
1+
trigger: none
2+
3+
pr:
4+
autoCancel: true
5+
drafts: false
6+
branches:
7+
include:
8+
- master
9+
paths:
10+
include:
11+
- neural_compressor/common
12+
- neural_compressor/torch
13+
- test/3x/torch
14+
- setup.py
15+
- requirements.txt
16+
- requirements_pt.txt
17+
- .azure-pipelines/scripts/ut
18+
19+
pool: ICX-16C
20+
21+
variables:
22+
IMAGE_NAME: "neural-compressor"
23+
IMAGE_TAG: "py38"
24+
UPLOAD_PATH: $(Build.SourcesDirectory)/log_dir
25+
DOWNLOAD_PATH: $(Build.SourcesDirectory)/log_dir
26+
ARTIFACT_NAME: "UT_coverage_report_3x_pt"
27+
REPO: $(Build.Repository.Uri)
28+
29+
stages:
30+
- stage: Torch
31+
displayName: Unit Test 3x Torch
32+
dependsOn: []
33+
jobs:
34+
- job:
35+
displayName: Unit Test 3x Torch
36+
steps:
37+
- template: template/ut-template.yml
38+
parameters:
39+
dockerConfigName: "commonDockerConfig"
40+
utScriptFileName: "run_3x_pt"
41+
uploadPath: $(UPLOAD_PATH)
42+
utArtifact: "ut_coverage_3x"
43+
44+
45+
- stage: Torch_baseline
46+
displayName: Unit Test 3x Torch baseline
47+
dependsOn: []
48+
jobs:
49+
- job:
50+
displayName: Unit Test 3x Torch baseline
51+
steps:
52+
- template: template/ut-template.yml
53+
parameters:
54+
dockerConfigName: "gitCloneDockerConfig"
55+
utScriptFileName: "run_3x_pt"
56+
uploadPath: $(UPLOAD_PATH)
57+
utArtifact: "ut_coverage_3x_baseline"
58+
repo: $(REPO)
59+
60+
- stage: Coverage
61+
displayName: "Coverage Combine"
62+
pool:
63+
vmImage: "ubuntu-latest"
64+
dependsOn: [Torch, Torch_baseline]
65+
jobs:
66+
- job: CollectDatafiles
67+
steps:
68+
- script: |
69+
if [[ ! $(docker images | grep -i ${IMAGE_NAME}:${IMAGE_TAG}) ]]; then
70+
docker build -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/Dockerfile.devel -t ${IMAGE_NAME}:${IMAGE_TAG} .
71+
fi
72+
docker images | grep -i ${IMAGE_NAME}
73+
if [[ $? -ne 0 ]]; then
74+
echo "NO Such Repo"
75+
exit 1
76+
fi
77+
displayName: "Build develop docker image"
78+
79+
- task: DownloadPipelineArtifact@2
80+
inputs:
81+
artifact:
82+
path: $(DOWNLOAD_PATH)
83+
84+
- script: |
85+
echo "--- create container ---"
86+
docker run -d -it --name="collectLogs" -v ${BUILD_SOURCESDIRECTORY}:/neural-compressor ${IMAGE_NAME}:${IMAGE_TAG} /bin/bash
87+
echo "--- docker ps ---"
88+
docker ps
89+
echo "--- collect logs ---"
90+
docker exec collectLogs /bin/bash +x -c "cd /neural-compressor/.azure-pipelines/scripts \
91+
&& bash install_nc.sh \
92+
&& bash ut/collect_log_3x.sh pt"
93+
displayName: "collect logs"
94+
95+
- task: PublishPipelineArtifact@1
96+
condition: succeededOrFailed()
97+
inputs:
98+
targetPath: $(UPLOAD_PATH)
99+
artifact: $(ARTIFACT_NAME)
100+
publishLocation: "pipeline"
101+
102+
- task: Bash@3
103+
condition: always()
104+
inputs:
105+
targetType: "inline"
106+
script: |
107+
docker exec collectLogs bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true"
108+
displayName: "Docker clean up"

.azure-pipelines/ut-basic-no-cover.yml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,9 @@ pr:
1515
- .azure-pipelines/scripts/ut
1616
exclude:
1717
- test/neural_coder
18+
- test/3x
19+
- neural_compressor/common
20+
- neural_compressor/torch
1821

1922
pool: ICX-16C
2023

.azure-pipelines/ut-basic.yml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,9 @@ pr:
1515
- .azure-pipelines/scripts/ut
1616
exclude:
1717
- test/neural_coder
18+
- test/3x
19+
- neural_compressor/common
20+
- neural_compressor/torch
1821

1922
pool: ICX-16C
2023

0 commit comments

Comments
 (0)