From 16348d8504d5376808066aaed4f9864254bcfcf2 Mon Sep 17 00:00:00 2001
From: "Sun, Xuehao"
Date: Thu, 25 Sep 2025 10:07:41 +0800
Subject: [PATCH 01/20] update torch version and docker

Signed-off-by: Sun, Xuehao
---
 .azure-pipelines/docker/Dockerfile.devel      |  8 +++
 .../docker/DockerfileCodeScan.devel           |  4 +-
 .azure-pipelines/docker/ubuntu-2404.devel     | 51 +++++++++++++++++++
 .azure-pipelines/scripts/install_nc.sh        |  4 +-
 .../models/run_pytorch_models_trigger.sh      |  2 +-
 .azure-pipelines/scripts/ut/3x/run_3x_pt.sh   |  2 +-
 .../scripts/ut/run_basic_adaptor.sh           |  2 +-
 .azure-pipelines/template/docker-template.yml |  3 +-
 .azure-pipelines/template/ut-template.yml     |  5 +-
 .azure-pipelines/ut-3x-pt.yml                 |  7 ++-
 .azure-pipelines/ut-3x-tf.yml                 |  3 +-
 .azure-pipelines/ut-basic.yml                 |  3 +-
 12 files changed, 82 insertions(+), 12 deletions(-)
 create mode 100644 .azure-pipelines/docker/ubuntu-2404.devel

diff --git a/.azure-pipelines/docker/Dockerfile.devel b/.azure-pipelines/docker/Dockerfile.devel
index fbb7b8c4e57..974feade882 100644
--- a/.azure-pipelines/docker/Dockerfile.devel
+++ b/.azure-pipelines/docker/Dockerfile.devel
@@ -36,6 +36,14 @@ RUN apt-get update && apt-get install -y --no-install-recommends --fix-missing \
 
 RUN ln -sf $(which python3) /usr/bin/python
 
+ARG USER_ID=1000
+ARG GROUP_ID=1000
+
+RUN groupadd -g ${GROUP_ID} hostgroup && \
+    useradd -m -u ${USER_ID} -g ${GROUP_ID} hostuser
+
+USER hostuser
+
 RUN python -m pip install pip packaging --upgrade
 RUN python -m pip install --no-cache-dir setuptools
 
diff --git a/.azure-pipelines/docker/DockerfileCodeScan.devel b/.azure-pipelines/docker/DockerfileCodeScan.devel
index 611fe02e235..26b743ff295 100644
--- a/.azure-pipelines/docker/DockerfileCodeScan.devel
+++ b/.azure-pipelines/docker/DockerfileCodeScan.devel
@@ -30,9 +30,11 @@ RUN apt-get update && apt-get install -y --no-install-recommends --fix-missing \
 
 RUN ln -sf $(which python3) /usr/bin/python
 
+ARG USER_ID=1000
+ARG GROUP_ID=1000
+
 RUN python -m pip install --no-cache-dir \
     bandit\
-    pyspelling\
     pydocstyle
 
 WORKDIR /
diff --git a/.azure-pipelines/docker/ubuntu-2404.devel b/.azure-pipelines/docker/ubuntu-2404.devel
new file mode 100644
index 00000000000..158fcb4c01f
--- /dev/null
+++ b/.azure-pipelines/docker/ubuntu-2404.devel
@@ -0,0 +1,51 @@
+#
+# Copyright (c) 2025 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ARG UBUNTU_VER=24.04
+FROM ubuntu:${UBUNTU_VER}
+
+# See http://bugs.python.org/issue19846
+ENV LANG C.UTF-8
+
+RUN apt-get update && apt-get install -y --no-install-recommends --fix-missing \
+    python3 \
+    python3-pip \
+    python3.12-dev \
+    autoconf \
+    build-essential \
+    git \
+    libomp-dev \
+    numactl \
+    time \
+    wget \
+    bc \
+    jq \
+    vim
+
+RUN ln -sf $(which python3) /usr/bin/python
+
+ARG USER_ID=1000
+ARG GROUP_ID=1000
+
+RUN groupadd -g ${GROUP_ID} hostgroup && \
+    useradd -m -u ${USER_ID} -g ${GROUP_ID} hostuser
+
+USER hostuser
+
+ENV PATH="/home/hostuser/.local/bin:$PATH"
+RUN pip config set global.break-system-packages true
+RUN pip list
+
+WORKDIR /
+
diff --git a/.azure-pipelines/scripts/install_nc.sh b/.azure-pipelines/scripts/install_nc.sh
index 8f9f8c0499e..30ba68510c0 100644
--- a/.azure-pipelines/scripts/install_nc.sh
+++ b/.azure-pipelines/scripts/install_nc.sh
@@ -10,8 +10,8 @@ if [[ $1 = *"3x_pt"* ]]; then
         python setup.py pt bdist_wheel
 else
     echo -e "\n Install torch CPU ... "
-    pip install torch==2.7.1 torchvision --index-url https://download.pytorch.org/whl/cpu
-    python -m pip install intel-extension-for-pytorch==2.7.0 oneccl_bind_pt --index-url https://pytorch-extension.intel.com/release-whl/stable/cpu/us/
+    pip install torch==2.8.0 torchvision --index-url https://download.pytorch.org/whl/cpu
+    python -m pip install intel-extension-for-pytorch==2.8.0 oneccl_bind_pt --index-url https://pytorch-extension.intel.com/release-whl/stable/cpu/us/
     python -m pip install --no-cache-dir -r requirements.txt
     python setup.py bdist_wheel
 fi
diff --git a/.azure-pipelines/scripts/models/run_pytorch_models_trigger.sh b/.azure-pipelines/scripts/models/run_pytorch_models_trigger.sh
index c5627cc7c00..2e501432f59 100644
--- a/.azure-pipelines/scripts/models/run_pytorch_models_trigger.sh
+++ b/.azure-pipelines/scripts/models/run_pytorch_models_trigger.sh
@@ -72,7 +72,7 @@ FRAMEWORK="pytorch"
 source /neural-compressor/.azure-pipelines/scripts/fwk_version.sh 'latest'
 if [[ "${inc_new_api}" == "3x"* ]]; then
     FRAMEWORK_VERSION="latest"
-    export LD_LIBRARY_PATH=/usr/local/lib/:$LD_LIBRARY_PATH
+    export LD_LIBRARY_PATH=${HOME}/.local/lib/:$LD_LIBRARY_PATH
 else
     FRAMEWORK_VERSION=${pytorch_version}
     TORCH_VISION_VERSION=${torchvision_version}
diff --git a/.azure-pipelines/scripts/ut/3x/run_3x_pt.sh b/.azure-pipelines/scripts/ut/3x/run_3x_pt.sh
index 098c2f6e640..4e5ad4ee375 100644
--- a/.azure-pipelines/scripts/ut/3x/run_3x_pt.sh
+++ b/.azure-pipelines/scripts/ut/3x/run_3x_pt.sh
@@ -11,7 +11,7 @@ echo "##[section]import check pass"
 
 # install requirements
 echo "##[group]set up UT env..."
-export LD_LIBRARY_PATH=/usr/local/lib/:$LD_LIBRARY_PATH
+export LD_LIBRARY_PATH=${HOME}/.local/lib/:$LD_LIBRARY_PATH
 sed -i '/^deepspeed/d' /neural-compressor/test/3x/torch/requirements.txt
 pip install -r /neural-compressor/test/3x/torch/requirements.txt --extra-index-url https://download.pytorch.org/whl/cpu
 pip install pytest-cov
diff --git a/.azure-pipelines/scripts/ut/run_basic_adaptor.sh b/.azure-pipelines/scripts/ut/run_basic_adaptor.sh
index 3f05474effa..762e0d6e514 100644
--- a/.azure-pipelines/scripts/ut/run_basic_adaptor.sh
+++ b/.azure-pipelines/scripts/ut/run_basic_adaptor.sh
@@ -9,7 +9,7 @@ source /neural-compressor/.azure-pipelines/scripts/fwk_version.sh $1
 echo "set up UT env..."
 bash /neural-compressor/.azure-pipelines/scripts/ut/env_setup.sh "${test_case}"
 pip install cmake==4.0.0
-export LD_LIBRARY_PATH=/usr/local/lib/:$LD_LIBRARY_PATH
+export LD_LIBRARY_PATH=${HOME}/.local/lib/:$LD_LIBRARY_PATH
 export COVERAGE_RCFILE=/neural-compressor/.azure-pipelines/scripts/ut/coverage.file
 lpot_path=$(python -c 'import neural_compressor; import os; print(os.path.dirname(neural_compressor.__file__))')
 cd /neural-compressor/test || exit 1
diff --git a/.azure-pipelines/template/docker-template.yml b/.azure-pipelines/template/docker-template.yml
index a7625fc4d60..4585f8d581e 100644
--- a/.azure-pipelines/template/docker-template.yml
+++ b/.azure-pipelines/template/docker-template.yml
@@ -63,7 +63,8 @@ steps:
   - script: |
       docker image prune -a -f
       if [[ ! $(docker images | grep -i ${{ parameters.repoName }}:${{ parameters.repoTag }}) ]]; then
-        docker build -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/${{parameters.dockerFileName}}.devel -t ${{ parameters.repoName }}:${{ parameters.repoTag }} .
+        docker build --build-arg USER_ID=$(id -u) --build-arg GROUP_ID=$(id -g) \
+          -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/${{parameters.dockerFileName}}.devel -t ${{ parameters.repoName }}:${{ parameters.repoTag }} .
       fi
       docker images | grep -i ${{ parameters.repoName }}
       if [[ $? -ne 0 ]]; then
diff --git a/.azure-pipelines/template/ut-template.yml b/.azure-pipelines/template/ut-template.yml
index d8908d22a35..45a80086c6e 100644
--- a/.azure-pipelines/template/ut-template.yml
+++ b/.azure-pipelines/template/ut-template.yml
@@ -2,6 +2,9 @@ parameters:
   - name: dockerConfigName
     type: string
     default: "commonDockerConfig"
+  - name: dockerFileName
+    type: string
+    default: "Dockerfile"
   - name: repo
     type: string
     default: "https://github.com/intel/neural-compressor"
@@ -27,7 +30,7 @@ steps:
       dockerConfigName: ${{ parameters.dockerConfigName }}
       repoName: "neural-compressor"
       repoTag: "py310"
-      dockerFileName: "Dockerfile"
+      dockerFileName: "${{ parameters.dockerFileName }}"
      containerName: ${{ parameters.utContainerName }}
       repo: ${{ parameters.repo }}
       imageSource: ${{ parameters.imageSource }}
diff --git a/.azure-pipelines/ut-3x-pt.yml b/.azure-pipelines/ut-3x-pt.yml
index 68083d3a2c6..2b8ccf7069f 100644
--- a/.azure-pipelines/ut-3x-pt.yml
+++ b/.azure-pipelines/ut-3x-pt.yml
@@ -25,7 +25,7 @@ pool: ICX-16C
 
 variables:
   IMAGE_NAME: "neural-compressor"
-  IMAGE_TAG: "py310"
+  IMAGE_TAG: "py312"
   UPLOAD_PATH: $(Build.SourcesDirectory)/log_dir
   DOWNLOAD_PATH: $(Build.SourcesDirectory)/log_dir
   ARTIFACT_NAME: "UT_coverage_report_3x_pt"
@@ -42,6 +42,7 @@ stages:
       - template: template/ut-template.yml
         parameters:
           dockerConfigName: "commonDockerConfig"
+          dockerFileName: "ubuntu-2404"
           utScriptFileName: "3x/run_3x_pt"
           uploadPath: $(UPLOAD_PATH)
           utArtifact: "ut_3x"
@@ -57,6 +58,7 @@ stages:
       - template: template/ut-template.yml
         parameters:
           dockerConfigName: "gitCloneDockerConfig"
+          dockerFileName: "ubuntu-2404"
          utScriptFileName: "3x/run_3x_pt"
           uploadPath: $(UPLOAD_PATH)
           utArtifact: "ut_3x_baseline"
@@ -72,7 +74,8 @@ stages:
     steps:
       - script: |
           if [[ ! $(docker images | grep -i ${IMAGE_NAME}:${IMAGE_TAG}) ]]; then
-            docker build -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/Dockerfile.devel -t ${IMAGE_NAME}:${IMAGE_TAG} .
+            docker build --build-arg USER_ID=$(id -u) --build-arg GROUP_ID=$(id -g) \
+              -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/ubuntu-2404.devel -t ${IMAGE_NAME}:${IMAGE_TAG} .
           fi
           docker images | grep -i ${IMAGE_NAME}
           if [[ $? -ne 0 ]]; then
diff --git a/.azure-pipelines/ut-3x-tf.yml b/.azure-pipelines/ut-3x-tf.yml
index d2733b1149b..17d17d8f678 100644
--- a/.azure-pipelines/ut-3x-tf.yml
+++ b/.azure-pipelines/ut-3x-tf.yml
@@ -69,7 +69,8 @@ stages:
     steps:
       - script: |
          if [[ ! $(docker images | grep -i ${IMAGE_NAME}:${IMAGE_TAG}) ]]; then
-            docker build -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/Dockerfile.devel -t ${IMAGE_NAME}:${IMAGE_TAG} .
+            docker build --build-arg USER_ID=$(id -u) --build-arg GROUP_ID=$(id -g) \
+              -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/Dockerfile.devel -t ${IMAGE_NAME}:${IMAGE_TAG} .
           fi
           docker images | grep -i ${IMAGE_NAME}
           if [[ $? -ne 0 ]]; then
diff --git a/.azure-pipelines/ut-basic.yml b/.azure-pipelines/ut-basic.yml
index c4e9c9fdcfb..20692b3060e 100644
--- a/.azure-pipelines/ut-basic.yml
+++ b/.azure-pipelines/ut-basic.yml
@@ -248,7 +248,8 @@ stages:
     steps:
       - script: |
           if [[ ! $(docker images | grep -i ${IMAGE_NAME}:${IMAGE_TAG}) ]]; then
-            docker build -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/Dockerfile.devel -t ${IMAGE_NAME}:${IMAGE_TAG} .
+            docker build --build-arg USER_ID=$(id -u) --build-arg GROUP_ID=$(id -g) \
+              -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/Dockerfile.devel -t ${IMAGE_NAME}:${IMAGE_TAG} .
           fi
           docker images | grep -i ${IMAGE_NAME}
           if [[ $? -ne 0 ]]; then

From f52031dd3ac2e8005ccdf98b6ddaa8e23ebd3711 Mon Sep 17 00:00:00 2001
From: "Sun, Xuehao"
Date: Thu, 25 Sep 2025 10:21:54 +0800
Subject: [PATCH 02/20] fix dockerfile

Signed-off-by: Sun, Xuehao
---
 .azure-pipelines/docker/ubuntu-2404.devel | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.azure-pipelines/docker/ubuntu-2404.devel b/.azure-pipelines/docker/ubuntu-2404.devel
index 158fcb4c01f..3577af788ad 100644
--- a/.azure-pipelines/docker/ubuntu-2404.devel
+++ b/.azure-pipelines/docker/ubuntu-2404.devel
@@ -45,6 +45,8 @@ USER hostuser
 
 ENV PATH="/home/hostuser/.local/bin:$PATH"
 RUN pip config set global.break-system-packages true
+RUN python -m pip install pip packaging --upgrade
+RUN python -m pip install --no-cache-dir setuptools
 RUN pip list
 
 WORKDIR /

From 9cd1e4efb6b8720e0abc57f95db7ac9b27fa6c28 Mon Sep 17 00:00:00 2001
From: "Sun, Xuehao"
Date: Mon, 27 Oct 2025 16:12:02 +0800
Subject: [PATCH 03/20] Add docker clean up

Signed-off-by: Sun, Xuehao
---
 .azure-pipelines/template/code-scan-template.yml | 3 ++-
 .azure-pipelines/template/model-template.yml     | 3 ++-
 .azure-pipelines/template/ut-template.yml        | 3 ++-
 3 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/.azure-pipelines/template/code-scan-template.yml b/.azure-pipelines/template/code-scan-template.yml
index b8d121b4991..0aeb5389d06 100644
--- a/.azure-pipelines/template/code-scan-template.yml
+++ b/.azure-pipelines/template/code-scan-template.yml
@@ -38,5 +38,6 @@ steps:
     inputs:
       targetType: "inline"
       script: |
-        docker exec ${{ parameters.codeScanContainerName }} bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true"
+        docker stop ${{ parameters.codeScanContainerName }}
+        docker rm -vf ${{ parameters.codeScanContainerName }} || true
     displayName: "Docker clean up"
diff --git a/.azure-pipelines/template/model-template.yml b/.azure-pipelines/template/model-template.yml
index 20fdd1a8cba..50a8072d479 100644
--- a/.azure-pipelines/template/model-template.yml
+++ b/.azure-pipelines/template/model-template.yml
@@ -76,5 +76,6 @@ steps:
     inputs:
       targetType: "inline"
      script: |
-        docker exec ${{ parameters.modelContainerName }} bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true"
+        docker stop ${{ parameters.modelContainerName }}
+        docker rm -vf ${{ parameters.modelContainerName }} || true
     displayName: "Docker clean up"
diff --git a/.azure-pipelines/template/ut-template.yml b/.azure-pipelines/template/ut-template.yml
index 45a80086c6e..cc27116e61c 100644
--- a/.azure-pipelines/template/ut-template.yml
+++ b/.azure-pipelines/template/ut-template.yml
@@ -60,5 +60,6 @@ steps:
     inputs:
       targetType: "inline"
       script: |
-        docker exec ${{ parameters.utContainerName }} bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true"
+        docker stop ${{ parameters.utContainerName }}
+        docker rm -vf ${{ parameters.utContainerName }} || true
     displayName: "Docker clean up"

From c3ef898ada9d5067b522c950bffdcf12d674c961 Mon Sep 17 00:00:00 2001
From: "Sun, Xuehao"
Date: Tue, 28 Oct 2025 09:55:23 +0800
Subject: [PATCH 04/20] remove unused code

Signed-off-by: Sun, Xuehao
---
 .azure-pipelines/docker/Dockerfile.devel      |   1 -
 .azure-pipelines/docker/ubuntu-2404.devel     |   1 -
 .../models/run_onnxrt_models_trigger.sh       |  62 ---------
 .../models/run_pytorch_models_trigger.sh      |  28 +----
 .../models/run_tensorflow_models_trigger.sh   | 118 ------------------
 .azure-pipelines/template/docker-template.yml |   4 +-
 .azure-pipelines/template/model-template.yml  |   2 +-
 .github/workflows/check-stale-issue.yml       |   1 +
 README.md                                     |   2 +-
 neural_compressor/version.py                  |   2 +-
 10 files changed, 7 insertions(+), 214 deletions(-)
 delete mode 100644 .azure-pipelines/scripts/models/run_onnxrt_models_trigger.sh
 delete mode 100644 .azure-pipelines/scripts/models/run_tensorflow_models_trigger.sh

diff --git a/.azure-pipelines/docker/Dockerfile.devel b/.azure-pipelines/docker/Dockerfile.devel
index 974feade882..069f918f457 100644
--- a/.azure-pipelines/docker/Dockerfile.devel
+++ b/.azure-pipelines/docker/Dockerfile.devel
@@ -50,4 +50,3 @@ RUN python -m pip install --no-cache-dir setuptools
 RUN pip list
 
 WORKDIR /
-
diff --git a/.azure-pipelines/docker/ubuntu-2404.devel b/.azure-pipelines/docker/ubuntu-2404.devel
index 3577af788ad..0d60d3d3252 100644
--- a/.azure-pipelines/docker/ubuntu-2404.devel
+++ b/.azure-pipelines/docker/ubuntu-2404.devel
@@ -50,4 +50,3 @@ RUN python -m pip install --no-cache-dir setuptools
 RUN pip list
 
 WORKDIR /
-
diff --git a/.azure-pipelines/scripts/models/run_onnxrt_models_trigger.sh b/.azure-pipelines/scripts/models/run_onnxrt_models_trigger.sh
deleted file mode 100644
index d48a115bea5..00000000000
--- a/.azure-pipelines/scripts/models/run_onnxrt_models_trigger.sh
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/bash
-set -eo pipefail
-# get parameters
-PATTERN='[-a-zA-Z0-9_]*='
-
-for i in "$@"
-do
-    case $i in
-        --model=*)
-            model=`echo $i | sed "s/${PATTERN}//"`;;
-        --mode=*)
-            mode=`echo $i | sed "s/${PATTERN}//"`;;
-        --USE_TUNE_ACC=*)
-            USE_TUNE_ACC=`echo $i | sed "s/${PATTERN}//"`;;
-        --PERF_STABLE_CHECK=*)
-            PERF_STABLE_CHECK=`echo $i | sed "s/${PATTERN}//"`;;
-        --BUILD_BUILDID=*)
-            BUILD_BUILDID=`echo $i | sed "s/${PATTERN}//"`;;
-        *)
-            echo "Parameter $i not recognized."; exit 1;;
-    esac
-done
-
-echo "specify FWs version..."
-source /neural-compressor/.azure-pipelines/scripts/fwk_version.sh 'latest'
-FRAMEWORK="onnxrt"
-FRAMEWORK_VERSION=${onnxruntime_version}
-
-inc_new_api=false
-# ======== set up config for onnxrt models ========
-if [ "${model}" == "resnet50-v1-12" ]; then
-    model_src_dir="image_recognition/onnx_model_zoo/resnet50/quantization/ptq_static"
-    dataset_location="/tf_dataset2/datasets/imagenet/ImagenetRaw/ImagenetRaw_small_5000/ILSVRC2012_img_val"
-    input_model="/tf_dataset2/models/onnx/resnet50-v1-12/resnet50-v1-12.onnx"
-    yaml="resnet50_v1_5.yaml"
-    strategy="basic"
-    batch_size=1
-    new_benchmark=true
-    inc_new_api=true
-    tuning_cmd="bash run_quant.sh --input_model=${input_model} --dataset_location=${dataset_location}"
-    benchmark_cmd="bash run_benchmark.sh --config=${yaml} --mode=performance --dataset_location=${dataset_location}"
-fi
-
-
-/bin/bash run_model_trigger_common.sh \
-    --yaml=${yaml} \
-    --framework=${FRAMEWORK} \
-    --fwk_ver=${FRAMEWORK_VERSION} \
-    --model=${model} \
-    --model_src_dir=${model_src_dir} \
-    --dataset_location=${dataset_location} \
-    --input_model=${input_model} \
-    --batch_size=${batch_size} \
-    --strategy=${strategy} \
-    --new_benchmark=${new_benchmark} \
-    --tuning_cmd="${tuning_cmd}" \
-    --benchmark_cmd="${benchmark_cmd}" \
-    --inc_new_api="${inc_new_api}" \
-    --mode=${mode} \
-    --USE_TUNE_ACC=${USE_TUNE_ACC} \
-    --PERF_STABLE_CHECK=${PERF_STABLE_CHECK} \
-    --BUILD_BUILDID=${BUILD_BUILDID}
\ No newline at end of file
diff --git a/.azure-pipelines/scripts/models/run_pytorch_models_trigger.sh b/.azure-pipelines/scripts/models/run_pytorch_models_trigger.sh
index 2e501432f59..16e1f2c9189 100644
--- a/.azure-pipelines/scripts/models/run_pytorch_models_trigger.sh
+++ b/.azure-pipelines/scripts/models/run_pytorch_models_trigger.sh
@@ -30,29 +30,7 @@ new_benchmark=true
 inc_new_api=true
 benchmark_cmd=""
 # ======== set up config for pytorch models ========
-if [ "${model}" == "resnet18" ]; then
-    model_src_dir="image_recognition/torchvision_models/quantization/ptq/cpu/eager"
-    dataset_location="/tf_dataset2/datasets/mini-imageraw"
-    input_model=""
-    yaml="conf.yaml"
-    strategy="bayesian"
-    batch_size=1
-    new_benchmark=false
-    inc_new_api=false
-    tuning_cmd="bash run_tuning.sh --topology=resnet18 --dataset_location=${dataset_location} --input_model=${input_model}"
-    benchmark_cmd="bash run_benchmark.sh --topology=resnet18 --dataset_location=${dataset_location} --mode=benchmark --batch_size=${batch_size} --iters=500"
-elif [ "${model}" == "resnet18_fx" ]; then
-    model_src_dir="image_recognition/torchvision_models/quantization/ptq/cpu/fx/"
-    dataset_location="/tf_dataset2/datasets/mini-imageraw"
-    input_model="resnet18"
-    yaml=""
-    strategy="basic"
-    batch_size=1
-    new_benchmark=true
-    inc_new_api=true
-    tuning_cmd="bash run_quant.sh --topology=resnet18 --dataset_location=${dataset_location} --input_model=${input_model}"
-    benchmark_cmd="bash run_benchmark.sh --topology=resnet18 --dataset_location=${dataset_location} --mode=performance --batch_size=${batch_size} --iters=500"
-elif [ "${model}" == "opt_125m_woq_gptq_int4" ]; then
+if [ "${model}" == "opt_125m_woq_gptq_int4" ]; then
     model_src_dir="nlp/huggingface_models/language-modeling/quantization/weight_only"
     inc_new_api=3x_pt
     tuning_cmd="bash run_quant.sh --topology=opt_125m_woq_gptq_int4"
 elif [ "${model}" == "opt_125m_woq_gptq_nf4_dq_bnb" ]; then
     model_src_dir="nlp/huggingface_models/language-modeling/quantization/weight_only"
     inc_new_api=3x_pt
     tuning_cmd="bash run_quant.sh --topology=opt_125m_woq_gptq_nf4_dq_bnb"
-elif [ "${model}" == "opt_125m_woq_gptq_int4_dq_ggml" ]; then
-    model_src_dir="nlp/huggingface_models/language-modeling/quantization/weight_only"
-    inc_new_api=3x_pt
-    tuning_cmd="bash run_quant.sh --topology=opt_125m_woq_gptq_int4_dq_ggml"
 fi
 
 echo "Specify FWs version..."
diff --git a/.azure-pipelines/scripts/models/run_tensorflow_models_trigger.sh b/.azure-pipelines/scripts/models/run_tensorflow_models_trigger.sh
deleted file mode 100644
index 505fad4a6e5..00000000000
--- a/.azure-pipelines/scripts/models/run_tensorflow_models_trigger.sh
+++ /dev/null
@@ -1,118 +0,0 @@
-#!/bin/bash
-set -eo pipefail
-# get parameters
-PATTERN='[-a-zA-Z0-9_]*='
-
-for i in "$@"
-do
-    case $i in
-        --model=*)
-            model=`echo $i | sed "s/${PATTERN}//"`;;
-        --mode=*)
-            mode=`echo $i | sed "s/${PATTERN}//"`;;
-        --USE_TUNE_ACC=*)
-            USE_TUNE_ACC=`echo $i | sed "s/${PATTERN}//"`;;
-        --PERF_STABLE_CHECK=*)
-            PERF_STABLE_CHECK=`echo $i | sed "s/${PATTERN}//"`;;
-        --BUILD_BUILDID=*)
-            BUILD_BUILDID=`echo $i | sed "s/${PATTERN}//"`;;
-        *)
-            echo "Parameter $i not recognized."; exit 1;;
-    esac
-done
-
-echo "specify FWs version..."
-source /neural-compressor/.azure-pipelines/scripts/fwk_version.sh 'latest'
-FRAMEWORK="tensorflow"
-FRAMEWORK_VERSION=${tensorflow_version}
-
-inc_new_api=false
-# ======== set up config for tensorflow models ========
-if [ "${model}" == "resnet50v1.5" ]; then
-    model_src_dir="image_recognition/tensorflow_models/resnet50_v1_5/quantization/ptq"
-    dataset_location="/tf_dataset/dataset/TF_mini_imagenet"
-    input_model="/tf_dataset/pre-trained-models/resnet50v1_5/fp32/resnet50_v1.pb"
-    new_benchmark=true
-    inc_new_api=true
-    tuning_cmd="bash run_quant.sh --dataset_location=${dataset_location} --input_model=${input_model}"
-    benchmark_cmd="bash run_benchmark.sh --dataset_location=${dataset_location} --batch_size=1 --mode=performance"
-elif [ "${model}" == "ssd_resnet50_v1" ];then
-    model_src_dir="object_detection/tensorflow_models/ssd_resnet50_v1/quantization/ptq"
-    dataset_location="/tf_dataset/tensorflow/mini-coco-100.record"
-    input_model="/tf_dataset/pre-train-model-oob/object_detection/ssd_resnet50_v1/frozen_inference_graph.pb"
-    new_benchmark=true
-    inc_new_api=true
-    tuning_cmd="bash run_quant.sh --dataset_location=${dataset_location} --input_model=${input_model}"
-    benchmark_cmd="bash run_benchmark.sh --dataset_location=${dataset_location} --batch_size=1 --mode=performance"
-elif [ "${model}" == "ssd_mobilenet_v1_ckpt" ];then
-    model_src_dir="object_detection/tensorflow_models/ssd_mobilenet_v1/quantization/ptq"
-    dataset_location="/tf_dataset/tensorflow/mini-coco-100.record"
-    input_model="/tf_dataset/pre-train-model-oob/object_detection/ssd_mobilenet_v1"
-    new_benchmark=true
-    inc_new_api=true
-    tuning_cmd="bash run_quant.sh --dataset_location=${dataset_location} --input_model=${input_model}"
-    benchmark_cmd="bash run_benchmark.sh --dataset_location=${dataset_location} --batch_size=1 --mode=performance"
-elif [ "${model}" == "inception_v1" ]; then
-    model_src_dir="image_recognition/tensorflow_models/quantization/ptq"
-    dataset_location="/tf_dataset/dataset/TF_mini_imagenet"
-    input_model="/tf_dataset/pre-train-model-slim/pbfile/frozen_pb/frozen_inception_v1.pb"
-    yaml="inception_v1.yaml"
-    strategy="basic"
-    batch_size=1
-    new_benchmark=true
-    tuning_cmd="bash run_tuning.sh --config=${yaml} --input_model=${input_model}"
-    benchmark_cmd="bash run_benchmark.sh --config=${yaml} --mode=performance"
-elif [ "${model}" == "darknet19" ]; then
-    model_src_dir="oob_models/quantization/ptq"
-    dataset_location=""
-    input_model="/tf_dataset/tensorflow/tf_oob_models/ov/all_tf_models/PublicInHouse/classification/darknet19/darknet19.pb"
-    yaml="config.yaml"
-    strategy="basic"
-    batch_size=1
-    new_benchmark=false
-    inc_new_api=true
-    tuning_cmd="bash run_quant.sh --topology=${model} --input_model=${input_model}"
-    benchmark_cmd="bash run_benchmark.sh --topology=${model} --mode=performance --batch_size=1 --iters=500"
-elif [ "${model}" == "densenet-121" ]; then
-    model_src_dir="oob_models/quantization/ptq"
-    dataset_location=""
-    input_model="/tf_dataset/tensorflow/tf_oob_models/ov/all_tf_models/classification/densenet/121/tf/densenet-121.pb"
-    yaml="config.yaml"
-    strategy="basic"
-    batch_size=1
-    new_benchmark=false
-    inc_new_api=true
-    tuning_cmd="bash run_quant.sh --topology=${model} --input_model=${input_model}"
-    benchmark_cmd="bash run_benchmark.sh --topology=${model} --mode=performance --batch_size=1 --iters=500"
-elif [ "${model}" == "resnet-101" ]; then
-    model_src_dir="oob_models/quantization/ptq"
-    dataset_location=""
-    input_model="/tf_dataset/tensorflow/tf_oob_models/ov/all_tf_models/classification/resnet/v1/101/tf/resnet-101.pb"
-    yaml="config.yaml"
-    strategy="basic"
-    batch_size=1
-    new_benchmark=false
-    inc_new_api=true
-    tuning_cmd="bash run_quant.sh --topology=${model} --input_model=${input_model}"
-    benchmark_cmd="bash run_benchmark.sh --topology=${model} --mode=performance --batch_size=1 --iters=500"
-fi
-
-
-/bin/bash run_model_trigger_common.sh \
-    --yaml=${yaml} \
-    --framework=${FRAMEWORK} \
-    --fwk_ver=${FRAMEWORK_VERSION} \
-    --model=${model} \
-    --model_src_dir=${model_src_dir} \
-    --dataset_location=${dataset_location} \
-    --input_model=${input_model} \
-    --batch_size=${batch_size} \
-    --strategy=${strategy} \
-    --new_benchmark=${new_benchmark} \
-    --tuning_cmd="${tuning_cmd}" \
-    --benchmark_cmd="${benchmark_cmd}" \
-    --inc_new_api="${inc_new_api}" \
-    --mode=${mode} \
-    --USE_TUNE_ACC=${USE_TUNE_ACC} \
-    --PERF_STABLE_CHECK=${PERF_STABLE_CHECK} \
-    --BUILD_BUILDID=${BUILD_BUILDID}
diff --git a/.azure-pipelines/template/docker-template.yml b/.azure-pipelines/template/docker-template.yml
index 4585f8d581e..25d143bd8d5 100644
--- a/.azure-pipelines/template/docker-template.yml
+++ b/.azure-pipelines/template/docker-template.yml
@@ -75,7 +75,7 @@ steps:
 
   - ${{ if eq(parameters.imageSource, 'pull') }}:
       - script: |
-          docker pull vault.habana.ai/gaudi-docker/1.22.0/ubuntu24.04/habanalabs/pytorch-installer-2.7.1:latest
+          docker pull vault.habana.ai/gaudi-docker/1.22.1/ubuntu24.04/habanalabs/pytorch-installer-2.7.1:latest
         displayName: "Pull habana docker image"
 
   - script: |
@@ -96,7 +96,7 @@ steps:
       else
         docker run -dit --disable-content-trust --privileged --name=${{ parameters.containerName }} --shm-size="2g" \
           --runtime=habana -e HABANA_VISIBLE_DEVICES=all -e OMPI_MCA_btl_vader_single_copy_mechanism=none --cap-add=sys_nice --net=host --ipc=host \
-          -v ${BUILD_SOURCESDIRECTORY}:/neural-compressor vault.habana.ai/gaudi-docker/1.22.0/ubuntu24.04/habanalabs/pytorch-installer-2.7.1:latest
+          -v ${BUILD_SOURCESDIRECTORY}:/neural-compressor vault.habana.ai/gaudi-docker/1.22.1/ubuntu24.04/habanalabs/pytorch-installer-2.7.1:latest
         docker exec ${{ parameters.containerName }} bash -c "ln -sf \$(which python3) /usr/bin/python"
       fi
       echo "Show the container list after docker run ... "
diff --git a/.azure-pipelines/template/model-template.yml b/.azure-pipelines/template/model-template.yml
index 50a8072d479..be9daae02cd 100644
--- a/.azure-pipelines/template/model-template.yml
+++ b/.azure-pipelines/template/model-template.yml
@@ -18,7 +18,7 @@ steps:
     parameters:
       dockerConfigName: "commonDockerConfig"
       repoName: "neural-compressor"
-      repoTag: "py310"
+      repoTag: "py312"
       dockerFileName: "Dockerfile"
       containerName: ${{ parameters.modelContainerName }}
diff --git a/.github/workflows/check-stale-issue.yml b/.github/workflows/check-stale-issue.yml
index 6af25375328..46d0d38e7aa 100644
--- a/.github/workflows/check-stale-issue.yml
+++ b/.github/workflows/check-stale-issue.yml
@@ -14,6 +14,7 @@ jobs:
     steps:
       - uses: actions/stale@v10
         with:
+          start-date: '2020-01-01T00:00:00Z'
          days-before-issue-stale: 60
           days-before-issue-close: 7
           stale-issue-message: "This issue is stale because it has been open 60 days with no activity. Remove stale label or comment or this will be closed in 7 days."
diff --git a/README.md b/README.md
index 7831990f6ea..a91220d8f4d 100644
--- a/README.md
+++ b/README.md
@@ -58,7 +58,7 @@ To try on Intel Gaudi2, docker image with Gaudi Software Stack is recommended, p
 Run a container with an interactive shell, [more info](https://docs.habana.ai/en/latest/Installation_Guide/Additional_Installation/Docker_Installation.html#docker-installation)
 ```
-docker run -it --runtime=habana -e HABANA_VISIBLE_DEVICES=all -e OMPI_MCA_btl_vader_single_copy_mechanism=none --cap-add=sys_nice --net=host --ipc=host vault.habana.ai/gaudi-docker/1.22.0/ubuntu24.04/habanalabs/pytorch-installer-2.7.1:latest
+docker run -it --runtime=habana -e HABANA_VISIBLE_DEVICES=all -e OMPI_MCA_btl_vader_single_copy_mechanism=none --cap-add=sys_nice --net=host --ipc=host vault.habana.ai/gaudi-docker/1.22.1/ubuntu24.04/habanalabs/pytorch-installer-2.7.1:latest
 ```
 > Note: Since Habana software >= 1.21.0, `PT_HPU_LAZY_MODE=0` is the default setting. However, most low-precision functions (such as `convert_from_uint4`) do not support this setting. Therefore, we recommend setting `PT_HPU_LAZY_MODE=1` to maintain compatibility.
diff --git a/neural_compressor/version.py b/neural_compressor/version.py
index 6f857d8faca..732a1dd612c 100644
--- a/neural_compressor/version.py
+++ b/neural_compressor/version.py
@@ -15,4 +15,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
"""IntelĀ® Neural Compressor: An open-source Python library supporting popular model compression techniques.""" -__version__ = "3.6" +__version__ = "3.7" From 5f6037c6196a41308a142cb8e67592c5a7b82d39 Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Tue, 28 Oct 2025 10:19:46 +0800 Subject: [PATCH 05/20] fix path Signed-off-by: Sun, Xuehao --- .azure-pipelines/docker/Dockerfile.devel | 1 + .github/workflows/check-stale-issue.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.azure-pipelines/docker/Dockerfile.devel b/.azure-pipelines/docker/Dockerfile.devel index 069f918f457..4d021f440ba 100644 --- a/.azure-pipelines/docker/Dockerfile.devel +++ b/.azure-pipelines/docker/Dockerfile.devel @@ -44,6 +44,7 @@ RUN groupadd -g ${GROUP_ID} hostgroup && \ USER hostuser +ENV PATH="/home/hostuser/.local/bin:$PATH" RUN python -m pip install pip packaging --upgrade RUN python -m pip install --no-cache-dir setuptools diff --git a/.github/workflows/check-stale-issue.yml b/.github/workflows/check-stale-issue.yml index 46d0d38e7aa..2aae86025ce 100644 --- a/.github/workflows/check-stale-issue.yml +++ b/.github/workflows/check-stale-issue.yml @@ -7,6 +7,7 @@ permissions: on: schedule: - cron: "30 22 * * *" + workflow_dispatch: jobs: stale: From 3062a93638c1aaec22c98e7af7ff9b073f06e8d9 Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Wed, 5 Nov 2025 15:34:50 +0800 Subject: [PATCH 06/20] fix cmake Signed-off-by: Sun, Xuehao --- .azure-pipelines/docker/ubuntu-2404.devel | 1 + .azure-pipelines/scripts/install_nc.sh | 1 - .azure-pipelines/scripts/models/env_setup.sh | 5 ++--- .azure-pipelines/scripts/ut/3x/run_3x_tf.sh | 1 - .azure-pipelines/scripts/ut/env_setup.sh | 2 -- .azure-pipelines/scripts/ut/run_basic_adaptor.sh | 1 - .azure-pipelines/template/model-template.yml | 1 + .azure-pipelines/template/ut-template.yml | 1 + 8 files changed, 5 insertions(+), 8 deletions(-) diff --git a/.azure-pipelines/docker/ubuntu-2404.devel b/.azure-pipelines/docker/ubuntu-2404.devel index 0d60d3d3252..c717d137d7e 100644 --- a/.azure-pipelines/docker/ubuntu-2404.devel +++ b/.azure-pipelines/docker/ubuntu-2404.devel @@ -24,6 +24,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends --fix-missing \ python3.12-dev \ autoconf \ build-essential \ + cmake \ git \ libomp-dev \ numactl \ diff --git a/.azure-pipelines/scripts/install_nc.sh b/.azure-pipelines/scripts/install_nc.sh index 30ba68510c0..8f6f7b4800b 100644 --- a/.azure-pipelines/scripts/install_nc.sh +++ b/.azure-pipelines/scripts/install_nc.sh @@ -2,7 +2,6 @@ echo -e "##[group]Install Neural Compressor ... " cd /neural-compressor -pip install cmake==3.31.6 if [[ $1 = *"3x_pt"* ]]; then python -m pip install --no-cache-dir -r requirements_pt.txt if [[ $1 = *"3x_pt_fp8"* ]]; then diff --git a/.azure-pipelines/scripts/models/env_setup.sh b/.azure-pipelines/scripts/models/env_setup.sh index 0182b2781c9..852019466b1 100644 --- a/.azure-pipelines/scripts/models/env_setup.sh +++ b/.azure-pipelines/scripts/models/env_setup.sh @@ -50,10 +50,9 @@ done SCRIPTS_PATH="/neural-compressor/.azure-pipelines/scripts/models" log_dir="/neural-compressor/.azure-pipelines/scripts/models" if [[ "${inc_new_api}" == "3x"* ]]; then - pip install cmake==3.31.6 WORK_SOURCE_DIR="/neural-compressor/examples/${framework}" - git clone https://github.com/intel/intel-extension-for-transformers.git /itrex - cd /itrex + git clone https://github.com/intel/intel-extension-for-transformers.git ~/itrex + cd ~/itrex pip install -r requirements.txt pip install -v . 
else diff --git a/.azure-pipelines/scripts/ut/3x/run_3x_tf.sh b/.azure-pipelines/scripts/ut/3x/run_3x_tf.sh index 60986e4cf60..317b261be4d 100644 --- a/.azure-pipelines/scripts/ut/3x/run_3x_tf.sh +++ b/.azure-pipelines/scripts/ut/3x/run_3x_tf.sh @@ -41,7 +41,6 @@ pytest --cov="${inc_path}" --cov-append -vs --disable-warnings --html=report_tf. # test for tensorflow new api ut pip uninstall tensorflow -y pip install /tf_dataset/tf_binary/230928/tensorflow*.whl -pip install cmake==3.31.6 pip install protobuf==3.20.3 pip install horovod==0.27.0 pip list diff --git a/.azure-pipelines/scripts/ut/env_setup.sh b/.azure-pipelines/scripts/ut/env_setup.sh index c4eed7f0779..f12ee6efa07 100644 --- a/.azure-pipelines/scripts/ut/env_setup.sh +++ b/.azure-pipelines/scripts/ut/env_setup.sh @@ -24,7 +24,6 @@ if [[ "${tensorflow_version}" == *"-official" ]]; then pip install tensorflow==${tensorflow_version%-official} elif [[ "${tensorflow_version}" == "spr-base" ]]; then pip install /tf_dataset/tf_binary/230928/tensorflow*.whl - pip install cmake==3.31.6 pip install protobuf==3.20.3 pip install horovod==0.27.0 if [[ $? -ne 0 ]]; then @@ -67,7 +66,6 @@ fi # install special test env requirements # common deps -pip install cmake==3.31.6 pip install transformers==4.50.0 if [[ $(echo "${test_case}" | grep -c "others") != 0 ]];then diff --git a/.azure-pipelines/scripts/ut/run_basic_adaptor.sh b/.azure-pipelines/scripts/ut/run_basic_adaptor.sh index 762e0d6e514..0ed1d9c8255 100644 --- a/.azure-pipelines/scripts/ut/run_basic_adaptor.sh +++ b/.azure-pipelines/scripts/ut/run_basic_adaptor.sh @@ -8,7 +8,6 @@ source /neural-compressor/.azure-pipelines/scripts/fwk_version.sh $1 echo "set up UT env..." bash /neural-compressor/.azure-pipelines/scripts/ut/env_setup.sh "${test_case}" -pip install cmake==4.0.0 export LD_LIBRARY_PATH=${HOME}/.local/lib/:$LD_LIBRARY_PATH export COVERAGE_RCFILE=/neural-compressor/.azure-pipelines/scripts/ut/coverage.file lpot_path=$(python -c 'import neural_compressor; import os; print(os.path.dirname(neural_compressor.__file__))') diff --git a/.azure-pipelines/template/model-template.yml b/.azure-pipelines/template/model-template.yml index be9daae02cd..060758bbbc3 100644 --- a/.azure-pipelines/template/model-template.yml +++ b/.azure-pipelines/template/model-template.yml @@ -76,6 +76,7 @@ steps: inputs: targetType: "inline" script: | + docker exec ${{ parameters.modelContainerName }} bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true" docker stop ${{ parameters.modelContainerName }} docker rm -vf ${{ parameters.modelContainerName }} || true displayName: "Docker clean up" diff --git a/.azure-pipelines/template/ut-template.yml b/.azure-pipelines/template/ut-template.yml index cc27116e61c..78536b3476b 100644 --- a/.azure-pipelines/template/ut-template.yml +++ b/.azure-pipelines/template/ut-template.yml @@ -60,6 +60,7 @@ steps: inputs: targetType: "inline" script: | + docker exec ${{ parameters.modelContainerName }} bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true" docker stop ${{ parameters.utContainerName }} docker rm -vf ${{ parameters.utContainerName }} || true displayName: "Docker clean up" From ac461921369e6cdd5246ff27bb81cfce2bde2a21 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 5 Nov 2025 07:36:19 +0000 Subject: [PATCH 07/20] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- 
.azure-pipelines/template/model-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.azure-pipelines/template/model-template.yml b/.azure-pipelines/template/model-template.yml index 060758bbbc3..e8a6755bf2f 100644 --- a/.azure-pipelines/template/model-template.yml +++ b/.azure-pipelines/template/model-template.yml @@ -76,7 +76,7 @@ steps: inputs: targetType: "inline" script: | - docker exec ${{ parameters.modelContainerName }} bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true" + docker exec ${{ parameters.modelContainerName }} bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true" docker stop ${{ parameters.modelContainerName }} docker rm -vf ${{ parameters.modelContainerName }} || true displayName: "Docker clean up" From 2a0459b4978c55320eb379aa26aa5eec32436322 Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Wed, 5 Nov 2025 15:36:52 +0800 Subject: [PATCH 08/20] fix name Signed-off-by: Sun, Xuehao --- .azure-pipelines/template/ut-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.azure-pipelines/template/ut-template.yml b/.azure-pipelines/template/ut-template.yml index 78536b3476b..f083e4a3bca 100644 --- a/.azure-pipelines/template/ut-template.yml +++ b/.azure-pipelines/template/ut-template.yml @@ -60,7 +60,7 @@ steps: inputs: targetType: "inline" script: | - docker exec ${{ parameters.modelContainerName }} bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true" + docker exec ${{ parameters.utContainerName }} bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true" docker stop ${{ parameters.utContainerName }} docker rm -vf ${{ parameters.utContainerName }} || true displayName: "Docker clean up" From 9451d0e27e1a08ba9e697c6ff7f77a2b2614b46c Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Wed, 5 Nov 2025 15:58:38 +0800 Subject: [PATCH 09/20] test Signed-off-by: Sun, Xuehao --- .azure-pipelines/docker/ubuntu-2404.devel | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.azure-pipelines/docker/ubuntu-2404.devel b/.azure-pipelines/docker/ubuntu-2404.devel index c717d137d7e..d279adf93fc 100644 --- a/.azure-pipelines/docker/ubuntu-2404.devel +++ b/.azure-pipelines/docker/ubuntu-2404.devel @@ -46,8 +46,8 @@ USER hostuser ENV PATH="/home/hostuser/.local/bin:$PATH" RUN pip config set global.break-system-packages true -RUN python -m pip install pip packaging --upgrade -RUN python -m pip install --no-cache-dir setuptools +RUN pip install pip packaging --upgrade +RUN pip install --no-cache-dir setuptools RUN pip list WORKDIR / From b34323e9054f7ad48544af928510fd3230f03ec6 Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Fri, 14 Nov 2025 10:32:35 +0800 Subject: [PATCH 10/20] remove unused code Signed-off-by: Sun, Xuehao --- .azure-pipelines/scripts/models/env_setup.sh | 37 +------------------- 1 file changed, 1 insertion(+), 36 deletions(-) diff --git a/.azure-pipelines/scripts/models/env_setup.sh b/.azure-pipelines/scripts/models/env_setup.sh index 852019466b1..ddbaa7a2676 100644 --- a/.azure-pipelines/scripts/models/env_setup.sh +++ b/.azure-pipelines/scripts/models/env_setup.sh @@ -49,15 +49,7 @@ done SCRIPTS_PATH="/neural-compressor/.azure-pipelines/scripts/models" log_dir="/neural-compressor/.azure-pipelines/scripts/models" -if [[ "${inc_new_api}" == "3x"* ]]; then - WORK_SOURCE_DIR="/neural-compressor/examples/${framework}" - git clone https://github.com/intel/intel-extension-for-transformers.git ~/itrex - cd ~/itrex - pip 
install -r requirements.txt - pip install -v . -else - WORK_SOURCE_DIR="/neural-compressor/examples/deprecated/${framework}" -fi +WORK_SOURCE_DIR="/neural-compressor/examples/${framework}" $BOLD_YELLOW && echo "processing ${framework}-${fwk_ver}-${model}" && $RESET @@ -74,17 +66,6 @@ $BOLD_YELLOW && echo "====== install requirements ======" && $RESET /bin/bash /neural-compressor/.azure-pipelines/scripts/install_nc.sh ${inc_new_api} mkdir -p ${WORK_SOURCE_DIR} -cd ${WORK_SOURCE_DIR} -if [[ "${inc_new_api}" == "false" ]]; then - echo "copy old api examples to workspace..." - git clone -b old_api_examples https://github.com/intel/neural-compressor.git old-lpot-models - cd old-lpot-models - git branch - cd - - rm -rf ${model_src_dir} - mkdir -p ${model_src_dir} - cp -r old-lpot-models/examples/${framework}/${model_src_dir} ${WORK_SOURCE_DIR}/${model_src_dir}/../ -fi cd ${model_src_dir} @@ -129,19 +110,3 @@ if [ -f "requirements.txt" ]; then else $BOLD_RED && echo "Not found requirements.txt file." && $RESET fi - -if [[ "${inc_new_api}" == "false" ]]; then - $BOLD_YELLOW && echo "======== update yaml config ========" && $RESET - $BOLD_YELLOW && echo -e "\nPrint origin yaml..." && $RESET - cat ${yaml} - python ${SCRIPTS_PATH}/update_yaml_config.py \ - --yaml=${yaml} \ - --framework=${framework} \ - --dataset_location=${dataset_location} \ - --batch_size=${batch_size} \ - --strategy=${strategy} \ - --new_benchmark=${new_benchmark} \ - --multi_instance='true' - $BOLD_YELLOW && echo -e "\nPrint updated yaml... " && $RESET - cat ${yaml} -fi From e22df1347f4920784607e5ce5f85568bc780b62f Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Mon, 17 Nov 2025 13:14:36 +0800 Subject: [PATCH 11/20] disable tf CI Signed-off-by: Sun, Xuehao --- .azure-pipelines/ut-basic.yml | 70 +++++++++++++++++------------------ 1 file changed, 35 insertions(+), 35 deletions(-) diff --git a/.azure-pipelines/ut-basic.yml b/.azure-pipelines/ut-basic.yml index 769f781ba6c..fddc5883a2e 100644 --- a/.azure-pipelines/ut-basic.yml +++ b/.azure-pipelines/ut-basic.yml @@ -79,43 +79,43 @@ stages: utScriptFileName: "run_basic_pt_pruning" uploadPath: $(UPLOAD_PATH) utArtifact: "ut_pt-pruning" - - job: - displayName: Test TensorFlow Pruning - steps: - - template: template/ut-template.yml - parameters: - dockerConfigName: "commonDockerConfig" - utScriptFileName: "run_basic_tf_pruning" - uploadPath: $(UPLOAD_PATH) - utArtifact: "ut_tf-pruning" + # - job: + # displayName: Test TensorFlow Pruning + # steps: + # - template: template/ut-template.yml + # parameters: + # dockerConfigName: "commonDockerConfig" + # utScriptFileName: "run_basic_tf_pruning" + # uploadPath: $(UPLOAD_PATH) + # utArtifact: "ut_tf-pruning" - - stage: TFNewAPI - displayName: Unit Test TF newAPI - dependsOn: [] - jobs: - - job: - displayName: Test TF newAPI - steps: - - template: template/ut-template.yml - parameters: - dockerConfigName: "commonDockerConfig" - utScriptFileName: "run_basic_adaptor_tfnewapi" - uploadPath: $(UPLOAD_PATH) - utArtifact: "ut_tfnewapi" + # - stage: TFNewAPI + # displayName: Unit Test TF newAPI + # dependsOn: [] + # jobs: + # - job: + # displayName: Test TF newAPI + # steps: + # - template: template/ut-template.yml + # parameters: + # dockerConfigName: "commonDockerConfig" + # utScriptFileName: "run_basic_adaptor_tfnewapi" + # uploadPath: $(UPLOAD_PATH) + # utArtifact: "ut_tfnewapi" - - stage: ITEX - displayName: Unit Test ITEX - dependsOn: [] - jobs: - - job: - displayName: Test ITEX - steps: - - template: template/ut-template.yml - parameters: 
- dockerConfigName: "commonDockerConfig" - utScriptFileName: "run_basic_itex" - uploadPath: $(UPLOAD_PATH) - utArtifact: "ut_itex" + # - stage: ITEX + # displayName: Unit Test ITEX + # dependsOn: [] + # jobs: + # - job: + # displayName: Test ITEX + # steps: + # - template: template/ut-template.yml + # parameters: + # dockerConfigName: "commonDockerConfig" + # utScriptFileName: "run_basic_itex" + # uploadPath: $(UPLOAD_PATH) + # utArtifact: "ut_itex" - stage: Others displayName: Unit Test other basic case From c493cbc7413b5019c64cca7ef96f6942384c04f4 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 17 Nov 2025 05:16:03 +0000 Subject: [PATCH 12/20] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .github/workflows/pr-link-scan.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/pr-link-scan.yml b/.github/workflows/pr-link-scan.yml index edc1d5f7d14..dc0b7ac68b6 100644 --- a/.github/workflows/pr-link-scan.yml +++ b/.github/workflows/pr-link-scan.yml @@ -40,11 +40,11 @@ jobs: fail="FALSE" merged_commit=$(git log -1 --format='%H') changed_files="$(git diff --name-status --diff-filter=ARM $BASE_SHA ${merged_commit} | awk '/\.md$/ {print $NF}')" - + echo "no_proxy=$no_proxy" echo "http_proxy=$http_proxy" echo "https_proxy=$https_proxy" - + if [ -n "$changed_files" ]; then for changed_file in $changed_files; do # echo $changed_file @@ -118,11 +118,11 @@ jobs: merged_commit=$(git log -1 --format='%H') changed_files="$(git diff --name-status --diff-filter=ARM $BASE_SHA ${merged_commit} | awk '/\.md$/ {print $NF}')" png_lines=$(grep -Eo '\]\([^)]+\)' --include='*.md' -r .|grep -Ev 'http' | grep -Ev 'shape=' | grep -Ev 'mailto:inc.maintainers@intel.com') - + echo "no_proxy=$no_proxy" echo "http_proxy=$http_proxy" echo "https_proxy=$https_proxy" - + if [ -n "$png_lines" ]; then for png_line in $png_lines; do # echo "No.1----->png_line is $png_line" From 98ec5c17d118367d4837f8ccbbb94861fce1ce25 Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Mon, 17 Nov 2025 13:58:30 +0800 Subject: [PATCH 13/20] fix Signed-off-by: Sun, Xuehao --- .azure-pipelines/scripts/models/env_setup.sh | 16 ++---------- .azure-pipelines/ut-basic.yml | 26 ++++++++++---------- 2 files changed, 15 insertions(+), 27 deletions(-) diff --git a/.azure-pipelines/scripts/models/env_setup.sh b/.azure-pipelines/scripts/models/env_setup.sh index ddbaa7a2676..bf946035ce8 100644 --- a/.azure-pipelines/scripts/models/env_setup.sh +++ b/.azure-pipelines/scripts/models/env_setup.sh @@ -65,21 +65,13 @@ fi $BOLD_YELLOW && echo "====== install requirements ======" && $RESET /bin/bash /neural-compressor/.azure-pipelines/scripts/install_nc.sh ${inc_new_api} -mkdir -p ${WORK_SOURCE_DIR} - -cd ${model_src_dir} +cd ${WORK_SOURCE_DIR}/${model_src_dir} if [[ "${fwk_ver}" != "latest" ]]; then pip install ruamel.yaml==0.17.40 pip install psutil pip install protobuf==4.23.4 - if [[ "${framework}" == "tensorflow" ]]; then - if [[ "${fwk_ver}" == *"-official" ]]; then - pip install tensorflow==${fwk_ver%-official} - else - pip install intel-tensorflow==${fwk_ver} - fi - elif [[ "${framework}" == "pytorch" ]]; then + if [[ "${framework}" == "pytorch" ]]; then pip install torch==${fwk_ver} --index-url https://download.pytorch.org/whl/cpu pip install torchvision==${torch_vision_ver} --index-url https://download.pytorch.org/whl/cpu elif [[ "${framework}" == "onnxrt" ]]; then @@ -93,10 +85,6 @@ if [ 
-f "requirements.txt" ]; then if [ "${framework}" == "onnxrt" ]; then sed -i '/^onnx>=/d;/^onnx==/d;/^onnxruntime>=/d;/^onnxruntime==/d' requirements.txt fi - if [ "${framework}" == "tensorflow" ]; then - sed -i '/tensorflow==/d;/tensorflow$/d' requirements.txt - sed -i '/^intel-tensorflow/d' requirements.txt - fi if [ "${framework}" == "pytorch" ]; then sed -i '/torch==/d;/torch$/d;/torchvision==/d;/torchvision$/d' requirements.txt fi diff --git a/.azure-pipelines/ut-basic.yml b/.azure-pipelines/ut-basic.yml index fddc5883a2e..7cdca06f47c 100644 --- a/.azure-pipelines/ut-basic.yml +++ b/.azure-pipelines/ut-basic.yml @@ -38,19 +38,19 @@ variables: REPO: $(Build.Repository.Uri) stages: - - stage: Adaptor - displayName: Unit Test FWKs adaptor - dependsOn: [] - jobs: - - job: - displayName: Test FWKs adaptor - steps: - - template: template/ut-template.yml - parameters: - dockerConfigName: "commonDockerConfig" - utScriptFileName: "run_basic_adaptor" - uploadPath: $(UPLOAD_PATH) - utArtifact: "ut_adaptor" + # - stage: Adaptor + # displayName: Unit Test FWKs adaptor + # dependsOn: [] + # jobs: + # - job: + # displayName: Test FWKs adaptor + # steps: + # - template: template/ut-template.yml + # parameters: + # dockerConfigName: "commonDockerConfig" + # utScriptFileName: "run_basic_adaptor" + # uploadPath: $(UPLOAD_PATH) + # utArtifact: "ut_adaptor" - stage: API displayName: Unit Test User facing API From e7adcd78045089022587d04444f404027e99827d Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Mon, 17 Nov 2025 16:36:29 +0800 Subject: [PATCH 14/20] fix Signed-off-by: Sun, Xuehao --- .azure-pipelines/scripts/ut/3x/collect_log_3x.sh | 1 + .azure-pipelines/ut-3x-pt-fp8.yml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh b/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh index 03f4fd02dbf..326b14ee2e0 100644 --- a/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh +++ b/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh @@ -1,3 +1,4 @@ +#!/bin/bash source /neural-compressor/.azure-pipelines/scripts/change_color.sh set -e diff --git a/.azure-pipelines/ut-3x-pt-fp8.yml b/.azure-pipelines/ut-3x-pt-fp8.yml index 11040891056..32f9941cd19 100644 --- a/.azure-pipelines/ut-3x-pt-fp8.yml +++ b/.azure-pipelines/ut-3x-pt-fp8.yml @@ -76,7 +76,7 @@ stages: steps: - script: | if [[ ! $(docker images | grep -i ${IMAGE_NAME}:${IMAGE_TAG}) ]]; then - docker build -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/Dockerfile.devel -t ${IMAGE_NAME}:${IMAGE_TAG} . + docker build -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/ubuntu-2404.devel -t ${IMAGE_NAME}:${IMAGE_TAG} . fi docker images | grep -i ${IMAGE_NAME} if [[ $? -ne 0 ]]; then From 4757831a4712c3148d933a4d38df870627bc36c0 Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Tue, 18 Nov 2025 09:31:06 +0800 Subject: [PATCH 15/20] fix coverage Signed-off-by: Sun, Xuehao --- .azure-pipelines/scripts/install_nc.sh | 1 - .../scripts/ut/3x/collect_log_3x.sh | 36 +++++++++---------- .azure-pipelines/ut-3x-pt-fp8.yml | 30 ++-------------- .azure-pipelines/ut-3x-pt.yml | 31 ++-------------- 4 files changed, 24 insertions(+), 74 deletions(-) diff --git a/.azure-pipelines/scripts/install_nc.sh b/.azure-pipelines/scripts/install_nc.sh index 8f6f7b4800b..ba6f0f0e657 100644 --- a/.azure-pipelines/scripts/install_nc.sh +++ b/.azure-pipelines/scripts/install_nc.sh @@ -1,7 +1,6 @@ #!/bin/bash echo -e "##[group]Install Neural Compressor ... 
" -cd /neural-compressor if [[ $1 = *"3x_pt"* ]]; then python -m pip install --no-cache-dir -r requirements_pt.txt if [[ $1 = *"3x_pt_fp8"* ]]; then diff --git a/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh b/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh index 326b14ee2e0..d87a9596ba0 100644 --- a/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh +++ b/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh @@ -1,18 +1,18 @@ #!/bin/bash -source /neural-compressor/.azure-pipelines/scripts/change_color.sh +source ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/scripts/change_color.sh set -e pip install coverage -export COVERAGE_RCFILE=/neural-compressor/.azure-pipelines/scripts/ut/3x/coverage.${1} -coverage_log="/neural-compressor/log_dir/coverage_log" -coverage_log_base="/neural-compressor/log_dir/coverage_log_base" -coverage_compare="/neural-compressor/log_dir/coverage_compare.html" -cd /neural-compressor/log_dir +export COVERAGE_RCFILE=${BUILD_SOURCESDIRECTORY}/.azure-pipelines/scripts/ut/3x/coverage.${1} +coverage_log="${BUILD_SOURCESDIRECTORY}/log_dir/coverage_log" +coverage_log_base="${BUILD_SOURCESDIRECTORY}/log_dir/coverage_log_base" +coverage_compare="${BUILD_SOURCESDIRECTORY}/log_dir/coverage_compare.html" +cd ${BUILD_SOURCESDIRECTORY}/log_dir $BOLD_YELLOW && echo "collect coverage for PR branch" && $RESET -cp ut_3x_coverage/.coverage /neural-compressor/ +cp ut_3x_coverage/.coverage ${BUILD_SOURCESDIRECTORY}/ mkdir -p coverage_PR -cd /neural-compressor +cd ${BUILD_SOURCESDIRECTORY} coverage report -m --rcfile=${COVERAGE_RCFILE} | tee ${coverage_log} coverage html -d log_dir/coverage_PR/htmlcov --rcfile=${COVERAGE_RCFILE} coverage xml -o log_dir/coverage_PR/coverage.xml --rcfile=${COVERAGE_RCFILE} @@ -20,23 +20,23 @@ ls -l log_dir/coverage_PR/htmlcov $BOLD_YELLOW && echo "collect coverage for baseline" && $RESET -cd /neural-compressor -cp -r /neural-compressor/.azure-pipelines .azure-pipelines-pr -git config --global --add safe.directory /neural-compressor +cd ${BUILD_SOURCESDIRECTORY} +cp -r ${BUILD_SOURCESDIRECTORY}/.azure-pipelines .azure-pipelines-pr +git config --global --add safe.directory ${BUILD_SOURCESDIRECTORY} git fetch git checkout master rm -rf build dist *egg-info binary_index="${1%_fp8}" echo y | pip uninstall neural_compressor_${binary_index} -cd /neural-compressor/.azure-pipelines-pr/scripts && bash install_nc.sh ${1} +cd ${BUILD_SOURCESDIRECTORY}/.azure-pipelines-pr/scripts && bash install_nc.sh ${1} coverage erase -cd /neural-compressor/log_dir +cd ${BUILD_SOURCESDIRECTORY}/log_dir mkdir -p coverage_base -rm -rf /neural-compressor/.coverage || true -cp ut_3x_baseline_coverage/.coverage /neural-compressor +rm -rf ${BUILD_SOURCESDIRECTORY}/.coverage || true +cp ut_3x_baseline_coverage/.coverage ${BUILD_SOURCESDIRECTORY} -cd /neural-compressor +cd ${BUILD_SOURCESDIRECTORY} coverage report -m --rcfile=${COVERAGE_RCFILE} | tee ${coverage_log_base} coverage html -d log_dir/coverage_base/htmlcov --rcfile=${COVERAGE_RCFILE} coverage xml -o log_dir/coverage_base/coverage.xml --rcfile=${COVERAGE_RCFILE} @@ -126,10 +126,10 @@ if [[ ${#fail_items[@]} -ne 0 ]]; then $BOLD_RED && echo "Unit Test failed with ${item} coverage decrease ${decrease}%" && $RESET done $BOLD_RED && echo "compare coverage to give detail info" && $RESET - bash /neural-compressor/.azure-pipelines-pr/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "FAILED" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate} + 
bash ${BUILD_SOURCESDIRECTORY}/.azure-pipelines-pr/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "FAILED" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate} exit 1 else $BOLD_GREEN && echo "Unit Test success with coverage lines: ${coverage_PR_lines_rate}%, branches: ${coverage_PR_branches_rate}%" && $RESET $BOLD_GREEN && echo "compare coverage to give detail info" && $RESET - bash /neural-compressor/.azure-pipelines-pr/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "SUCCESS" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate} + bash ${BUILD_SOURCESDIRECTORY}/.azure-pipelines-pr/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "SUCCESS" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate} fi diff --git a/.azure-pipelines/ut-3x-pt-fp8.yml b/.azure-pipelines/ut-3x-pt-fp8.yml index 32f9941cd19..aa68da7dad2 100644 --- a/.azure-pipelines/ut-3x-pt-fp8.yml +++ b/.azure-pipelines/ut-3x-pt-fp8.yml @@ -74,17 +74,6 @@ stages: jobs: - job: CollectDatafiles steps: - - script: | - if [[ ! $(docker images | grep -i ${IMAGE_NAME}:${IMAGE_TAG}) ]]; then - docker build -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/ubuntu-2404.devel -t ${IMAGE_NAME}:${IMAGE_TAG} . - fi - docker images | grep -i ${IMAGE_NAME} - if [[ $? -ne 0 ]]; then - echo "NO Such Repo" - exit 1 - fi - displayName: "Build develop docker image" - - task: DownloadPipelineArtifact@2 inputs: artifact: @@ -92,14 +81,9 @@ stages: path: $(DOWNLOAD_PATH) - script: | - echo "--- create container ---" - docker run -d -it --name="collectLogs" -v ${BUILD_SOURCESDIRECTORY}:/neural-compressor ${IMAGE_NAME}:${IMAGE_TAG} /bin/bash - echo "--- docker ps ---" - docker ps - echo "--- collect logs ---" - docker exec collectLogs /bin/bash +x -c "cd /neural-compressor/.azure-pipelines/scripts \ - && bash install_nc.sh 3x_pt_fp8 \ - && bash ut/3x/collect_log_3x.sh 3x_pt_fp8" + cd ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/scripts + bash install_nc.sh 3x_pt_fp8 + bash ut/3x/collect_log_3x.sh 3x_pt_fp8 displayName: "Collect UT Coverage" - task: PublishCodeCoverageResults@2 @@ -112,11 +96,3 @@ stages: targetPath: $(UPLOAD_PATH) artifact: $(ARTIFACT_NAME) publishLocation: "pipeline" - - - task: Bash@3 - condition: always() - inputs: - targetType: "inline" - script: | - docker exec collectLogs bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true" - displayName: "Docker clean up" diff --git a/.azure-pipelines/ut-3x-pt.yml b/.azure-pipelines/ut-3x-pt.yml index 2b8ccf7069f..d5326319f2a 100644 --- a/.azure-pipelines/ut-3x-pt.yml +++ b/.azure-pipelines/ut-3x-pt.yml @@ -72,18 +72,6 @@ stages: jobs: - job: CollectDatafiles steps: - - script: | - if [[ ! $(docker images | grep -i ${IMAGE_NAME}:${IMAGE_TAG}) ]]; then - docker build --build-arg USER_ID=$(id -u) --build-arg GROUP_ID=$(id -g) \ - -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/ubuntu-2404.devel -t ${IMAGE_NAME}:${IMAGE_TAG} . - fi - docker images | grep -i ${IMAGE_NAME} - if [[ $? 
From 4fc997cb81f4f5c308bad72669c2d255a99434ce Mon Sep 17 00:00:00 2001
From: "Sun, Xuehao"
Date: Tue, 18 Nov 2025 09:38:34 +0800
Subject: [PATCH 16/20] fix

Signed-off-by: Sun, Xuehao
---
 .azure-pipelines/scripts/models/env_setup.sh | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.azure-pipelines/scripts/models/env_setup.sh b/.azure-pipelines/scripts/models/env_setup.sh
index bf946035ce8..c8b6d59afb7 100644
--- a/.azure-pipelines/scripts/models/env_setup.sh
+++ b/.azure-pipelines/scripts/models/env_setup.sh
@@ -63,7 +63,8 @@ else
 fi
 
 $BOLD_YELLOW && echo "====== install requirements ======" && $RESET
-/bin/bash /neural-compressor/.azure-pipelines/scripts/install_nc.sh ${inc_new_api}
+cd /neural-compressor/.azure-pipelines/scripts
+bash install_nc.sh ${inc_new_api}
 
 cd ${WORK_SOURCE_DIR}/${model_src_dir}

From 49484d49beb75c932185bb2c42e54771d862d0d9 Mon Sep 17 00:00:00 2001
From: "Sun, Xuehao"
Date: Tue, 18 Nov 2025 09:40:59 +0800
Subject: [PATCH 17/20] fix

Signed-off-by: Sun, Xuehao
---
 .azure-pipelines/scripts/models/env_setup.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.azure-pipelines/scripts/models/env_setup.sh b/.azure-pipelines/scripts/models/env_setup.sh
index c8b6d59afb7..56666dafcef 100644
--- a/.azure-pipelines/scripts/models/env_setup.sh
+++ b/.azure-pipelines/scripts/models/env_setup.sh
@@ -63,8 +63,8 @@ else
 fi
 
 $BOLD_YELLOW && echo "====== install requirements ======" && $RESET
-cd /neural-compressor/.azure-pipelines/scripts
-bash install_nc.sh ${inc_new_api}
+cd /neural-compressor
+bash /neural-compressor/.azure-pipelines/scripts/install_nc.sh ${inc_new_api}
 
 cd ${WORK_SOURCE_DIR}/${model_src_dir}
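
Note: patches 16 and 17 are a change and its revert; the net effect is to keep
install_nc.sh running from the repository root. The script pulls its
dependencies through relative paths (pip install -r requirements_pt.txt), so the
working directory matters, and the next patch moves the cd into the script
itself. An illustration of the failure mode being avoided (a sketch only; the
container layout is assumed from earlier patches in this series):

    cd /neural-compressor/.azure-pipelines/scripts
    bash install_nc.sh 3x_pt    # would fail: ./requirements_pt.txt is not in the scripts dir
    cd /neural-compressor
    bash .azure-pipelines/scripts/install_nc.sh 3x_pt    # works: requirements resolve at the repo root
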
" +cd /neural-compressor if [[ $1 = *"3x_pt"* ]]; then python -m pip install --no-cache-dir -r requirements_pt.txt if [[ $1 = *"3x_pt_fp8"* ]]; then diff --git a/.azure-pipelines/ut-3x-pt-fp8.yml b/.azure-pipelines/ut-3x-pt-fp8.yml index aa68da7dad2..006b623b091 100644 --- a/.azure-pipelines/ut-3x-pt-fp8.yml +++ b/.azure-pipelines/ut-3x-pt-fp8.yml @@ -80,9 +80,14 @@ stages: patterns: '*_coverage/.coverage' path: $(DOWNLOAD_PATH) + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.12' + displayName: 'Use Python 3.12' + - script: | + python setup.py install cd ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/scripts - bash install_nc.sh 3x_pt_fp8 bash ut/3x/collect_log_3x.sh 3x_pt_fp8 displayName: "Collect UT Coverage" diff --git a/.azure-pipelines/ut-3x-pt.yml b/.azure-pipelines/ut-3x-pt.yml index d5326319f2a..567f07a0e69 100644 --- a/.azure-pipelines/ut-3x-pt.yml +++ b/.azure-pipelines/ut-3x-pt.yml @@ -78,9 +78,15 @@ stages: patterns: '*_coverage/.coverage' path: $(DOWNLOAD_PATH) + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.12' + displayName: 'Use Python 3.12' + - script: | + pip install -r requirements.txt + python setup.py install cd ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/scripts - bash install_nc.sh 3x_pt bash ut/3x/collect_log_3x.sh 3x_pt displayName: "Collect UT Coverage" From 73be8a16d59d73004f727712f1f01eb09a834e02 Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Tue, 18 Nov 2025 11:19:52 +0800 Subject: [PATCH 19/20] fix Signed-off-by: Sun, Xuehao --- .azure-pipelines/scripts/ut/3x/collect_log_3x.sh | 2 +- .azure-pipelines/ut-3x-pt-fp8.yml | 3 ++- .azure-pipelines/ut-3x-pt.yml | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh b/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh index d87a9596ba0..8595862a390 100644 --- a/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh +++ b/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh @@ -28,7 +28,7 @@ git checkout master rm -rf build dist *egg-info binary_index="${1%_fp8}" echo y | pip uninstall neural_compressor_${binary_index} -cd ${BUILD_SOURCESDIRECTORY}/.azure-pipelines-pr/scripts && bash install_nc.sh ${1} +cd ${BUILD_SOURCESDIRECTORY} && python setup.py install pt coverage erase cd ${BUILD_SOURCESDIRECTORY}/log_dir diff --git a/.azure-pipelines/ut-3x-pt-fp8.yml b/.azure-pipelines/ut-3x-pt-fp8.yml index 006b623b091..a7240e773f0 100644 --- a/.azure-pipelines/ut-3x-pt-fp8.yml +++ b/.azure-pipelines/ut-3x-pt-fp8.yml @@ -86,7 +86,8 @@ stages: displayName: 'Use Python 3.12' - script: | - python setup.py install + cd ${BUILD_SOURCESDIRECTORY} + python setup.py install pt cd ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/scripts bash ut/3x/collect_log_3x.sh 3x_pt_fp8 displayName: "Collect UT Coverage" diff --git a/.azure-pipelines/ut-3x-pt.yml b/.azure-pipelines/ut-3x-pt.yml index 567f07a0e69..73108df9926 100644 --- a/.azure-pipelines/ut-3x-pt.yml +++ b/.azure-pipelines/ut-3x-pt.yml @@ -84,8 +84,9 @@ stages: displayName: 'Use Python 3.12' - script: | + cd ${BUILD_SOURCESDIRECTORY} pip install -r requirements.txt - python setup.py install + python setup.py install pt cd ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/scripts bash ut/3x/collect_log_3x.sh 3x_pt displayName: "Collect UT Coverage" From 1181e2bf26d88862289fd99e7b8ebd738a598246 Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Tue, 18 Nov 2025 12:55:44 +0800 Subject: [PATCH 20/20] fix Signed-off-by: Sun, Xuehao --- .azure-pipelines/ut-3x-pt-fp8.yml | 1 + .azure-pipelines/ut-3x-pt.yml | 1 + 2 files changed, 2 
From 1181e2bf26d88862289fd99e7b8ebd738a598246 Mon Sep 17 00:00:00 2001
From: "Sun, Xuehao"
Date: Tue, 18 Nov 2025 12:55:44 +0800
Subject: [PATCH 20/20] fix

Signed-off-by: Sun, Xuehao
---
 .azure-pipelines/ut-3x-pt-fp8.yml | 1 +
 .azure-pipelines/ut-3x-pt.yml     | 1 +
 2 files changed, 2 insertions(+)

diff --git a/.azure-pipelines/ut-3x-pt-fp8.yml b/.azure-pipelines/ut-3x-pt-fp8.yml
index a7240e773f0..8a70f9d70bc 100644
--- a/.azure-pipelines/ut-3x-pt-fp8.yml
+++ b/.azure-pipelines/ut-3x-pt-fp8.yml
@@ -87,6 +87,7 @@ stages:
 
           - script: |
               cd ${BUILD_SOURCESDIRECTORY}
+              pip install -U pip setuptools
               python setup.py install pt
               cd ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/scripts
               bash ut/3x/collect_log_3x.sh 3x_pt_fp8
diff --git a/.azure-pipelines/ut-3x-pt.yml b/.azure-pipelines/ut-3x-pt.yml
index 73108df9926..a7d03393bc0 100644
--- a/.azure-pipelines/ut-3x-pt.yml
+++ b/.azure-pipelines/ut-3x-pt.yml
@@ -85,6 +85,7 @@ stages:
 
           - script: |
               cd ${BUILD_SOURCESDIRECTORY}
+              pip install -U pip setuptools
               pip install -r requirements.txt
               python setup.py install pt
               cd ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/scripts
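
Note: the final patch upgrades pip and setuptools before the setup.py install pt
call. Python 3.12, the version the UsePythonVersion task pins, no longer ships
the stdlib distutils that older setuptools releases fell back on, so the legacy
setup.py install path needs a current setuptools. A quick sanity check one could
run on the agent (illustrative only; no exact version floor is pinned by the
patch):

    python -c 'import sys; assert sys.version_info[:2] == (3, 12), sys.version'
    pip install -U pip setuptools
    python -c 'import setuptools; print(setuptools.__version__)'   # expect a recent release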