trigger: none

pr:
  autoCancel: true
  drafts: false
  branches:
    include:
      - master
  paths:
    include:
      - neural_compressor/common
      - neural_compressor/tensorflow
      - test/3x/tensorflow
      - setup.py
      - requirements_tf.txt

pool: ICX-16C

variables:
  IMAGE_NAME: "neural-compressor"
  IMAGE_TAG: "py38"
  UPLOAD_PATH: $(Build.SourcesDirectory)/log_dir
  DOWNLOAD_PATH: $(Build.SourcesDirectory)/log_dir
  ARTIFACT_NAME: "UT_coverage_report_3x_tf"
  REPO: $(Build.Repository.Uri)

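# Two independent unit-test stages (both declare dependsOn: []) run in parallel;
# the Coverage stage below waits for both and combines their coverage reports.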
stages:
  - stage: TensorFlow
    displayName: Unit Test 3x TensorFlow
    dependsOn: []
    jobs:
      - job:
        displayName: Unit Test 3x TensorFlow
        steps:
          - template: template/ut-template.yml
            parameters:
              dockerConfigName: "commonDockerConfig"
              utScriptFileName: "3x/run_3x_tf"
              uploadPath: $(UPLOAD_PATH)
              utArtifact: "ut_coverage_3x"

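  # Baseline stage: dockerConfigName "gitCloneDockerConfig" together with repo: $(REPO)
  # suggests the template clones the upstream repository so the same tests run against
  # the base branch, producing a reference coverage report for comparison.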
  - stage: TensorFlow_baseline
    displayName: Unit Test 3x TensorFlow baseline
    dependsOn: []
    jobs:
      - job:
        displayName: Unit Test 3x TensorFlow baseline
        steps:
          - template: template/ut-template.yml
            parameters:
              dockerConfigName: "gitCloneDockerConfig"
              utScriptFileName: "3x/run_3x_tf"
              uploadPath: $(UPLOAD_PATH)
              utArtifact: "ut_coverage_3x_baseline"
              repo: $(REPO)

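  # Coverage stage: runs only after both test stages finish, downloads their
  # published artifacts, and combines the coverage data inside the devel image.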
  - stage: Coverage
    displayName: "Coverage Combine"
    pool:
      vmImage: "ubuntu-latest"
    dependsOn: [TensorFlow, TensorFlow_baseline]
    jobs:
      - job: CollectDatafiles
        steps:
          - script: |
              # Build the devel image only if it is not already present locally.
              if [[ ! $(docker images | grep -i ${IMAGE_NAME}:${IMAGE_TAG}) ]]; then
                docker build -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/Dockerfile.devel -t ${IMAGE_NAME}:${IMAGE_TAG} .
              fi
              # Fail fast if the image is still missing after the build attempt.
              docker images | grep -i ${IMAGE_NAME}
              if [[ $? -ne 0 ]]; then
                echo "NO Such Repo"
                exit 1
              fi
            displayName: "Build develop docker image"

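          # No artifact name is specified, so DownloadPipelineArtifact@2 downloads all
          # artifacts published by this run into $(DOWNLOAD_PATH).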
          - task: DownloadPipelineArtifact@2
            inputs:
              artifact:
              path: $(DOWNLOAD_PATH)

          - script: |
              echo "--- create container ---"
              docker run -d -it --name="collectLogs" -v ${BUILD_SOURCESDIRECTORY}:/neural-compressor ${IMAGE_NAME}:${IMAGE_TAG} /bin/bash
              echo "--- docker ps ---"
              docker ps
              echo "--- collect logs ---"
              # Install the 3x TensorFlow flavor of neural-compressor inside the container,
              # then run the script that processes the downloaded coverage logs.
              docker exec collectLogs /bin/bash +x -c "cd /neural-compressor/.azure-pipelines/scripts \
              && bash install_nc.sh 3x_tf \
              && bash ut/3x/collect_log_3x.sh 3x_tf"
            displayName: "collect logs"

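          # succeededOrFailed() publishes the coverage report even if an earlier step
          # in this job failed, so the report stays available for debugging.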
          - task: PublishPipelineArtifact@1
            condition: succeededOrFailed()
            inputs:
              targetPath: $(UPLOAD_PATH)
              artifact: $(ARTIFACT_NAME)
              publishLocation: "pipeline"

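          # always() ensures the source tree mounted into the collectLogs container is
          # emptied no matter how the previous steps ended.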
          - task: Bash@3
            condition: always()
            inputs:
              targetType: "inline"
              script: |
                docker exec collectLogs bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true"
            displayName: "Docker clean up"