# ROCm/.azuredevops/nightly/pytorch.yml
parameters:
# Ubuntu near-equivalents of the yum installs in https://github.com/ROCm/ROCm-docker/blob/master/dev/Dockerfile-centos-7-complete
# plus additional packages found through iterative testing of the pipeline
- name: aptPackages
type: object
default:
- autoconf
- bc
- bridge-utils
- build-essential
- ca-certificates
- ccache
- devscripts
- dkms
- doxygen
- fakeroot
- ffmpeg
- gfortran
- git
- gnutls-bin
- libamd2
- libavformat-dev
- libblas3
- libcamd2
- libccolamd2
- libcholmod3
- libcolamd2
- libdpkg-dev
- libdpkg-perl
- libdrm-amdgpu1
- libdrm-dev
- libelf-dev
- libfreetype-dev
- libgfortran5
- libgomp1
- libjpeg-dev
- libjpeg-turbo-official
- liblapack-dev
- liblapack3
- libmetis5
- libncurses-dev
- libnuma-dev
- libopenblas-dev
- libpth-dev
- libquadmath0
- libssh-dev
- libstdc++-12-dev
- libsuitesparseconfig5
- libswscale-dev
- libtinfo-dev
- libunwind-dev
- libwebp-dev
- llvm-dev
- ncurses-base
- ninja-build
- numactl
- openjdk-8-jdk
- python-is-python3
- python3-dev
- python3-pip
- python3-venv
- qemu-kvm
- re2c
- subversion
- wget
- zip
- name: pipModules
type: object
default:
- astunparse
- "expecttest>=0.3.0"
- hypothesis
- numpy
- psutil
- pyyaml
- requests
- setuptools==75.8.0
- types-dataclasses
- "typing-extensions>=4.10.0"
- "sympy>=1.13.3"
- filelock
- networkx
- jinja2
- fsspec
- lintrunner
- ninja
- packaging
- "optree>=0.13.0"
- "click>=8.0.3"
# list for vision
- auditwheel
- future
- pytest
- pytest-azurepipelines
- pillow
# list from https://github.com/pytorch/pytorch/blob/main/.ci/manywheel/build_rocm.sh
- name: rocmDependencies
type: object
default:
- clr
- hipBLAS
- hipBLASLt
- hipFFT
- hipRAND
- hipSOLVER
- hipSPARSE
- hipSPARSELt
- llvm-project
- MIOpen
- rccl
- rocBLAS
- rocFFT
- rocm-core
- rocminfo
- rocm_smi_lib
- rocPRIM
- rocprofiler-register
- rocRAND
- ROCR-Runtime
- rocSOLVER
- rocSPARSE
- roctracer
# additional dependencies below are not called out by the build script, but their absence causes CMake errors
- composable_kernel
- hipBLAS-common
- hipCUB
- rocThrust
- name: rocmTestDependencies
type: object
default:
# rocroller.so is needed but is not included in the wheel
- hipBLASLt
- rocminfo
# Reference on which tests to run for pytorch is found in a private repo:
# https://github.com/ROCm/rocAutomation/blob/jenkins-pipelines/pytorch/pytorch_ci/test_pytorch_test1.sh#L54
# The test job iterates through this list using pytest
- name: torchTestList
type: object
default:
- nn
- torch
# - cuda segfaults and might need CUDA installed on the test system
# - ops takes too long
- unary_ufuncs
- binary_ufuncs
- autograd
# - inductor/torchinductor takes too long
# set to true to enable the torchvision build and test (disabled by default)
- name: includeVision
type: boolean
default: false
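# nightly schedule below runs at 07:30 UTC on the develop branch; manual and PR triggers are disabled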
trigger: none
pr: none
schedules:
- cron: '30 7 * * *'
displayName: nightly pytorch
branches:
include:
- develop
always: true
jobs:
- job: pytorch
timeoutInMinutes: 120
strategy:
matrix:
amd-staging-gfx942:
ROCM_BRANCH: amd-staging
JOB_GPU_TARGET: gfx942
variables:
- group: common
- template: /.azuredevops/variables-global.yml
# various flags/parameters expected by bash scripts in pytorch repo's .ci directory
- name: ROCM_VERSION
value: 6.5.0
- name: ROCM_PATH
value: /opt/rocm
- name: DESIRED_CUDA
value: 6.5.0
- name: MAGMA_ROCM
value: 6.3
- name: DESIRED_PYTHON
value: 3.10
- name: PYTORCH_ROOT
value: $(Build.SourcesDirectory)/pytorch
- name: DESIRED_DEVTOOLSET
value: cxx11-abi
pool: ${{ variables.ULTRA_BUILD_POOL }}
workspace:
clean: all
steps:
# environment setup copied from https://github.com/pytorch/pytorch/blob/main/.ci/docker/manywheel/Dockerfile
# but using an Ubuntu environment instead of CentOS
- template: ${{ variables.CI_TEMPLATE_PATH }}/steps/dependencies-cmake-latest.yml
- task: Bash@3
displayName: 'Register libjpeg-turbo packages'
inputs:
targetType: inline
script: |
sudo mkdir --parents --mode=0755 /etc/apt/keyrings
wget -q -O- https://packagecloud.io/dcommander/libjpeg-turbo/gpgkey | gpg --dearmor | sudo tee /etc/apt/trusted.gpg.d/libjpeg-turbo.gpg > /dev/null
echo "deb [signed-by=/etc/apt/trusted.gpg.d/libjpeg-turbo.gpg] https://packagecloud.io/dcommander/libjpeg-turbo/any/ any main" | sudo tee /etc/apt/sources.list.d/libjpeg-turbo.list
sudo apt update
apt-cache show libjpeg-turbo-official | grep Version
- template: ${{ variables.CI_TEMPLATE_PATH }}/steps/dependencies-other.yml
parameters:
aptPackages: ${{ parameters.aptPackages }}
pipModules: ${{ parameters.pipModules }}
- template: ${{ variables.CI_TEMPLATE_PATH }}/steps/preamble.yml
# the wheel install location on the Azure agent differs from where the upstream script assumes the wheel is installed
- task: Bash@3
displayName: wheel install path symlink
inputs:
targetType: inline
script: |
sudo mkdir -p /opt/python/cp310-cp310/lib/python3.10
sudo ln -s /usr/local/lib/python3.10/dist-packages /opt/python/cp310-cp310/lib/python3.10/site-packages
- template: ${{ variables.CI_TEMPLATE_PATH }}/steps/dependencies-rocm.yml
parameters:
dependencyList: ${{ parameters.rocmDependencies }}
gpuTarget: $(JOB_GPU_TARGET)
setupHIPLibrarySymlinks: true
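# the upstream .ci scripts expect ROCm at /opt/rocm (ROCM_PATH), while the ROCm dependencies above
# are staged under $(Agent.BuildDirectory)/rocm, so the symlink below bridges the two locations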
- task: Bash@3
displayName: ROCm symbolic link
inputs:
targetType: inline
script: sudo ln -s $(Agent.BuildDirectory)/rocm /opt/rocm
- checkout: self
- task: Bash@3
displayName: git clone upstream pytorch
inputs:
targetType: inline
script: git clone https://github.com/pytorch/pytorch.git --depth=1 --recurse-submodules
workingDirectory: $(Build.SourcesDirectory)
# the builder clone is still needed because build_common.sh calls run_tests.sh at the end
- task: Bash@3
displayName: git clone pytorch builder
inputs:
targetType: inline
script: |
git clone https://github.com/pytorch/builder.git --depth=1 --recurse-submodules
sudo ln -s $(Build.SourcesDirectory)/builder /builder
workingDirectory: $(Build.SourcesDirectory)
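# patchelf is needed by the manywheel scripts, which use it to rewrite rpaths when ROCm shared libraries are bundled into the wheel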
- task: Bash@3
displayName: Install patchelf
inputs:
targetType: inline
script: |
sudo bash pytorch/.ci/docker/common/install_patchelf.sh
workingDirectory: $(Build.SourcesDirectory)
- task: Bash@3
displayName: Install rocm drm
inputs:
targetType: inline
script: |
sudo bash pytorch/.ci/docker/common/install_rocm_drm.sh
workingDirectory: $(Build.SourcesDirectory)
- task: Bash@3
displayName: Install rocm magma
inputs:
targetType: inline
script: |
sudo bash pytorch/.ci/docker/common/install_rocm_magma.sh $(MAGMA_ROCM)
workingDirectory: $(Build.SourcesDirectory)
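# install typing_extensions next to the in-tree torch package so that imports during the source build can resolve it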
- task: Bash@3
displayName: Install targeted typing_extensions for build
inputs:
targetType: inline
script: pip install --target=$(Build.SourcesDirectory)/pytorch/torch/.. typing_extensions
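# notes on the environment handed to build_rocm.sh below:
# - PYTORCH_BUILD_VERSION strips the alpha suffix from version.txt via `cut -da -f1` (e.g. 2.7.0a0 becomes 2.7.0)
# - PYTORCH_BUILD_NUMBER is the UTC build date
# - SKIP_ALL_TESTS=1 is expected to skip the test run triggered inside the upstream build scripts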
- task: Bash@3
displayName: Run ROCm Build Script
inputs:
targetType: inline
script: >-
sudo
PYTHONPATH=/home/AzDevOps/.local/lib/python3.10/site-packages:/usr/lib/python3/dist-packages
DESIRED_CUDA=$(DESIRED_CUDA)
PYTORCH_ROCM_ARCH=$(JOB_GPU_TARGET)
GPU_TARGET=$(JOB_GPU_TARGET)
DESIRED_PYTHON=$(DESIRED_PYTHON)
PYTORCH_ROOT=$(PYTORCH_ROOT)
CMAKE_PREFIX_PATH=$(Agent.BuildDirectory)/rocm
DESIRED_DEVTOOLSET=$(DESIRED_DEVTOOLSET)
PYTORCH_BUILD_VERSION=$(cat $(Build.SourcesDirectory)/pytorch/version.txt | cut -da -f1)
PYTORCH_BUILD_NUMBER=$(date -u +%Y%m%d)
SKIP_ALL_TESTS=1
bash ./.ci/manywheel/build_rocm.sh
workingDirectory: $(Build.SourcesDirectory)/pytorch
- template: ${{ variables.CI_TEMPLATE_PATH }}/steps/artifact-prepare-package.yml
parameters:
sourceDir: /remote/wheelhouserocm$(ROCM_VERSION)
contentsString: '*.whl'
# common helper source for pytorch vision and audio
- ${{ if eq(parameters.includeVision, true) }}:
- task: Bash@3
displayName: git clone pytorch test-infra
inputs:
targetType: inline
script: git clone https://github.com/pytorch/test-infra.git --depth=1 --recurse-submodules
workingDirectory: $(Build.SourcesDirectory)
- task: Bash@3
displayName: install package helper
inputs:
targetType: inline
script: python3 -m pip install test-infra/tools/pkg-helpers
workingDirectory: $(Build.SourcesDirectory)
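# pytorch_pkg_helpers is expected to compute packaging-related variables for the domain library build;
# CU_VERSION and CHANNEL are taken from the environment and fall back to empty if unset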
- task: Bash@3
displayName: pytorch pkg helpers
inputs:
targetType: inline
script: CU_VERSION=${CU_VERSION} CHANNEL=${CHANNEL} python -m pytorch_pkg_helpers
# get torch vision source and build
- task: Bash@3
displayName: git clone pytorch vision
inputs:
targetType: inline
script: git clone https://github.com/pytorch/vision.git --depth=1 --recurse-submodules
workingDirectory: $(Build.SourcesDirectory)
- task: Bash@3
displayName: Build vision
inputs:
targetType: inline
script: >-
PYTORCH_VERSION=$(cat $(Build.SourcesDirectory)/pytorch/version.txt | cut -da -f1)post$(date -u +%Y%m%d)
BUILD_VERSION=$(cat $(Build.SourcesDirectory)/vision/version.txt | cut -da -f1)post$(date -u +%Y%m%d)
python3 setup.py bdist_wheel
workingDirectory: $(Build.SourcesDirectory)/vision
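# relocate.py is expected to bundle the shared libraries that the vision extension links against into the wheel so it is self-contained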
- task: Bash@3
displayName: Relocate vision
inputs:
targetType: inline
script: python3 packaging/wheel/relocate.py
workingDirectory: $(Build.SourcesDirectory)/vision
- template: ${{ variables.CI_TEMPLATE_PATH }}/steps/artifact-prepare-package.yml
parameters:
sourceDir: $(Build.SourcesDirectory)/vision/dist
contentsString: '*.whl'
clean: false
- template: ${{ variables.CI_TEMPLATE_PATH }}/steps/manifest.yml
parameters:
gpuTarget: $(JOB_GPU_TARGET)
- task: PublishPipelineArtifact@1
displayName: 'Publish wheel files'
retryCountOnTaskFailure: 3
inputs:
targetPath: $(Build.BinariesDirectory)
- task: Bash@3
displayName: Save pipeline artifact file name
inputs:
workingDirectory: $(Pipeline.Workspace)
targetType: inline
script: |
whlFile=$(find "$(Build.BinariesDirectory)" -type f -name "*.whl" | head -n 1)
if [ -n "$whlFile" ]; then
echo $(basename "$whlFile") >> pipelineArtifacts.txt
fi
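# pipelineArtifacts.txt is presumably consumed by the artifact-links template below to publish download links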
- template: ${{ variables.CI_TEMPLATE_PATH }}/steps/artifact-links.yml
- job: pytorch_testing
dependsOn: pytorch
condition: and(succeeded(), eq(variables.ENABLE_GFX942_TESTS, 'true'), not(containsValue(split(variables.DISABLED_GFX942_TESTS, ','), variables['Build.DefinitionName'])))
variables:
- group: common
- template: /.azuredevops/variables-global.yml
- name: PYTORCH_TEST_WITH_ROCM
value: 1
pool: $(JOB_TEST_POOL)
workspace:
clean: all
strategy:
matrix:
gfx942:
JOB_GPU_TARGET: gfx942
JOB_TEST_POOL: ${{ variables.GFX942_TEST_POOL }}
steps:
- task: Bash@3
displayName: 'Register libjpeg-turbo packages'
inputs:
targetType: inline
script: |
sudo mkdir --parents --mode=0755 /etc/apt/keyrings
wget -q -O- https://packagecloud.io/dcommander/libjpeg-turbo/gpgkey | gpg --dearmor | sudo tee /etc/apt/trusted.gpg.d/libjpeg-turbo.gpg > /dev/null
echo "deb [signed-by=/etc/apt/trusted.gpg.d/libjpeg-turbo.gpg] https://packagecloud.io/dcommander/libjpeg-turbo/any/ any main" | sudo tee /etc/apt/sources.list.d/libjpeg-turbo.list
sudo apt update
apt-cache show libjpeg-turbo-official | grep Version
- template: ${{ variables.CI_TEMPLATE_PATH }}/steps/dependencies-other.yml
parameters:
aptPackages: ${{ parameters.aptPackages }}
pipModules: ${{ parameters.pipModules }}
# pytorch tests require an updated version of click, even though it is not called out in the requirements
- task: Bash@3
displayName: 'pip update click'
inputs:
targetType: inline
script: pip install --upgrade click
- template: ${{ variables.CI_TEMPLATE_PATH }}/steps/preamble.yml
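# pull the wheel files published by the pytorch build job earlier in this run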
- task: DownloadPipelineArtifact@2
displayName: 'Download Pipeline Wheel Files'
retryCountOnTaskFailure: 3
inputs:
itemPattern: '**/*.whl'
targetPath: $(Agent.BuildDirectory)
- template: ${{ variables.CI_TEMPLATE_PATH }}/steps/dependencies-aqlprofile.yml
- template: ${{ variables.CI_TEMPLATE_PATH }}/steps/dependencies-rocm.yml
parameters:
dependencyList: ${{ parameters.rocmTestDependencies }}
gpuTarget: $(JOB_GPU_TARGET)
# get sources to run test scripts
- task: Bash@3
displayName: git clone upstream pytorch
inputs:
targetType: inline
script: git clone https://github.com/pytorch/pytorch.git --depth=1 --recurse-submodules
workingDirectory: $(Build.SourcesDirectory)
- ${{ if eq(parameters.includeVision, true) }}:
- task: Bash@3
displayName: git clone pytorch vision
inputs:
targetType: inline
script: git clone https://github.com/pytorch/vision.git --depth=1 --recurse-submodules
workingDirectory: $(Build.SourcesDirectory)
- task: Bash@3
displayName: Install Wheel Files
inputs:
targetType: inline
script: find . -name "*.whl" -exec pip install --no-index --find-links=. --no-deps -v {} \;
workingDirectory: $(Agent.BuildDirectory)
- task: Bash@3
displayName: Show Updated pip List
inputs:
targetType: inline
script: pip list -v
workingDirectory: $(Agent.BuildDirectory)
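# the ##vso[task.prependpath] logging command prepends a directory to PATH for all subsequent steps in the job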
- task: Bash@3
displayName: Add ROCm binaries to PATH
inputs:
targetType: inline
script: echo "##vso[task.prependpath]$(Agent.BuildDirectory)/rocm/bin"
- task: Bash@3
displayName: Add Python site-packages binaries to path
inputs:
targetType: inline
script: |
USER_BASE=$(python3 -m site --user-base)
echo "##vso[task.prependpath]$USER_BASE/bin"
- task: Bash@3
displayName: Add torch libs to ldconfig
inputs:
targetType: inline
script: |
echo $(python3 -m site --user-site)/torch/lib | sudo tee /etc/ld.so.conf.d/torch.conf
sudo ldconfig -v
ldconfig -p
# https://pytorch.org/get-started/locally/#linux-verification
# https://rocm.docs.amd.com/projects/install-on-linux/en/latest/how-to/3rd-party/pytorch-install.html#testing-the-pytorch-installation
- task: Bash@3
displayName: Simple Import Torch Tests
inputs:
targetType: inline
script: |
python3 -c 'import torch' 2> /dev/null && echo 'Success' || echo 'Failure'
python3 -c 'import torch; print(torch.cuda.is_available())'
python3 -c 'import torch; x = torch.rand(5, 3); print(x)'
# The upstream test artifact build script has too many if statements for different environments
# Based on the snippet of interest for this environment, with some adjustments:
# https://github.com/pytorch/pytorch/blob/main/.ci/pytorch/build.sh#L330-L371
# In-line comments are removed since they do not fit the YAML markup
- task: Bash@3
displayName: Build Pytorch Test Artifacts
continueOnError: true
inputs:
targetType: inline
script: |
CUSTOM_TEST_ARTIFACT_BUILD_DIR="build/custom_test_artifacts"
CUSTOM_TEST_USE_ROCM=ON
CUSTOM_TEST_MODULE_PATH="${PWD}/cmake/public"
mkdir -pv "${CUSTOM_TEST_ARTIFACT_BUILD_DIR}"
CUSTOM_OP_BUILD="${CUSTOM_TEST_ARTIFACT_BUILD_DIR}/custom-op-build"
CUSTOM_OP_TEST="${PWD}/test/custom_operator"
python --version
SITE_PACKAGES="$(python -c 'import site; print(";".join([x for x in site.getsitepackages()] + [x + "/torch" for x in site.getsitepackages()]))')"
mkdir -p "$CUSTOM_OP_BUILD"
pushd "$CUSTOM_OP_BUILD"
cmake "$CUSTOM_OP_TEST" -DCMAKE_PREFIX_PATH="$SITE_PACKAGES" -DPython_EXECUTABLE="$(which python)" \
-DCMAKE_MODULE_PATH="$CUSTOM_TEST_MODULE_PATH" -DUSE_ROCM="$CUSTOM_TEST_USE_ROCM"
make VERBOSE=1
popd
JIT_HOOK_BUILD="${CUSTOM_TEST_ARTIFACT_BUILD_DIR}/jit-hook-build"
JIT_HOOK_TEST="$PWD/test/jit_hooks"
python --version
SITE_PACKAGES="$(python -c 'import site; print(";".join([x for x in site.getsitepackages()] + [x + "/torch" for x in site.getsitepackages()]))')"
mkdir -p "$JIT_HOOK_BUILD"
pushd "$JIT_HOOK_BUILD"
cmake "$JIT_HOOK_TEST" -DCMAKE_PREFIX_PATH="$SITE_PACKAGES" -DPython_EXECUTABLE="$(which python)" \
-DCMAKE_MODULE_PATH="$CUSTOM_TEST_MODULE_PATH" -DUSE_ROCM="$CUSTOM_TEST_USE_ROCM"
make VERBOSE=1
popd
CUSTOM_BACKEND_BUILD="${CUSTOM_TEST_ARTIFACT_BUILD_DIR}/custom-backend-build"
CUSTOM_BACKEND_TEST="${PWD}/test/custom_backend"
python --version
mkdir -p "$CUSTOM_BACKEND_BUILD"
pushd "$CUSTOM_BACKEND_BUILD"
cmake "$CUSTOM_BACKEND_TEST" -DCMAKE_PREFIX_PATH="$SITE_PACKAGES" -DPython_EXECUTABLE="$(which python)" \
-DCMAKE_MODULE_PATH="$CUSTOM_TEST_MODULE_PATH" -DUSE_ROCM="$CUSTOM_TEST_USE_ROCM"
make VERBOSE=1
popd
workingDirectory: $(Build.SourcesDirectory)/pytorch
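# the loop below expands at template-expansion time into one pytest task per torchTestList entry;
# entries containing '/' map to test/<dir>/test_<name>.py, all others to test/test_<name>.py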
- ${{ each torchTest in parameters.torchTestList }}:
- task: Bash@3
displayName: Test ${{ torchTest }}
continueOnError: true
inputs:
targetType: inline
workingDirectory: $(Build.SourcesDirectory)/pytorch
${{ if contains(torchTest, '/') }}:
script: pytest test/${{ split(torchTest, '/')[0] }}/test_${{ split(torchTest, '/')[1] }}.py
${{ else }}:
script: pytest test/test_${{ torchTest }}.py
# Reference on which tests to run for torchvision is found in a private repo:
# https://github.com/ROCm/rocAutomation/blob/jenkins-pipelines/pytorch/pytorch_ci/test_torchvision.sh#L51
- ${{ if eq(parameters.includeVision, true) }}:
- task: Bash@3
displayName: Test vision/transforms
continueOnError: true
inputs:
targetType: inline
script: pytest test/test_transforms.py
workingDirectory: $(Build.SourcesDirectory)/vision
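# teardown: uninstall the wheels and undo the PATH changes, presumably to leave the test agent clean for later runs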
- task: Bash@3
displayName: Uninstall Wheel Files
inputs:
targetType: inline
script: find . -name "*.whl" -exec pip uninstall -y {} \;
workingDirectory: $(Agent.BuildDirectory)
- task: Bash@3
displayName: Remove Python site-packages binaries from path
inputs:
targetType: inline
script: |
USER_BASE=$(python3 -m site --user-base)
echo "##vso[task.setvariable variable=PATH]$(echo $PATH | sed -e 's;:$USER_BASE/bin;;' -e 's;^/;;' -e 's;/$;;')"
- task: Bash@3
displayName: Remove ROCm binaries from PATH
inputs:
targetType: inline
script: echo "##vso[task.setvariable variable=PATH]$(echo $PATH | sed -e 's;:$(Agent.BuildDirectory)/rocm/bin;;' -e 's;^/;;' -e 's;/$;;')"