Update hip_backend to use libhsa-runtime for arch info (#411)

Brings in path changes for PyTorch Triton wheels.

Co-authored-by: jayfurmanek <Jason.Furmanek@amd.com>
This commit is contained in:
Jack Taylor
2023-12-21 15:40:57 +00:00
committed by GitHub
parent 0248bdb29d
commit 1e2fd0dd1a
8 changed files with 21 additions and 23 deletions

View File

@@ -211,11 +211,7 @@ include_directories(${LLVM_INCLUDE_DIRS})
include_directories(${PROJECT_SOURCE_DIR}/include)
include_directories(${PROJECT_BINARY_DIR}/include) # Tablegen'd files
if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/python/triton/third_party/hip/lib/hsa/libhsa-runtime64.so)
set(ROCM_LIBRARIES
${CMAKE_CURRENT_SOURCE_DIR}/python/triton/third_party/hip/lib/hsa/libhsa-runtime64.so
)
elseif(EXISTS "$ENV{ROCM_PATH}/lib/libhsa-runtime64.so" )
if(EXISTS "$ENV{ROCM_PATH}/lib/libhsa-runtime64.so" )
set(ROCM_LIBRARIES
"$ENV{ROCM_PATH}/lib/libhsa-runtime64.so"
)
@@ -223,6 +219,10 @@ elseif(EXISTS "${ROCM_DEFAULT_DIR}/lib/libhsa-runtime64.so" )
set(ROCM_LIBRARIES
"${ROCM_DEFAULT_DIR}/lib/libhsa-runtime64.so"
)
elseif(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/python/triton/third_party/hip/lib/hsa/libhsa-runtime64.so)
set(ROCM_LIBRARIES
${CMAKE_CURRENT_SOURCE_DIR}/python/triton/third_party/hip/lib/hsa/libhsa-runtime64.so
)
else()
message(STATUS "WARNING: Can't find libhsa-runtime64.so")
endif()

View File

@@ -232,7 +232,7 @@ std::string generate_hsaco(llvm::Module *module, const std::string &triple,
.parent_path()
.parent_path() /
"triton" / "third_party" /
"rocm" / "llvm" / "bin" / "ld.lld";
"hip" / "llvm" / "bin" / "ld.lld";
std::string lld_path = compiletime_path.string();
if (!std::filesystem::exists(lld_path)) {
std::string rocm_path = ::triton::tools::getenv("ROCM_PATH");

View File

@@ -36,7 +36,7 @@ def libcuda_dirs():
@functools.lru_cache()
def rocm_path_dir():
default_path = os.path.join(os.path.dirname(__file__), "..", "third_party", "rocm")
default_path = os.path.join(os.path.dirname(__file__), "..", "third_party", "hip")
# Check if include files have been populated locally. If so, then we are
# most likely in a whl installation and the rest of our libraries should be here
if (os.path.exists(default_path+"/include/hip/hip_runtime.h")):

View File

@@ -386,7 +386,6 @@ def get_arch_default_num_stages(device_type, capability=None):
assert _device_backend
arch = _device_backend.get_architecture_descriptor()
num_stages = arch["num_stages"]
return num_stages

View File

@@ -300,16 +300,14 @@ def gpu_matrix_core_version() -> int:
def get_amdgpu_arch_fulldetails():
# print("get_amdgpu_arch_fulldetails")
"""
get the amdgpu fulll ISA details for compiling:
get the amdgpu full ISA details for compiling:
i.e., arch_triple: amdgcn-amd-amdhsa; arch_name: gfx906; arch_features: sramecc+:xnack-
"""
try:
# TODO: package rocm.cc with Triton
rocm_path_dir = os.getenv("ROCM_PATH", default="/opt/rocm")
rocminfo = subprocess.check_output(rocm_path_dir + '/bin/rocminfo').decode()
gfx_arch_details = re.search('amd.*', rocminfo).group(0).strip().split('--')
arch_info = _triton.get_arch_info()
gfx_arch_details = re.search('amd.*', arch_info).group(0).strip().split('--')
arch_triple = gfx_arch_details[0]
arch_name_features = gfx_arch_details[1].split(':')
arch_name = arch_name_features[0]
@@ -319,13 +317,13 @@ def get_amdgpu_arch_fulldetails():
gfx_arch = os.environ.get('MI_GPU_ARCH', arch_name)
if gfx_arch is None:
raise RuntimeError('gfx_arch is None (not specified)')
mat_core_ver = gpu_matrix_core_version()
capability = gpu_matrix_core_version() * 100
return {"gfx_triple": arch_triple, "gfx_arch": gfx_arch, "gfx_features": arch_features,\
"capability": capability, "matrix_core_version": mat_core_ver}
except BaseException:
except BaseException as e:
print("Error: Attempting to get amdgpu ISA Details {}".format(e))
return None

View File

@@ -217,7 +217,7 @@ std::string generate_hsaco(llvm::Module *module, const std::string &triple,
.parent_path()
.parent_path() /
"triton" / "third_party" /
"rocm" / "llvm" / "bin" / "ld.lld";
"hip" / "llvm" / "bin" / "ld.lld";
std::string lld_path = compiletime_path.string();
if (!std::filesystem::exists(lld_path)) {
std::string rocm_path = ::triton::tools::getenv("ROCM_PATH");
@@ -778,4 +778,4 @@ translateTritonIRToHSACO(mlir::ModuleOp module, std::string gfx_arch,
}
} // namespace triton
} // namespace mlir
} // namespace mlir

View File

@@ -2,8 +2,8 @@
#From https://github.com/pytorch/builder/blob/main/manywheel/build_common.sh
WHEELHOUSE_DIR=/artifacts
PATCHELF_BIN=patchelf
ROCM_LIB=third_party/rocm/lib
ROCM_LD=third_party/rocm/llvm/bin
ROCM_LIB=third_party/hip/lib
ROCM_LD=third_party/hip/llvm/bin
PREFIX=triton
fname_without_so_number() {

View File

@@ -9,11 +9,11 @@ fi
# Check TRITON_ROCM_DIR is set
if [[ -z "${TRITON_ROCM_DIR}" ]]; then
export TRITON_ROCM_DIR=python/triton/third_party/rocm
export TRITON_ROCM_DIR=python/triton/third_party/hip
fi
# Create triton lib directory
mkdir -p $TRITON_ROCM_DIR/lib
# Remove current libhsa included to avoid confusion
rm $TRITON_ROCM_DIR/lib/hsa/libhsa-runtime*
LIBTINFO_PATH="/usr/lib64/libtinfo.so.5"
LIBNUMA_PATH="/usr/lib64/libnuma.so.1"
@@ -30,7 +30,7 @@ do
cp $lib $TRITON_ROCM_DIR/lib/
done
# Required ROCm libraries - dynamically find so numbers
# Required ROCm libraries
ROCM_SO=(
"libhsa-runtime64.so.1"
"libamdhip64.so.5"
@@ -71,3 +71,4 @@ cp -r $ROCM_HOME/include $TRITON_ROCM_DIR/
# Copy linker
mkdir -p $TRITON_ROCM_DIR/llvm/bin
cp $ROCM_HOME/llvm/bin/ld.lld $TRITON_ROCM_DIR/llvm/bin/