author     V3n3RiX <venerix@koprulu.sector>  2024-08-09 11:04:53 +0100
committer  V3n3RiX <venerix@koprulu.sector>  2024-08-09 11:04:53 +0100
commit     43297a4ed0a3760bbdd0b8c286b779f174ca5368 (patch)
tree       15b92efd455b5d5eb0ef8a4af4f5f24572c4b46b /sci-libs/caffe2
parent     14866757225815b9374acfc8453518951e0f910d (diff)
gentoo auto-resync : 09:08:2024 - 11:04:52
Diffstat (limited to 'sci-libs/caffe2')
-rw-r--r-- | sci-libs/caffe2/Manifest | 7
-rw-r--r-- | sci-libs/caffe2/caffe2-2.4.0.ebuild | 289
-rw-r--r-- | sci-libs/caffe2/files/caffe2-2.4.0-exclude-aotriton.patch | 65
-rw-r--r-- | sci-libs/caffe2/files/caffe2-2.4.0-fix-openmp-link.patch | 14
-rw-r--r-- | sci-libs/caffe2/files/caffe2-2.4.0-gentoo.patch | 211
-rw-r--r-- | sci-libs/caffe2/files/caffe2-2.4.0-install-dirs.patch | 70
-rw-r--r-- | sci-libs/caffe2/files/caffe2-2.4.0-rocm-fix-std-cpp17.patch | 50
7 files changed, 706 insertions, 0 deletions
diff --git a/sci-libs/caffe2/Manifest b/sci-libs/caffe2/Manifest
index 22ad33c0966d..cee0d49092c1 100644
--- a/sci-libs/caffe2/Manifest
+++ b/sci-libs/caffe2/Manifest
@@ -16,10 +16,17 @@ AUX caffe2-2.3.0-fix-libcpp.patch 1281 BLAKE2B 67943ec6e79327c854ebcd3538f68dd28
 AUX caffe2-2.3.0-fix-rocm-gcc14-clamp.patch 1009 BLAKE2B ad27422dfc7be2720b972e1bbc417874a1060d2cbd5edb0acf166a7437963702b830766950db33381d6f32c6f95a001d8424ece73f70603fae8bf5f50b2ba255 SHA512 67f26127632cfca91389fdc60cea4d31a9b259e5547ccd6778bfc98a19bdd7b632ec17abbe888842ff613430c49aac37e18aca441335a947f973fd9d978ac3fe
 AUX caffe2-2.3.0-optional-hipblaslt.patch 10001 BLAKE2B a73913a9d82acfb780ad95fc1aabef1dfbd20243a8caef7136dbdad72a120c8778348d82e25d9171453e159580caf1ec5fdaa9bdbef4a09981721579e50f6b21 SHA512 222d33a3253d35c64dc151d12a42a9a0d4edca7fe60e1bb9b0c43df07292a9a061b2259a5696b4d568db265b0df4f065b3ee54672481c788ce1e2b0ae01b8488
 AUX caffe2-2.3.0-rocm-fix-std-cpp17.patch 3378 BLAKE2B 9e88fa1bf68c397c8122ea5b3504a22b3f6ef92c77dad8bd84ee03b4f75792b0e1281d8b1aa981ad1bf65060179fa08ef14e776e82abdec9147dfbb3bf37a7ae SHA512 7797a140abf736f2a4628cd727cf0c58ed39c9764b9ce3b67d17fc0c9b9965e647266c815e5322f96f807680120e25ccdbbc66b66c7c6cf84edb811330ad452c
+AUX caffe2-2.4.0-exclude-aotriton.patch 2832 BLAKE2B 319e9516b2b5e9d4fba622d7b618085528103f15c1db87185a2c1cae61ec4636cb69e0e1792dca549c42dd55527c46079be34b28c8b74ec1407ef7b5010e51cd SHA512 26612e973085e225f391c125dc265a45e8b4fe556a1b041d9b687fdc7354a07e89dc29c5e243881acb22027e1ba93b4520e7d809d74f83dea2c1d07fd15eb804
+AUX caffe2-2.4.0-fix-openmp-link.patch 633 BLAKE2B 067d970b062e9e5b67925b0f592368869c02a76395e8b36453835c45ab34911823936ce5810a2e15d050915a191cca5f415314189053be8ccae0a5749bf70f94 SHA512 1354a30f45c6cf0238c98dc7aaea734af680cdfa7cfefb2f09f7053ff8acded2c936a6ef24ac6e369715ebc197d5a3cd8e3b8fc30dc3ea092f403cbd019cb00e
+AUX caffe2-2.4.0-gentoo.patch 8283 BLAKE2B c74bb8b2e5c6ff9f9e5fbf23acc6554c0163017a25ec3aa2657007099d6cec64869cbaf0e5a43ad1e0fc9b09a5940e289133ac8824244fda9d86211aa4a55df2 SHA512 792f9cbb6ed13ef3b0e7b3f869489ec2c9d21fb6364feb27e57c23722df6a54bf307e631d0c955221cc5ea6081f7132c1d44f541d34c8791d1792da9820d35c9
+AUX caffe2-2.4.0-install-dirs.patch 2908 BLAKE2B 64bc9adcf377e13c2289ea034bceaa8b370b32ad0024c16ccde0c6adb7ba2b7c929d6b04d26c4c77c502127d9ab8e74305ab661b500b4c862c7673ec3422467d SHA512 c02f2e62b68a59a8948ee55ab170c0d5a89ca2cc7afccc77ece5e0539c050d45d5b0fb14ea4954f85a138c004bfcad580b76b357a1b2595916e13d79387b15e0
+AUX caffe2-2.4.0-rocm-fix-std-cpp17.patch 2431 BLAKE2B 649ff824f454a7b6fe2e95e487c09b51dbc882f23e2910598741a4f3b4c7a5d55257a0ba188bc386a81bf676f7872e60e71657ae82197772ed44cc236ec2d2f4 SHA512 b2e96f0d039a7e29063829b7efb49fbc7ebc04428ee0f8d4197f6048cf7166f04e3c980791cecabe685f280f9ae5b94ce388ee5fc00d6aec19ae1d1a32a43331
 DIST pytorch-2.2.2.tar.gz 116367503 BLAKE2B 0be22f2ec4b9aac6f5e976664cae01facf07929a32565cd57d7cc5b2d9888e9ae71ca301853752fe8f31d174d04c9974eb9ed2f3d452360a50ccf024f200726a SHA512 7990e0f9484038c3458c0bda2c863bf2b19e56edab81fc5938c6e0f08b17558287f853bb67350e8cca8f42bec0f1d4ba0e94e50a145db8da44bdd4bd703d91d0
 DIST pytorch-2.3.0.tar.gz 117029829 BLAKE2B 8f9c0d71ee0a9219b495eddccdcc65107f7ad537c43c68100b229f3d27b0e6c01ccb1659c7fffc356a48d80f2adc0a10361305dc8f1df20446de837d380f89f6 SHA512 67f7e9a096c3ffb952206ebf9105bedebb68c24ad82456083adf1d1d210437fcaa9dd52b68484cfc97d408c9eebc9541c76868c34a7c9982494dc3f424cfb07c
 DIST pytorch-2.3.1.tar.gz 117035696 BLAKE2B d419d7fa1342f1fb317ffce09ec9dc1447414627cc83d36578fe60f68c283c620b2b4d49f414cd206d537b90b16432a06cd1941662720db05d5e2b6c493325f5 SHA512 e1bcae44f9939fc7ccb1360a9b1970d92426f25e5de73e36964df3dd15ad5d8d9f5bd2f9a7dda6b8f64e2bba3674005bd869f542489cc442ad0125a02676f587
+DIST pytorch-2.4.0.tar.gz 115031093 BLAKE2B d206477963977011627df284efa01482fbf57e9fcb5f58f51d679c742b8e5dde6aa6affd8745ab817fcd09477d129a81e74e07be576b5d3585eaca1c735b8e01 SHA512 804d25944035f33de6591fd942fbda44d3de037717a4397d38a97474b01775d30eaf93d16dd708a832c0119050d24d73b90990fd3e3773be79d26ada25244d22
 EBUILD caffe2-2.2.2-r1.ebuild 7452 BLAKE2B 31ef525960d7c3866580985f9ba9736e5419f17c2a63251b8c4fe961a6789a33dee746b14b12fa331c8c20a6c821565b92dd2cb7c6cdd61678bba7ddf5fc7400 SHA512 f232d901c08e3fefd0d6260d07915786747df8f83269c3980c16112c968114c3cdb7daccd8132a6a3850a266a04b38f6df6d2b1f1bc35f473fb9a58d5fff3452
 EBUILD caffe2-2.3.0-r3.ebuild 8441 BLAKE2B 7b7c44a04e072fbf4bcc4fabf470b5256b85b36bb5035a56ceef65843dcaa1eec1eb3cdf9246cf3c672b6d3f4885816a037ef875f77bbf38f9d5a0fafda7981c SHA512 34306a2af160fef5ad902d116da044e96d4eaafd46b8ea511f7b04ef590ccdace7deabeb8905a5b69d9e1a21fc58ab038356d4542dd378c21078a75d08c188ca
 EBUILD caffe2-2.3.1.ebuild 8462 BLAKE2B 1c079bd3119ad0acd224a99cc029ba1e8d557065bbd2cfa30670f6f3720072520a072d0abed1e21f41781581f0b6711e809581fe01e363b613e2b82e38635d94 SHA512 591bee9c3a6beaa9f1687881381692bd7de32a6203d924d36ab89c2fc7de754fd7a5a4056e0e19f293aa836975d1d9321ee5997939270ff10df41fd31e5c9157
+EBUILD caffe2-2.4.0.ebuild 8158 BLAKE2B ae881185ee8e4e317316c4f0bfc9282685a1632aede2907213f4b7e063b55f4bf90886b741183c30aa30b5936e2d14dded4bb57de3ade8f87dd19e3a47403a6e SHA512 f660100f1d8ba8f6ecb9acff1c419457c5b879a005a08773ebd73fc3d14a516e0005a605f9b318254e268bab4456345fbbd6cc7b6993a73b291e509b3c9fa2d7
 MISC metadata.xml 1225 BLAKE2B ab7fb0bf8b2d37ddaa1a9ecc815eb094e85465d20d3a30af081b42e0b60ade9858d0053b101ba0e7750a90cb48b5b79db9bdc2729bf66d0420732489da62fe54 SHA512 dfb58597fb4bcdd7df0fcc3f2514518e118e8fc9b1cd24868aab60c32a62ff419b8b72a7c294925eff4c8871cc8df606af7fa60bfa99901091d8195101ee1153
diff --git a/sci-libs/caffe2/caffe2-2.4.0.ebuild b/sci-libs/caffe2/caffe2-2.4.0.ebuild
new file mode 100644
index 000000000000..b4384eb7df11
--- /dev/null
+++ b/sci-libs/caffe2/caffe2-2.4.0.ebuild
@@ -0,0 +1,289 @@
+# Copyright 2022-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+PYTHON_COMPAT=( python3_{10..12} )
+ROCM_VERSION=6.1
+inherit python-single-r1 cmake cuda flag-o-matic prefix rocm toolchain-funcs
+
+MYPN=pytorch
+MYP=${MYPN}-${PV}
+
+DESCRIPTION="A deep learning framework"
+HOMEPAGE="https://pytorch.org/"
+SRC_URI="https://github.com/pytorch/${MYPN}/archive/refs/tags/v${PV}.tar.gz
+	-> ${MYP}.tar.gz"
+
+S="${WORKDIR}"/${MYP}
+
+LICENSE="BSD"
+SLOT="0"
+KEYWORDS="~amd64"
+IUSE="cuda distributed fbgemm flash gloo mkl mpi nnpack +numpy onednn openblas opencl openmp qnnpack rocm xnnpack"
+RESTRICT="test"
+REQUIRED_USE="
+	${PYTHON_REQUIRED_USE}
+	mpi? ( distributed )
+	gloo? ( distributed )
+	?? ( cuda rocm )
+	rocm? (
+		|| ( ${ROCM_REQUIRED_USE} )
+		!flash
+	)
+"
+
+# CUDA 12 not supported yet: https://github.com/pytorch/pytorch/issues/91122
+RDEPEND="
+	${PYTHON_DEPS}
+	dev-cpp/gflags:=
+	>=dev-cpp/glog-0.5.0
+	dev-libs/cpuinfo
+	dev-libs/libfmt
+	dev-cpp/opentelemetry-cpp
+	dev-libs/protobuf:=
+	dev-libs/pthreadpool
+	dev-libs/sleef
+	virtual/lapack
+	sci-libs/onnx
+	sci-libs/foxi
+	cuda? (
+		dev-libs/cudnn
+		>=dev-libs/cudnn-frontend-1.0.3:0/8
+		<dev-util/nvidia-cuda-toolkit-12.4.0:=[profiler]
+	)
+	fbgemm? ( >=dev-libs/FBGEMM-2023.12.01 )
+	gloo? ( sci-libs/gloo[cuda?] )
+	mpi? ( virtual/mpi )
+	nnpack? ( sci-libs/NNPACK )
+	numpy? ( $(python_gen_cond_dep '
+		dev-python/numpy[${PYTHON_USEDEP}]
+	') )
+	onednn? ( dev-libs/oneDNN )
+	opencl? ( virtual/opencl )
+	qnnpack? (
+		!sci-libs/QNNPACK
+		dev-cpp/gemmlowp
+	)
+	rocm? (
+		=dev-util/hip-6.1*
+		=dev-libs/rccl-6.1*[${ROCM_USEDEP}]
+		=sci-libs/rocThrust-6.1*[${ROCM_USEDEP}]
+		=sci-libs/rocPRIM-6.1*[${ROCM_USEDEP}]
+		=sci-libs/hipBLAS-6.1*[${ROCM_USEDEP}]
+		=sci-libs/hipFFT-6.1*[${ROCM_USEDEP}]
+		=sci-libs/hipSPARSE-6.1*[${ROCM_USEDEP}]
+		=sci-libs/hipRAND-6.1*[${ROCM_USEDEP}]
+		=sci-libs/hipCUB-6.1*[${ROCM_USEDEP}]
+		=sci-libs/hipSOLVER-6.1*[${ROCM_USEDEP}]
+		=sci-libs/miopen-6.1*[${ROCM_USEDEP}]
+		=dev-util/roctracer-6.1*[${ROCM_USEDEP}]
+
+		=sci-libs/hipBLASLt-6.1*
+		amdgpu_targets_gfx90a? ( =sci-libs/hipBLASLt-6.1*[amdgpu_targets_gfx90a] )
+		amdgpu_targets_gfx940? ( =sci-libs/hipBLASLt-6.1*[amdgpu_targets_gfx940] )
+		amdgpu_targets_gfx941? ( =sci-libs/hipBLASLt-6.1*[amdgpu_targets_gfx941] )
+		amdgpu_targets_gfx942? ( =sci-libs/hipBLASLt-6.1*[amdgpu_targets_gfx942] )
+	)
+	distributed? ( sci-libs/tensorpipe[cuda?] )
+	xnnpack? ( >=sci-libs/XNNPACK-2024.02.29 )
+	mkl? ( sci-libs/mkl )
+	openblas? ( sci-libs/openblas )
+"
+DEPEND="
+	${RDEPEND}
+	cuda? ( >=dev-libs/cutlass-3.4.1 )
+	onednn? ( sci-libs/ideep )
+	dev-libs/psimd
+	dev-libs/FP16
+	dev-libs/FXdiv
+	dev-libs/pocketfft
+	dev-libs/flatbuffers
+	>=sci-libs/kineto-0.4.0_p20240525
+	$(python_gen_cond_dep '
+		dev-python/pyyaml[${PYTHON_USEDEP}]
+		dev-python/pybind11[${PYTHON_USEDEP}]
+		dev-python/typing-extensions[${PYTHON_USEDEP}]
+	')
+"
+
+PATCHES=(
+	"${FILESDIR}"/${PN}-2.4.0-gentoo.patch
+	"${FILESDIR}"/${PN}-2.4.0-install-dirs.patch
+	"${FILESDIR}"/${PN}-1.12.0-glog-0.6.0.patch
+	"${FILESDIR}"/${PN}-1.13.1-tensorpipe.patch
+	"${FILESDIR}"/${PN}-2.3.0-cudnn_include_fix.patch
+	"${FILESDIR}"/${PN}-2.1.2-fix-rpath.patch
+	"${FILESDIR}"/${PN}-2.4.0-fix-openmp-link.patch
+	"${FILESDIR}"/${PN}-2.4.0-rocm-fix-std-cpp17.patch
+	"${FILESDIR}"/${PN}-2.2.2-musl.patch
+	"${FILESDIR}"/${PN}-2.4.0-exclude-aotriton.patch
+	"${FILESDIR}"/${PN}-2.3.0-fix-rocm-gcc14-clamp.patch
+	"${FILESDIR}"/${PN}-2.3.0-fix-libcpp.patch
+)
+
+src_prepare() {
+	filter-lto #bug 862672
+	sed -i \
+		-e "/third_party\/gloo/d" \
+		cmake/Dependencies.cmake \
+		|| die
+	cmake_src_prepare
+	pushd torch/csrc/jit/serialization || die
+	flatc --cpp --gen-mutable --scoped-enums mobile_bytecode.fbs || die
+	popd
+	# prefixify the hardcoded paths, after all patches are applied
+	hprefixify \
+		aten/CMakeLists.txt \
+		caffe2/CMakeLists.txt \
+		cmake/Metal.cmake \
+		cmake/Modules/*.cmake \
+		cmake/Modules_CUDA_fix/FindCUDNN.cmake \
+		cmake/Modules_CUDA_fix/upstream/FindCUDA/make2cmake.cmake \
+		cmake/Modules_CUDA_fix/upstream/FindPackageHandleStandardArgs.cmake \
+		cmake/public/LoadHIP.cmake \
+		cmake/public/cuda.cmake \
+		cmake/Dependencies.cmake \
+		torch/CMakeLists.txt \
+		CMakeLists.txt
+
+	if use rocm; then
+		sed -e "s:/opt/rocm:/usr:" \
+			-e "s:lib/cmake:$(get_libdir)/cmake:g" \
+			-e "s/HIP 1.0/HIP 1.0 REQUIRED/" \
+			-i cmake/public/LoadHIP.cmake || die
+
+		ebegin "HIPifying cuda sources"
+		${EPYTHON} tools/amd_build/build_amd.py || die
+		eend $?
+	fi
+}
+
+src_configure() {
+	if use cuda && [[ -z ${TORCH_CUDA_ARCH_LIST} ]]; then
+		ewarn "WARNING: caffe2 is being built with its default CUDA compute capabilities: 3.5 and 7.0."
+		ewarn "These may not be optimal for your GPU."
+		ewarn ""
+		ewarn "To configure caffe2 with the CUDA compute capability that is optimal for your GPU,"
+		ewarn "set TORCH_CUDA_ARCH_LIST in your make.conf, and re-emerge caffe2."
+		ewarn "For example, to use CUDA capability 7.5 & 3.5, add: TORCH_CUDA_ARCH_LIST=7.5 3.5"
+		ewarn "For a Maxwell model GPU, an example value would be: TORCH_CUDA_ARCH_LIST=Maxwell"
+		ewarn ""
+		ewarn "You can look up your GPU's CUDA compute capability at https://developer.nvidia.com/cuda-gpus"
+		ewarn "or by running /opt/cuda/extras/demo_suite/deviceQuery | grep 'CUDA Capability'"
+	fi
+
+	local mycmakeargs=(
+		-DBUILD_CUSTOM_PROTOBUF=OFF
+		-DBUILD_SHARED_LIBS=ON
+
+		-DUSE_CCACHE=OFF
+		-DUSE_CUDA=$(usex cuda)
+		-DUSE_DISTRIBUTED=$(usex distributed)
+		-DUSE_MPI=$(usex mpi)
+		-DUSE_FAKELOWP=OFF
+		-DUSE_FBGEMM=$(usex fbgemm)
+		-DUSE_FLASH_ATTENTION=$(usex flash)
+		-DUSE_MEM_EFF_ATTENTION=OFF
+		-DUSE_GFLAGS=ON
+		-DUSE_GLOG=ON
+		-DUSE_GLOO=$(usex gloo)
+		-DUSE_KINETO=OFF # TODO
+		-DUSE_MAGMA=OFF # TODO: In GURU as sci-libs/magma
+		-DUSE_MKLDNN=$(usex onednn)
+		-DUSE_NNPACK=$(usex nnpack)
+		-DUSE_XNNPACK=$(usex xnnpack)
+		-DUSE_SYSTEM_XNNPACK=$(usex xnnpack)
+		-DUSE_TENSORPIPE=$(usex distributed)
+		-DUSE_PYTORCH_QNNPACK=$(usex qnnpack)
+		-DUSE_NUMPY=$(usex numpy)
+		-DUSE_OPENCL=$(usex opencl)
+		-DUSE_OPENMP=$(usex openmp)
+		-DUSE_ROCM=$(usex rocm)
+		-DUSE_SYSTEM_CPUINFO=ON
+		-DUSE_SYSTEM_PYBIND11=ON
+		-DUSE_UCC=OFF
+		-DUSE_VALGRIND=OFF
+		-DPython_EXECUTABLE="${PYTHON}"
+		-DUSE_ITT=OFF
+		-DUSE_SYSTEM_PTHREADPOOL=ON
+		-DUSE_SYSTEM_PSIMD=ON
+		-DUSE_SYSTEM_FXDIV=ON
+		-DUSE_SYSTEM_FP16=ON
+		-DUSE_SYSTEM_GLOO=ON
+		-DUSE_SYSTEM_ONNX=ON
+		-DUSE_SYSTEM_SLEEF=ON
+		-DUSE_PYTORCH_METAL=OFF
+		-DUSE_XPU=OFF
+
+		-Wno-dev
+		-DTORCH_INSTALL_LIB_DIR="${EPREFIX}"/usr/$(get_libdir)
+		-DLIBSHM_INSTALL_LIB_SUBDIR="${EPREFIX}"/usr/$(get_libdir)
+	)
+
+	if use mkl; then
+		mycmakeargs+=(-DBLAS=MKL)
+	elif use openblas; then
+		mycmakeargs+=(-DBLAS=OpenBLAS)
+	else
+		mycmakeargs+=(-DBLAS=Generic -DBLAS_LIBRARIES=)
+	fi
+
+	if use cuda; then
+		addpredict "/dev/nvidiactl" # bug 867706
+		addpredict "/dev/char"
+		addpredict "/proc/self/task" # bug 926116
+
+		mycmakeargs+=(
+			-DUSE_CUDNN=ON
+			-DTORCH_CUDA_ARCH_LIST="${TORCH_CUDA_ARCH_LIST:-3.5 7.0}"
+			-DUSE_NCCL=OFF # TODO: NVIDIA Collective Communication Library
+			-DCMAKE_CUDA_FLAGS="$(cuda_gccdir -f | tr -d \")"
+		)
+	elif use rocm; then
+		export PYTORCH_ROCM_ARCH="$(get_amdgpu_flags)"
+
+		mycmakeargs+=(
+			-DUSE_NCCL=ON
+			-DUSE_SYSTEM_NCCL=ON
+		)
+
+		# ROCm libraries produce too much warnings
+		append-cxxflags -Wno-deprecated-declarations -Wno-unused-result
+
+		if tc-is-clang; then
+			# fix mangling in LLVM: https://github.com/llvm/llvm-project/issues/85656
+			append-cxxflags -fclang-abi-compat=17
+		fi
+	fi
+
+	if use onednn; then
+		mycmakeargs+=(
+			-DUSE_MKLDNN=ON
+			-DMKLDNN_FOUND=ON
+			-DMKLDNN_LIBRARIES=dnnl
+			-DMKLDNN_INCLUDE_DIR="${ESYSROOT}/usr/include/oneapi/dnnl"
+		)
+	fi
+
+	cmake_src_configure
+
+	# do not rerun cmake and the build process in src_install
+	sed '/RERUN/,+1d' -i "${BUILD_DIR}"/build.ninja || die
+}
+
+src_install() {
+	cmake_src_install
+
+	insinto "/var/lib/${PN}"
+	doins "${BUILD_DIR}"/CMakeCache.txt
+
+	rm -rf python
+	mkdir -p python/torch/include || die
+	mv "${ED}"/usr/lib/python*/site-packages/caffe2 python/ || die
+	cp torch/version.py python/torch/ || die
+	python_domodule python/caffe2
+	python_domodule python/torch
+	ln -s ../../../../../include/torch \
+		"${D}$(python_get_sitedir)"/torch/include/torch || die # bug 923269
+}
diff --git a/sci-libs/caffe2/files/caffe2-2.4.0-exclude-aotriton.patch b/sci-libs/caffe2/files/caffe2-2.4.0-exclude-aotriton.patch
new file mode 100644
index 000000000000..72ab792b2278
--- /dev/null
+++ b/sci-libs/caffe2/files/caffe2-2.4.0-exclude-aotriton.patch
@@ -0,0 +1,65 @@
+Disables aotriton download when both USE_FLASH_ATTENTION and USE_MEM_EFF_ATTENTION cmake flags are OFF
+Backports upstream PR to 2.3.0: https://github.com/pytorch/pytorch/pull/130197
+--- a/aten/src/ATen/native/transformers/cuda/sdp_utils.cpp
++++ b/aten/src/ATen/native/transformers/cuda/sdp_utils.cpp
+@@ -24,7 +24,7 @@
+ #include <c10/core/SymInt.h>
+ #include <c10/util/string_view.h>
+ 
+-#if USE_ROCM
++#if defined(USE_ROCM) && (defined(USE_MEM_EFF_ATTENTION) || defined(USE_FLASH_ATTENTION))
+ #include <aotriton/flash.h>
+ #endif
+ 
+@@ -207,7 +207,7 @@ bool check_flash_attention_hardware_support(sdp_params const& params, bool debug
+   // Check that the gpu is capable of running flash attention
+   using sm80 = SMVersion<8, 0>;
+   using sm90 = SMVersion<9, 0>;
+-#if USE_ROCM
++#if defined(USE_ROCM) && defined(USE_FLASH_ATTENTION)
+   auto stream = at::cuda::getCurrentCUDAStream().stream();
+   if (hipSuccess != aotriton::v2::flash::check_gpu(stream)) {
+     auto dprops = at::cuda::getCurrentDeviceProperties();
+@@ -238,7 +238,7 @@ bool check_mem_efficient_hardware_support(sdp_params const& params, bool debug)
+   // Mem Efficient attention supports hardware in the range [sm_50, sm_90]
+   using sm50 = SMVersion<5, 0>;
+   using sm90 = SMVersion<9, 0>;
+-#if USE_ROCM
++#if defined(USE_ROCM) && defined(USE_MEM_EFF_ATTENTION)
+   auto stream = at::cuda::getCurrentCUDAStream().stream();
+   if (hipSuccess != aotriton::v2::flash::check_gpu(stream)) {
+     auto dprops = at::cuda::getCurrentDeviceProperties();
+@@ -623,7 +623,7 @@ bool can_use_mem_efficient_attention(sdp_params const& params, bool debug) {
+       array_of<at::ScalarType>(at::kHalf, at::kFloat, at::kBFloat16);
+   constexpr auto less_than_sm80_mem_efficient_dtypes =
+       array_of<at::ScalarType>(at::kHalf, at::kFloat);
+-#ifdef USE_ROCM
++#if defined(USE_ROCM) && defined(USE_MEM_EFF_ATTENTION)
+   constexpr auto aotriton_mem_efficient_dtypes =
+       array_of<at::ScalarType>(at::kHalf, at::kFloat, at::kBFloat16);
+ #endif
+@@ -668,7 +668,7 @@ bool can_use_mem_efficient_attention(sdp_params const& params, bool debug) {
+     }
+   }
+ 
+-#ifdef USE_ROCM
++#if defined(USE_ROCM) && defined(USE_MEM_EFF_ATTENTION)
+   return check_tensor_dtype(params, aotriton_mem_efficient_dtypes, debug);
+ #else
+   auto dprop = at::cuda::getCurrentDeviceProperties();
+--- a/cmake/Dependencies.cmake
++++ b/cmake/Dependencies.cmake
+@@ -1095,10 +1095,12 @@ if(USE_ROCM)
+     message(STATUS "Disabling Kernel Assert for ROCm")
+   endif()
+ 
+-  include(${CMAKE_CURRENT_LIST_DIR}/External/aotriton.cmake)
+   if(USE_CUDA)
+     caffe2_update_option(USE_MEM_EFF_ATTENTION OFF)
+   endif()
++  if(USE_FLASH_ATTENTION OR USE_MEM_EFF_ATTENTION)
++    include(${CMAKE_CURRENT_LIST_DIR}/External/aotriton.cmake)
++  endif()
+ else()
+   caffe2_update_option(USE_ROCM OFF)
+ endif()
diff --git a/sci-libs/caffe2/files/caffe2-2.4.0-fix-openmp-link.patch b/sci-libs/caffe2/files/caffe2-2.4.0-fix-openmp-link.patch
new file mode 100644
index 000000000000..9b0fe0b97c0f
--- /dev/null
+++ b/sci-libs/caffe2/files/caffe2-2.4.0-fix-openmp-link.patch
@@ -0,0 +1,14 @@
+Fix "undefined symbol: omp_get_max_active_levels" in mkl + <nothing else> builds
+https://github.com/pytorch/pytorch/issues/116576
+--- a/caffe2/CMakeLists.txt
++++ b/caffe2/CMakeLists.txt
+@@ -1643,6 +1643,9 @@ if(BUILD_SHARED_LIBS)
+   if(CAFFE2_USE_MKL)
+     target_link_libraries(torch_global_deps caffe2::mkl)
+   endif()
++  if(USE_OPENMP)
++    target_link_libraries(torch_global_deps OpenMP::OpenMP_CXX)
++  endif()
+   # The CUDA libraries are linked here for a different reason: in some
+   # cases we load these libraries with ctypes, and if they weren't opened
+   # with RTLD_GLOBAL, we'll do the "normal" search process again (and
diff --git a/sci-libs/caffe2/files/caffe2-2.4.0-gentoo.patch b/sci-libs/caffe2/files/caffe2-2.4.0-gentoo.patch
new file mode 100644
index 000000000000..d592a346386b
--- /dev/null
+++ b/sci-libs/caffe2/files/caffe2-2.4.0-gentoo.patch
@@ -0,0 +1,211 @@
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -966,12 +966,11 @@ endif()
+ # third_party/FBGEMM
+ include(cmake/public/utils.cmake)
+ if(NOT MSVC)
+-  string(APPEND CMAKE_CXX_FLAGS " -O2 -fPIC")
++  string(APPEND CMAKE_CXX_FLAGS " -O2")
+   # Eigen fails to build with some versions, so convert this to a warning
+   # Details at http://eigen.tuxfamily.org/bz/show_bug.cgi?id=1459
+   string(APPEND CMAKE_CXX_FLAGS " -Wall")
+   string(APPEND CMAKE_CXX_FLAGS " -Wextra")
+-  append_cxx_flag_if_supported("-Werror=return-type" CMAKE_CXX_FLAGS)
+   append_cxx_flag_if_supported("-Werror=non-virtual-dtor" CMAKE_CXX_FLAGS)
+   append_cxx_flag_if_supported("-Werror=braced-scalar-init" CMAKE_CXX_FLAGS)
+   append_cxx_flag_if_supported("-Werror=range-loop-construct" CMAKE_CXX_FLAGS)
+@@ -1074,7 +1073,6 @@ if(NOT MSVC)
+   string(APPEND CMAKE_LINKER_FLAGS_DEBUG " -fno-omit-frame-pointer -O0")
+   append_cxx_flag_if_supported("-fno-math-errno" CMAKE_CXX_FLAGS)
+   append_cxx_flag_if_supported("-fno-trapping-math" CMAKE_CXX_FLAGS)
+-  append_cxx_flag_if_supported("-Werror=format" CMAKE_CXX_FLAGS)
+ else()
+   # skip unwanted includes from windows.h
+   add_compile_definitions(WIN32_LEAN_AND_MEAN)
+--- a/aten/src/ATen/native/quantized/cpu/qnnpack/CMakeLists.txt
++++ b/aten/src/ATen/native/quantized/cpu/qnnpack/CMakeLists.txt
+@@ -324,16 +324,8 @@ set_target_properties(pytorch_qnnpack PROPERTIES PUBLIC_HEADER include/pytorch_q
+ set_target_properties(pytorch_qnnpack PROPERTIES PUBLIC_HEADER include/qnnpack_func.h)
+ 
+ # ---[ Configure clog
+-if(NOT TARGET clog)
+-  set(CLOG_BUILD_TESTS OFF CACHE BOOL "")
+-  set(CLOG_RUNTIME_TYPE "${CPUINFO_RUNTIME_TYPE}" CACHE STRING "")
+-  add_subdirectory(
+-    "${CLOG_SOURCE_DIR}"
+-    "${CONFU_DEPENDENCIES_BINARY_DIR}/clog")
+-  # We build static version of clog but a dynamic library may indirectly depend on it
+-  set_property(TARGET clog PROPERTY POSITION_INDEPENDENT_CODE ON)
+-endif()
+-target_link_libraries(pytorch_qnnpack PUBLIC clog)
++find_library(CLOG_LIBRARY NAMES clog REQUIRED)
++target_link_libraries(pytorch_qnnpack PUBLIC ${CLOG_LIBRARY})
+ 
+ # ---[ Configure cpuinfo
+ if(NOT TARGET cpuinfo AND USE_SYSTEM_CPUINFO)
+--- a/c10/CMakeLists.txt
++++ b/c10/CMakeLists.txt
+@@ -94,7 +94,7 @@ if(NOT BUILD_LIBTORCHLESS)
+   if(C10_USE_GLOG)
+     target_link_libraries(c10 PUBLIC glog::glog)
+   endif()
+-  target_link_libraries(c10 PRIVATE fmt::fmt-header-only)
++  target_link_libraries(c10 PRIVATE fmt)
+ 
+   if(C10_USE_NUMA)
+     message(STATUS "NUMA paths:")
+--- a/caffe2/CMakeLists.txt
++++ b/caffe2/CMakeLists.txt
+@@ -87,7 +87,7 @@ endif()
+ # Note: the folders that are being commented out have not been properly
+ # addressed yet.
+ 
+-if(NOT MSVC AND USE_XNNPACK)
++if(FALSE)
+   if(NOT TARGET fxdiv)
+     set(FXDIV_BUILD_TESTS OFF CACHE BOOL "")
+     set(FXDIV_BUILD_BENCHMARKS OFF CACHE BOOL "")
+@@ -1075,7 +1075,6 @@ if(USE_XPU)
+ endif()
+ 
+ if(NOT MSVC AND USE_XNNPACK)
+-  TARGET_LINK_LIBRARIES(torch_cpu PRIVATE fxdiv)
+ endif()
+ 
+ # ==========================================================
+@@ -1178,8 +1177,7 @@ target_include_directories(torch_cpu PRIVATE
+ target_include_directories(torch_cpu PRIVATE
+   ${TORCH_ROOT}/third_party/miniz-2.1.0)
+ 
+-target_include_directories(torch_cpu PRIVATE
+-  ${TORCH_ROOT}/third_party/kineto/libkineto/include)
++target_include_directories(torch_cpu PRIVATE /usr/include/kineto)
+ 
+ if(USE_KINETO)
+   target_include_directories(torch_cpu PRIVATE
+--- a/cmake/Codegen.cmake
++++ b/cmake/Codegen.cmake
+@@ -57,7 +57,7 @@ if(INTERN_BUILD_ATEN_OPS)
+   if(MSVC)
+     set(OPT_FLAG "/fp:strict ")
+   else(MSVC)
+-    set(OPT_FLAG "-O3 ")
++    set(OPT_FLAG " ")
+     if("${CMAKE_BUILD_TYPE}" MATCHES "Debug")
+       set(OPT_FLAG " ")
+     endif()
+--- a/cmake/Dependencies.cmake
++++ b/cmake/Dependencies.cmake
+@@ -466,7 +466,9 @@ if(USE_PYTORCH_QNNPACK)
+     set_property(TARGET pytorch_qnnpack PROPERTY POSITION_INDEPENDENT_CODE ON)
+     set_property(TARGET cpuinfo PROPERTY POSITION_INDEPENDENT_CODE ON)
+     # QNNPACK depends on gemmlowp headers
+-    target_include_directories(pytorch_qnnpack PRIVATE "${CAFFE2_THIRD_PARTY_ROOT}/gemmlowp")
++    find_package(gemmlowp REQUIRED)
++    get_target_property(GEMMLOWP_INCLUDE_DIRS gemmlowp::gemmlowp INTERFACE_INCLUDE_DIRECTORIES)
++    target_include_directories(pytorch_qnnpack PRIVATE ${GEMMLOWP_INCLUDE_DIRS})
+ 
+     if(PYTORCH_QNNPACK_CUSTOM_THREADPOOL)
+       target_compile_definitions(
+@@ -705,7 +707,7 @@ if(BUILD_TEST OR BUILD_MOBILE_BENCHMARK OR BUILD_MOBILE_TEST)
+ endif()
+ 
+ # ---[ FBGEMM
+-if(USE_FBGEMM)
++if(FALSE)
+   set(CAFFE2_THIRD_PARTY_ROOT "${PROJECT_SOURCE_DIR}/third_party")
+   if(NOT DEFINED FBGEMM_SOURCE_DIR)
+     set(FBGEMM_SOURCE_DIR "${CAFFE2_THIRD_PARTY_ROOT}/fbgemm" CACHE STRING "FBGEMM source directory")
+@@ -753,6 +755,7 @@ if(USE_FBGEMM)
+   endif()
+ 
+ if(USE_FBGEMM)
++  list(APPEND Caffe2_DEPENDENCY_LIBS fbgemm)
+   caffe2_update_option(USE_FBGEMM ON)
+ else()
+   caffe2_update_option(USE_FBGEMM OFF)
+@@ -1288,7 +1291,6 @@ if(CAFFE2_CMAKE_BUILDING_WITH_MAIN_REPO AND NOT INTERN_DISABLE_ONNX)
+       set_target_properties(onnx_proto PROPERTIES CXX_STANDARD 17)
+     endif()
+   endif()
+-  add_subdirectory(${CMAKE_CURRENT_LIST_DIR}/../third_party/foxi EXCLUDE_FROM_ALL)
+ 
+   add_definitions(-DONNX_NAMESPACE=${ONNX_NAMESPACE})
+   if(NOT USE_SYSTEM_ONNX)
+@@ -1530,7 +1532,6 @@ endif()
+ #
+ set(TEMP_BUILD_SHARED_LIBS ${BUILD_SHARED_LIBS})
+ set(BUILD_SHARED_LIBS OFF CACHE BOOL "Build shared libs" FORCE)
+-add_subdirectory(${PROJECT_SOURCE_DIR}/third_party/fmt)
+ 
+ # Disable compiler feature checks for `fmt`.
+ #
+@@ -1539,9 +1540,7 @@ add_subdirectory(${PROJECT_SOURCE_DIR}/third_party/fmt)
+ # CMAKE_CXX_FLAGS in ways that break feature checks. Since we already know
+ # `fmt` is compatible with a superset of the compilers that PyTorch is, it
+ # shouldn't be too bad to just disable the checks.
+-set_target_properties(fmt-header-only PROPERTIES INTERFACE_COMPILE_FEATURES "")
+ 
+-list(APPEND Caffe2_DEPENDENCY_LIBS fmt::fmt-header-only)
+ set(BUILD_SHARED_LIBS ${TEMP_BUILD_SHARED_LIBS} CACHE BOOL "Build shared libs" FORCE)
+ 
+ # ---[ Kineto
+--- a/cmake/External/nnpack.cmake
++++ b/cmake/External/nnpack.cmake
+@@ -56,7 +56,7 @@ if(ANDROID OR IOS OR ${CMAKE_SYSTEM_NAME} STREQUAL "Linux" OR ${CMAKE_SYSTEM_NAM
+   set(PTHREADPOOL_SOURCE_DIR "${CAFFE2_THIRD_PARTY_ROOT}/pthreadpool" CACHE STRING "pthreadpool source directory")
+   set(GOOGLETEST_SOURCE_DIR "${CAFFE2_THIRD_PARTY_ROOT}/googletest" CACHE STRING "Google Test source directory")
+ 
+-  if(NOT TARGET nnpack)
++  if(FALSE)
+     if(NOT USE_SYSTEM_PTHREADPOOL AND USE_INTERNAL_PTHREADPOOL_IMPL)
+       set(NNPACK_CUSTOM_THREADPOOL ON CACHE BOOL "")
+     endif()
+--- a/cmake/public/utils.cmake
++++ b/cmake/public/utils.cmake
+@@ -483,8 +483,6 @@ function(torch_compile_options libname)
+   endif()
+ 
+   # Use -O2 for release builds (-O3 doesn't improve perf, and -Os results in perf regression)
+-  target_compile_options(${libname} PRIVATE
+-    $<$<AND:$<COMPILE_LANGUAGE:CXX>,$<OR:$<CONFIG:Release>,$<CONFIG:RelWithDebInfo>>>:-O2>)
+ 
+ endfunction()
+ 
+--- a/functorch/CMakeLists.txt
++++ b/functorch/CMakeLists.txt
+@@ -42,4 +42,4 @@ endif()
+ if(NOT ${TORCH_PYTHON_LINK_FLAGS} STREQUAL "")
+   set_target_properties(${PROJECT_NAME} PROPERTIES LINK_FLAGS ${TORCH_PYTHON_LINK_FLAGS})
+ endif()
+-install(TARGETS ${PROJECT_NAME} DESTINATION "${CMAKE_CURRENT_SOURCE_DIR}")
++install(TARGETS ${PROJECT_NAME} DESTINATION "${CMAKE_INSTALL_LIBDIR}")
+--- a/torch/CMakeLists.txt
++++ b/torch/CMakeLists.txt
+@@ -59,16 +59,8 @@ set(TORCH_PYTHON_INCLUDE_DIRECTORIES
+     ${CMAKE_BINARY_DIR}
+     ${CMAKE_BINARY_DIR}/aten/src
+     ${CMAKE_BINARY_DIR}/caffe2/aten/src
+-    ${CMAKE_BINARY_DIR}/third_party
+-    ${CMAKE_BINARY_DIR}/third_party/onnx
+-
+-    ${TORCH_ROOT}/third_party/valgrind-headers
+-
+-    ${TORCH_ROOT}/third_party/gloo
+-    ${TORCH_ROOT}/third_party/onnx
+-    ${TORCH_ROOT}/third_party/flatbuffers/include
+-    ${TORCH_ROOT}/third_party/kineto/libkineto/include
+-    ${TORCH_ROOT}/third_party/cpp-httplib
++
++    /usr/include/kineto
+ 
+     ${TORCH_SRC_DIR}/csrc
+     ${TORCH_SRC_DIR}/csrc/api/include
+@@ -83,7 +75,6 @@ set(TORCH_PYTHON_LINK_LIBRARIES
+     opentelemetry::api
+     httplib
+     shm
+-    fmt::fmt-header-only
+     ATEN_CPU_FILES_GEN_LIB)
+ 
+ if(USE_ASAN AND TARGET Sanitizer::address)
diff --git a/sci-libs/caffe2/files/caffe2-2.4.0-install-dirs.patch b/sci-libs/caffe2/files/caffe2-2.4.0-install-dirs.patch
new file mode 100644
index 000000000000..ee6e8fb91562
--- /dev/null
+++ b/sci-libs/caffe2/files/caffe2-2.4.0-install-dirs.patch
@@ -0,0 +1,70 @@
+--- a/c10/CMakeLists.txt
++++ b/c10/CMakeLists.txt
+@@ -157,7 +157,7 @@ if(NOT BUILD_LIBTORCHLESS)
+   # Note: for now, we will put all export path into one single Caffe2Targets group
+   # to deal with the cmake deployment need. Inside the Caffe2Targets set, the
+   # individual libraries like libc10.so and libcaffe2.so are still self-contained.
+-  install(TARGETS c10 EXPORT Caffe2Targets DESTINATION lib)
++  install(TARGETS c10 EXPORT Caffe2Targets DESTINATION ${CMAKE_INSTALL_LIBDIR})
+ endif()
+ 
+ install(DIRECTORY ${CMAKE_CURRENT_LIST_DIR}
+--- a/c10/cuda/CMakeLists.txt
++++ b/c10/cuda/CMakeLists.txt
+@@ -82,7 +82,7 @@ if(NOT BUILD_LIBTORCHLESS)
+ # Note: for now, we will put all export path into one single Caffe2Targets group
+ # to deal with the cmake deployment need. Inside the Caffe2Targets set, the
+ # individual libraries like libc10.so and libcaffe2.so are still self-contained.
+-install(TARGETS c10_cuda EXPORT Caffe2Targets DESTINATION lib)
++install(TARGETS c10_cuda EXPORT Caffe2Targets DESTINATION ${CMAKE_INSTALL_LIBDIR})
+ 
+ endif()
+ 
+--- a/c10/hip/CMakeLists.txt
++++ b/c10/hip/CMakeLists.txt
+@@ -57,7 +57,7 @@ if(NOT BUILD_LIBTORCHLESS)
+     $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/../..>
+     $<BUILD_INTERFACE:${CMAKE_BINARY_DIR}>
+     $<INSTALL_INTERFACE:include>)
+-  install(TARGETS c10_hip EXPORT Caffe2Targets DESTINATION lib)
++  install(TARGETS c10_hip EXPORT Caffe2Targets DESTINATION ${CMAKE_INSTALL_LIBDIR})
+   set(C10_HIP_LIB c10_hip)
+ endif()
+ 
+--- a/c10/xpu/CMakeLists.txt
++++ b/c10/xpu/CMakeLists.txt
+@@ -45,7 +45,7 @@ target_include_directories(
+     $<BUILD_INTERFACE:${CMAKE_BINARY_DIR}>
+     $<INSTALL_INTERFACE:include>
+     )
+-  install(TARGETS c10_xpu EXPORT Caffe2Targets DESTINATION lib)
++  install(TARGETS c10_xpu EXPORT Caffe2Targets DESTINATION ${CMAKE_INSTALL_LIBDIR})
+   set(C10_XPU_LIB c10_xpu)
+   add_subdirectory(test)
+ endif()
+--- a/test/cpp/c10d/CMakeLists.txt
++++ b/test/cpp/c10d/CMakeLists.txt
+@@ -64,7 +64,7 @@ if(USE_CUDA)
+       torch_cpu c10d_cuda_test gtest_main __caffe2_ucc)
+     if(INSTALL_TEST)
+       install(TARGETS ProcessGroupUCCTest DESTINATION bin)
+-      install(TARGETS c10d_cuda_test DESTINATION lib)
++      install(TARGETS c10d_cuda_test DESTINATION ${CMAKE_INSTALL_LIBDIR})
+     endif()
+   endif()
+ else()
+--- a/test/cpp/jit/CMakeLists.txt
++++ b/test/cpp/jit/CMakeLists.txt
+@@ -32,9 +32,9 @@ endif()
+ target_link_libraries(backend_with_compiler torch)
+ 
+ if(INSTALL_TEST)
+-  install(TARGETS torchbind_test DESTINATION lib)
+-  install(TARGETS jitbackend_test DESTINATION lib)
+-  install(TARGETS backend_with_compiler DESTINATION lib)
++  install(TARGETS torchbind_test DESTINATION ${CMAKE_INSTALL_LIBDIR})
++  install(TARGETS jitbackend_test DESTINATION ${CMAKE_INSTALL_LIBDIR})
++  install(TARGETS backend_with_compiler DESTINATION ${CMAKE_INSTALL_LIBDIR})
+ endif()
+ 
+ # Build the cpp gtest binary containing the cpp-only tests.
diff --git a/sci-libs/caffe2/files/caffe2-2.4.0-rocm-fix-std-cpp17.patch b/sci-libs/caffe2/files/caffe2-2.4.0-rocm-fix-std-cpp17.patch
new file mode 100644
index 000000000000..3612c3db1a0b
--- /dev/null
+++ b/sci-libs/caffe2/files/caffe2-2.4.0-rocm-fix-std-cpp17.patch
@@ -0,0 +1,50 @@
+Fix for error: invalid argument '-std=c++17' not allowed with 'C'
+https://github.com/pytorch/pytorch/issues/103222
+--- a/c10/hip/CMakeLists.txt
++++ b/c10/hip/CMakeLists.txt
+@@ -36,6 +36,7 @@ if(NOT BUILD_LIBTORCHLESS)
+ 
+   # Propagate HIP_CXX_FLAGS that were set from Dependencies.cmake
+   target_compile_options(c10_hip PRIVATE ${HIP_CXX_FLAGS})
++  set_target_properties(c10_hip PROPERTIES CXX_STANDARD 17 CXX_EXTENSIONS OFF)
+ 
+   # caffe2_hip adds a bunch of dependencies like rocsparse, but c10/hip is supposed to be
+   # minimal. I'm not sure if we need hip_hcc or not; for now leave it out
+--- a/caffe2/CMakeLists.txt
++++ b/caffe2/CMakeLists.txt
+@@ -1670,6 +1670,7 @@ if(USE_ROCM)
+ 
+   # Since PyTorch files contain HIP headers, these flags are required for the necessary definitions to be added.
+   target_compile_options(torch_hip PUBLIC ${HIP_CXX_FLAGS})  # experiment
++  set_target_properties(torch_hip PROPERTIES CXX_STANDARD 17 CXX_EXTENSIONS OFF)
+ 
+   target_link_libraries(torch_hip PUBLIC c10_hip)
+ 
+@@ -1867,6 +1868,7 @@ if(BUILD_TEST)
+       target_include_directories(${test_name} PRIVATE $<INSTALL_INTERFACE:include>)
+       target_include_directories(${test_name} PRIVATE ${Caffe2_CPU_INCLUDE} ${Caffe2_HIP_INCLUDE})
+       target_compile_options(${test_name} PRIVATE ${HIP_CXX_FLAGS})
++      set_target_properties(${test_name} PROPERTIES CXX_STANDARD 17 CXX_EXTENSIONS OFF)
+       add_test(NAME ${test_name} COMMAND $<TARGET_FILE:${test_name}>)
+       if(INSTALL_TEST)
+         install(TARGETS ${test_name} DESTINATION test)
+--- a/cmake/Dependencies.cmake
++++ b/cmake/Dependencies.cmake
+@@ -1050,7 +1050,6 @@ if(USE_ROCM)
+   list(APPEND HIP_CXX_FLAGS -Wno-duplicate-decl-specifier)
+   list(APPEND HIP_CXX_FLAGS -DCAFFE2_USE_MIOPEN)
+   list(APPEND HIP_CXX_FLAGS -DTHRUST_DEVICE_SYSTEM=THRUST_DEVICE_SYSTEM_HIP)
+-  list(APPEND HIP_CXX_FLAGS -std=c++17)
+   list(APPEND HIP_CXX_FLAGS -DHIPBLAS_V2)
+   if(HIP_NEW_TYPE_ENUMS)
+     list(APPEND HIP_CXX_FLAGS -DHIP_NEW_TYPE_ENUMS)
+--- a/cmake/public/utils.cmake
++++ b/cmake/public/utils.cmake
+@@ -332,6 +332,7 @@ function(caffe2_hip_binary_target target_name_or_src)
+   caffe2_binary_target(${target_name_or_src})
+ 
+   target_compile_options(${__target} PRIVATE ${HIP_CXX_FLAGS})
++  set_target_properties(${__target} PROPERTIES CXX_STANDARD 17 CXX_EXTENSIONS OFF)
+   target_include_directories(${__target} PRIVATE ${Caffe2_HIP_INCLUDE})
+ endfunction()
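Usage note (not part of the commit above): as the ewarn block in src_configure() suggests, the CUDA compute capability can be pinned via make.conf before emerging. A minimal sketch of that workflow; the capability value 8.6 and the chosen USE flags are illustrative assumptions, not taken from the ebuild:

    # Pin the CUDA compute capability the build targets (8.6 corresponds to Ampere consumer GPUs)
    echo 'TORCH_CUDA_ARCH_LIST=8.6' >> /etc/portage/make.conf
    # Enable a subset of the USE flags declared in IUSE (cuda, fbgemm, numpy, openmp)
    echo 'sci-libs/caffe2 cuda fbgemm numpy openmp' >> /etc/portage/package.use/caffe2
    # Build the version added by this commit
    emerge --ask =sci-libs/caffe2-2.4.0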