author    | V3n3RiX <venerix@redcorelinux.org> | 2020-12-18 11:06:49 +0000
committer | V3n3RiX <venerix@redcorelinux.org> | 2020-12-18 11:06:49 +0000
commit    | ab3da91fb6c91a9df52fff8f991570f456fd3c7a (patch)
tree      | e8f3bfa2c6c3d20ec3b9c352c839e23949068b6b /sys-cluster/slurm
parent    | 6abbf81ef2f298e3221ff5e67a1f3c5f23958212 (diff)
gentoo resync : 18.12.2020
Diffstat (limited to 'sys-cluster/slurm')
-rw-r--r-- | sys-cluster/slurm/Manifest                                 |   2
-rw-r--r-- | sys-cluster/slurm/files/slurm-20.11.0.1_autoconf-lua.patch |  50
-rw-r--r-- | sys-cluster/slurm/slurm-20.11.0.1-r100.ebuild              | 269
3 files changed, 321 insertions, 0 deletions
diff --git a/sys-cluster/slurm/Manifest b/sys-cluster/slurm/Manifest
index 030cc2728a4c..6e161b14b6e7 100644
--- a/sys-cluster/slurm/Manifest
+++ b/sys-cluster/slurm/Manifest
@@ -1,9 +1,11 @@
 AUX logrotate 360 BLAKE2B 36c5ce908a6434f03a3e140cd1fc0bfec50a6fa89d6120061a5a83d3b12d9a6137ae2fa3a83bd36f55f48c79d140f24d6933cda18fc79674e8b758cacc34a948 SHA512 f6edceef15960accbe72ddd34ff34934fa46b6d647c83e0d3a512d2ddc1ae6c60352e377cfa5332fd5cd817689e23680f7384f1b27fc49677b2e671e1bfe86b9
+AUX slurm-20.11.0.1_autoconf-lua.patch 1769 BLAKE2B 6a77e3bfde6e18a14e07f07a9fb3c334732e4becce39ecf4c6352d06622999aa08c9cfe455b03656836ae6f3d6af7ad64e36851b66c1a6e724b6236fd08c3cdb SHA512 f142484dcaeac6496095e894de0a6288a15189e04d7ac32a353923b4dfedc1dcb3da9bfacb9bc6c7dae37d554194b937c59de2943785a2e71797c8b35832cc7f
 AUX slurm.confd 155 BLAKE2B 4f0793808328b88ffa2b32b1accab6c0dfe0b01dc84210fee6a674361ddf9ea7b3f15ccc775c354198577849ab0791d403b153adee476fc7f1237d2177275bab SHA512 e4d21cdb6a427f0beaef0e157202308f9e86a41dea4532ee98b274a5979b5e82ec2ca4ca2996f6cb59045fd8b01e815aaad71673e2011cead788de3d879aa11f
 AUX slurm.tmpfiles 31 BLAKE2B f08dbc53517cc15463be5264505d858fcf1f2196565868d164463a167459082a23eea25fa22edf2987e52c71f953160c12b29cdb5e5f437a3bdc89f1cd8395b5 SHA512 b7f116687e0dee0377b43cbb49f63569088d6f5f9b1514014999b1135b1b4da462e04887851b753161d7b9691436048b692b7697ffee2a4e45daa339a412c93c
 AUX slurmctld.initd 1842 BLAKE2B 25c9fd76dc888544888abb2790d8a0ae66254c0f8807bf48433a54fd34f49d2f4f95b5c6dd9e4ab1f40938c274be6be2e88cdb8959ecb12b4a86d07bc0a5bc86 SHA512 4356552630d1a154bbf369012c0315baf08b5b98ff6411d66ea3b1b039bfff5f105091b110e8fa1e64209eb0990038abda86d980cf2316ca97a5c10a43243df6
 AUX slurmd.initd 2001 BLAKE2B a0464ee0407406c7ceadd8e78f0229f5c6eda7081a691e24e609caa50f627d9bbf9c2d57bc6746b9c719a66cf2f4b0d756ecbbaf3bcd070846ad5ced6b289c9d SHA512 bb9447c25d2e96de1aea5935084ea78ea403701bd655d89d1a9d1d21bdbf88bdbae3888cfd618aabcc86dd8808401157ff84402ef35c4e138263457fd954046a
 AUX slurmdbd.initd 1717 BLAKE2B e177a937f8bd97bb2f45e479e0f34b23cc23ab44b0000165a856ff8476560638a567baecad8aa8d57aeb76062f248039da7f440ddca8f5827e4729136a10d7de SHA512 6d21f667f4222ab62dc8d7f648e00205da982612d58b98e8e35e6b6602b3ad31c03e5130402437c77d3677b424da7b5226e2bfa8c5d29bc908fd618fc02697a8
 DIST slurm-20-11-0-1.tar.gz 8762691 BLAKE2B 1e24a04b3faf97f4df98bd982d67d47f79670b863f466326e45e00fac0b99124122e94e30054a97471e4337234cbd213806791bdd5b3bddfeb05bef55f42931e SHA512 9a57f2610f7294ba6cd265746b874e32a7930100f30c0adf41d95b261787c441bdcacaa641f0b9785150b94fc7d6b5b7608ec618f668bffcae6f774752f17678
+EBUILD slurm-20.11.0.1-r100.ebuild 7907 BLAKE2B ab2d9574430f650d9e216020bd7b68f7de3ddbd6d743cf422cfc57005705f6b0f021b818914ff2bf49bb0aa9b72c7866641e021f197f4e46b36ecea1c34dfe4b SHA512 fb202e3ca41c54a4f05c1bfce2770aa50b99bca9018a1e8e544316b2927a343bf4e727fbf004dd939ef24aade817633cff8f3e2d69c5dfcc7424c5ea4f95c4ee
 EBUILD slurm-20.11.0.1.ebuild 7764 BLAKE2B 252d3cb8c68645d40b1d0bfe28295d896985787c7b66213569f55ce39d40a2fbfdcac327963453e499948c11f81e9f278864a3174eb9fe3cf86bbe10b59af094 SHA512 3b69f91188cbd5cd436989c2f561c5cf553c2522c7fb46b072fd4b9e55f60f619a286f237f752f0c5072623989cbc7a5c89191a2b6d8144582219f1f1b7fc03c
 MISC metadata.xml 1199 BLAKE2B b52e7cae7554d439e7f25da2a80d98993d5c7f7abd5ba8b76c09bf60731b035040f976ac1bfc3c153f81b8feed89e2ceb5e6b676c25cc7b6219c4bcd87bd7e50 SHA512 8f48d582e61789373fc07ffd1db123cb8f43d337b03611972e480179ca5b43f2866392203f470d3d69784f2aae79cbcffdf583cba0aa4a9c25969f2c72a3f55f
diff --git a/sys-cluster/slurm/files/slurm-20.11.0.1_autoconf-lua.patch b/sys-cluster/slurm/files/slurm-20.11.0.1_autoconf-lua.patch
new file mode 100644
index 000000000000..f17039e0b59d
--- /dev/null
+++ b/sys-cluster/slurm/files/slurm-20.11.0.1_autoconf-lua.patch
@@ -0,0 +1,50 @@
+Prevent automagical detection of Lua, abort if Lua support has been
+requested but Lua cannot be found with pkg-config, and use the Lua
+pkg-config module set up by the eclass-provided wrapper (or unslotted Lua)
+instead of enumerating installed versions.
+
+--- a/auxdir/x_ac_lua.m4
++++ b/auxdir/x_ac_lua.m4
+@@ -12,18 +12,21 @@
+
+ AC_DEFUN([X_AC_LUA],
+ [
+-  x_ac_lua_pkg_name="lua"
+-  #check for 5.3 then 5.2 then 5.1
+-  PKG_CHECK_EXISTS([lua5.3], [x_ac_lua_pkg_name=lua5.3],
+-    [PKG_CHECK_EXISTS([lua-5.3], [x_ac_lua_pkg_name=lua-5.3],
+-      [PKG_CHECK_EXISTS([lua5.2], [x_ac_lua_pkg_name=lua5.2],
+-        [PKG_CHECK_EXISTS([lua-5.2], [x_ac_lua_pkg_name=lua-5.2],
+-          [PKG_CHECK_EXISTS([lua5.1], [x_ac_lua_pkg_name=lua5.1],
+-            [PKG_CHECK_EXISTS([lua-5.1], [x_ac_lua_pkg_name=lua-5.1],
+-              [x_ac_lua_pkg_name="lua >= 5.1"])])])])])])
+-  PKG_CHECK_MODULES([lua], ${x_ac_lua_pkg_name},
+-    [x_ac_have_lua="yes"],
+-    [x_ac_have_lua="no"])
++  AC_MSG_CHECKING([whether to enable support for job scripts written in Lua])
++  AC_ARG_ENABLE(
++    [lua],
++    AS_HELP_STRING(--enable-lua, enable support for job scripts written in Lua),
++    [x_ac_enable_lua="$enableval"],
++    [x_ac_enable_lua=yes])
++  AC_MSG_RESULT([$x_ac_enable_lua])
++
++  x_ac_have_lua=no
++  if test "x$x_ac_enable_lua" = "xyes"; then
++    x_ac_lua_pkg_name="lua"
++    PKG_CHECK_MODULES([lua], ${x_ac_lua_pkg_name},
++      [x_ac_have_lua="yes"],
++      [AC_MSG_ERROR([Lua support requested but Lua not found])])
++  fi
+
+   if test "x$x_ac_have_lua" = "xyes"; then
+     saved_CFLAGS="$CFLAGS"
+@@ -52,8 +55,6 @@
+     fi
+     CFLAGS="$saved_CFLAGS"
+     LIBS="$saved_LIBS"
+-  else
+-    AC_MSG_WARN([unable to locate lua package])
+   fi
+
+ AM_CONDITIONAL(HAVE_LUA, test "x$x_ac_have_lua" = "xyes")
diff --git a/sys-cluster/slurm/slurm-20.11.0.1-r100.ebuild b/sys-cluster/slurm/slurm-20.11.0.1-r100.ebuild
new file mode 100644
index 000000000000..ab498c397291
--- /dev/null
+++ b/sys-cluster/slurm/slurm-20.11.0.1-r100.ebuild
@@ -0,0 +1,269 @@
+# Copyright 1999-2020 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=7
+
+LUA_COMPAT=( lua5-{1..3} )
+
+if [[ ${PV} == *9999* ]]; then
+	EGIT_REPO_URI="https://github.com/SchedMD/slurm.git"
+	INHERIT_GIT="git-r3"
+	MY_P="${P}"
+else
+	if [[ ${PV} == *pre* || ${PV} == *rc* ]]; then
+		MY_PV=$(ver_rs '-0.') # pre-releases or release-candidate
+	else
+		MY_PV=$(ver_rs 1-4 '-') # stable releases
+	fi
+	MY_P="${PN}-${MY_PV}"
+	INHERIT_GIT=""
+	SRC_URI="https://github.com/SchedMD/slurm/archive/${MY_P}.tar.gz"
+	KEYWORDS="~amd64 ~x86"
+fi
+
+inherit autotools bash-completion-r1 lua-single pam perl-module prefix toolchain-funcs systemd ${INHERIT_GIT}
+
+DESCRIPTION="A Highly Scalable Resource Manager"
+HOMEPAGE="https://www.schedmd.com https://github.com/SchedMD/slurm"
+
+LICENSE="GPL-2"
+SLOT="0"
+IUSE="debug hdf5 html ipmi json lua multiple-slurmd +munge mysql netloc numa ofed pam perl slurmdbd static-libs ucx torque X"
+
+COMMON_DEPEND="
+	!sys-cluster/torque
+	!net-analyzer/slurm
+	!net-analyzer/sinfo
+	|| ( sys-cluster/pmix[-pmi] >=sys-cluster/openmpi-2.0.0 )
+	mysql? (
+		|| ( dev-db/mariadb-connector-c dev-db/mysql-connector-c )
+		slurmdbd? ( || ( dev-db/mariadb:* dev-db/mysql:* ) )
+	)
+	munge? ( sys-auth/munge )
+	pam? ( sys-libs/pam )
+	lua? ( ${LUA_DEPS} )
+	ipmi? ( sys-libs/freeipmi )
+	json? ( dev-libs/json-c:= )
+	amd64? ( netloc? ( || ( sys-apps/netloc >=sys-apps/hwloc-2.1.0[netloc] ) ) )
+	hdf5? ( sci-libs/hdf5:= )
+	numa? ( sys-process/numactl )
+	ofed? ( sys-fabric/ofed )
+	ucx? ( sys-cluster/ucx )
+	X? ( net-libs/libssh2 )
+	>=sys-apps/hwloc-1.1.1-r1
+	sys-libs/ncurses:0=
+	app-arch/lz4:0=
+	dev-libs/glib:2=
+	sys-libs/readline:0="
+
+DEPEND="${COMMON_DEPEND}
+	html? ( sys-apps/man2html )"
+
+RDEPEND="${COMMON_DEPEND}
+	acct-user/slurm
+	acct-group/slurm
+	dev-libs/libcgroup"
+
+REQUIRED_USE="lua? ( ${LUA_REQUIRED_USE} )
+	torque? ( perl )"
+
+S="${WORKDIR}/${PN}-${MY_P}"
+
+LIBSLURM_PERL_S="${S}/contribs/perlapi/libslurm/perl"
+LIBSLURMDB_PERL_S="${S}/contribs/perlapi/libslurmdb/perl"
+
+RESTRICT="test"
+
+PATCHES=(
+	"${FILESDIR}"/${PN}-20.11.0.1_autoconf-lua.patch
+)
+
+src_unpack() {
+	if [[ ${PV} == *9999* ]]; then
+		git-r3_src_unpack
+	else
+		default
+	fi
+}
+
+src_prepare() {
+	tc-ld-disable-gold
+	default
+
+	# pids should go to /var/run/slurm
+	sed \
+		-e 's:/tmp:/var/tmp:g' \
+		-e "s:/var/run/slurmctld.pid:${EPREFIX}/run/slurm/slurmctld.pid:g" \
+		-e "s:/var/run/slurmd.pid:${EPREFIX}/run/slurm/slurmd.pid:g" \
+		-e "s:StateSaveLocation=.*:StateSaveLocation=${EPREFIX}/var/spool/slurm:g" \
+		-e "s:SlurmdSpoolDir=.*:SlurmdSpoolDir=${EPREFIX}/var/spool/slurm/slurmd:g" \
+		-i "${S}/etc/slurm.conf.example" \
+		|| die "Can't sed for /var/run/slurmctld.pid"
+	sed \
+		-e "s:/var/run/slurmdbd.pid:${EPREFIX}/run/slurm/slurmdbd.pid:g" \
+		-i "${S}/etc/slurmdbd.conf.example" \
+		|| die "Can't sed for /var/run/slurmdbd.pid"
+	# gentooify systemd services
+	sed \
+		-e 's:sysconfig/.*:conf.d/slurm:g' \
+		-e 's:var/run/:run/slurm/:g' \
+		-e '/^EnvironmentFile=.*/d' \
+		-i "${S}/etc"/*.service.in \
+		|| die "Can't sed systemd services for sysconfig or var/run/"
+
+	sed -e '/AM_PATH_GTK_2_0/d' -i configure.ac || die
+
+	hprefixify auxdir/{ax_check_zlib,x_ac_{lz4,ofed,munge}}.m4
+	eautoreconf
+}
+
+src_configure() {
+	local myconf=(
+		--sysconfdir="${EPREFIX}/etc/${PN}"
+		--with-hwloc="${EPREFIX}/usr"
+		--htmldir="${EPREFIX}/usr/share/doc/${PF}"
+	)
+	use pam && myconf+=( --with-pam_dir=$(getpam_mod_dir) )
+	use mysql || myconf+=( --without-mysql_config )
+	use amd64 && myconf+=( $(use_with netloc) )
+	econf "${myconf[@]}" \
+		$(use_enable debug) \
+		$(use_enable lua) \
+		$(use_enable pam) \
+		$(use_enable X x11) \
+		$(use_with munge) \
+		$(use_with json) \
+		$(use_with hdf5) \
+		$(use_with ofed) \
+		$(use_with ucx) \
+		$(use_enable static-libs static) \
+		$(use_enable multiple-slurmd)
+
+	# --htmldir does not seem to propagate... Documentation is installed
+	# in /usr/share/doc/slurm-2.3.0/html
+	# instead of /usr/share/doc/slurm-2.3.0.2/html
+	sed \
+		-e "s|htmldir = .*/html|htmldir = \${prefix}/share/doc/slurm-${PVR}/html|g" \
+		-i doc/html/Makefile || die
+	if use perl ; then
+		# small hack to make it compile
+		mkdir -p "${S}/src/api/.libs" || die
+		mkdir -p "${S}/src/db_api/.libs" || die
+		touch "${S}/src/api/.libs/libslurm.so" || die
+		touch "${S}/src/db_api/.libs/libslurmdb.so" || die
+		cd "${LIBSLURM_PERL_S}" || die
+		S="${LIBSLURM_PERL_S}" SRC_PREP="no" perl-module_src_configure
+		cd "${LIBSLURMDB_PERL_S}" || die
+		S="${LIBSLURMDB_PERL_S}" SRC_PREP="no" perl-module_src_configure
+		cd "${S}" || die
+		rm -rf "${S}/src/api/.libs" "${S}/src/db_api/.libs" || die
+	fi
+}
+
+src_compile() {
+	default
+	use pam && emake -C contribs/pam
+	if use perl ; then
+		cd "${LIBSLURM_PERL_S}" || die
+		S="${LIBSLURM_PERL_S}" perl-module_src_compile
+		cd "${LIBSLURMDB_PERL_S}" || die
+		S="${LIBSLURMDB_PERL_S}" perl-module_src_compile
+		cd "${S}" || die
+	fi
+	use torque && emake -C contribs/torque
+}
+
+src_install() {
+	default
+	use pam && emake DESTDIR="${D}" -C contribs/pam install
+	if use perl; then
+		cd "${LIBSLURM_PERL_S}" || die
+		S="${LIBSLURM_PERL_S}" perl-module_src_install
+		cd "${LIBSLURMDB_PERL_S}" || die
+		S="${LIBSLURMDB_PERL_S}" perl-module_src_install
+		cd "${S}" || die
+	fi
+	if use torque; then
+		emake DESTDIR="${D}" -C contribs/torque
+		rm -f "${D}"/usr/bin/mpiexec || die
+	fi
+	use static-libs || find "${ED}" -name '*.la' -exec rm {} +
+	# install sample configs
+	keepdir /etc/slurm
+	insinto /etc/slurm
+	doins \
+		etc/prolog.example \
+		etc/cgroup.conf.example \
+		etc/slurm.conf.example \
+		etc/slurmdbd.conf.example
+	exeinto /etc/slurm
+	keepdir /etc/slurm/layouts.d
+	# install init.d files
+	newinitd "$(prefixify_ro "${FILESDIR}/slurmd.initd")" slurmd
+	newinitd "$(prefixify_ro "${FILESDIR}/slurmctld.initd")" slurmctld
+	newinitd "$(prefixify_ro "${FILESDIR}/slurmdbd.initd")" slurmdbd
+	# install conf.d files
+	newconfd "${FILESDIR}/slurm.confd" slurm
+	# install logrotate file
+	insinto /etc/logrotate.d
+	newins "${FILESDIR}/logrotate" slurm
+	# install bashcomp
+	newbashcomp contribs/slurm_completion_help/slurm_completion.sh scontrol
+	bashcomp_alias scontrol \
+		sreport sacctmgr squeue scancel sshare sbcast sinfo \
+		sprio sacct salloc sbatch srun sattach sdiag sstat
+	# install systemd files
+	systemd_newtmpfilesd "${FILESDIR}/slurm.tmpfiles" slurm.conf
+	systemd_dounit etc/slurmd.service etc/slurmctld.service etc/slurmdbd.service
+}
+
+pkg_preinst() {
+	if use munge; then
+		sed -i 's,\(SLURM_USE_MUNGE=\).*,\11,' "${D}"/etc/conf.d/slurm || die
+	fi
+}
+
+create_folders_and_fix_permissions() {
+	einfo "Fixing permissions in ${@}"
+	mkdir -p ${@} || die
+	chown -R ${PN}:${PN} ${@} || die
+}
+
+pkg_postinst() {
+	paths=(
+		"${EROOT}"/var/${PN}/checkpoint
+		"${EROOT}"/var/${PN}
+		"${EROOT}"/var/spool/${PN}/slurmd
+		"${EROOT}"/var/spool/${PN}
+		"${EROOT}"/var/log/${PN}
+		/var/tmp/${PN}/${PN}d
+		/var/tmp/${PN}
+		/run/${PN}
+	)
+	local folder_path
+	for folder_path in ${paths[@]}; do
+		create_folders_and_fix_permissions $folder_path
+	done
+	echo
+
+	elog "Please visit the file '/usr/share/doc/${P}/html/configurator.html'"
+	elog "through a (javascript enabled) browser to create a configuration file."
+	elog "Copy that file to /etc/slurm/slurm.conf on all nodes (including the headnode) of your cluster."
+	echo
+	elog "For cgroup support, please see https://www.schedmd.com/slurmdocs/cgroup.conf.html"
+	elog "Your kernel must be compiled with the wanted cgroup feature:"
+	elog "    For the proctrack plugin:"
+	elog "        freezer"
+	elog "    For the task plugin:"
+	elog "        cpuset, memory, devices"
+	elog "    For the accounting plugin:"
+	elog "        cpuacct, memory, blkio"
+	elog "Then, set these options in /etc/slurm/slurm.conf:"
+	elog "    ProctrackType=proctrack/cgroup"
+	elog "    TaskPlugin=task/cgroup"
+	einfo
+	ewarn "Paths were created for slurm. Please use these paths in /etc/slurm/slurm.conf:"
+	for folder_path in ${paths[@]}; do
+		ewarn "    ${folder_path}"
+	done
+}
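
Not part of the commit above: the pkg_postinst messages describe which cgroup options to set in /etc/slurm/slurm.conf, and src_prepare rewrites the example config to use PID files under /run/slurm and spool directories under /var/spool/slurm. The shell sketch below only collects those values into one step; it assumes /etc/slurm/slurm.conf already exists (e.g. generated from configurator.html, as the elog suggests) and that none of these keys are set yet.

# Sketch only: append the cgroup settings and paths named in the elog output
# and in src_prepare to an existing slurm.conf. Run as root on each node.
cat >> /etc/slurm/slurm.conf <<'EOF'
ProctrackType=proctrack/cgroup
TaskPlugin=task/cgroup
SlurmctldPidFile=/run/slurm/slurmctld.pid
SlurmdPidFile=/run/slurm/slurmd.pid
StateSaveLocation=/var/spool/slurm
SlurmdSpoolDir=/var/spool/slurm/slurmd
EOF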