From 479921825a5c44a5fbcd5441f00ee98e54db9bac Mon Sep 17 00:00:00 2001
From: V3n3RiX
Date: Mon, 25 Dec 2023 13:34:50 +0000
Subject: gentoo auto-resync : 25:12:2023 - 13:34:50

---
 sci-libs/datasets/Manifest                     |  12 +-
 sci-libs/datasets/datasets-2.11.0-r2.ebuild    |  52 -----
 sci-libs/datasets/datasets-2.12.0.ebuild       |  53 -----
 sci-libs/datasets/datasets-2.13.1.ebuild       |  59 -----
 sci-libs/datasets/datasets-2.14.4.ebuild       |  59 -----
 sci-libs/datasets/datasets-2.14.7.ebuild       |  66 ++++++
 .../datasets/files/datasets-2.11.0-tests.patch | 242 ---------------------
 .../datasets/files/datasets-2.12.0-tests.patch | 242 ---------------------
 8 files changed, 68 insertions(+), 717 deletions(-)
 delete mode 100644 sci-libs/datasets/datasets-2.11.0-r2.ebuild
 delete mode 100644 sci-libs/datasets/datasets-2.12.0.ebuild
 delete mode 100644 sci-libs/datasets/datasets-2.13.1.ebuild
 delete mode 100644 sci-libs/datasets/datasets-2.14.4.ebuild
 create mode 100644 sci-libs/datasets/datasets-2.14.7.ebuild
 delete mode 100644 sci-libs/datasets/files/datasets-2.11.0-tests.patch
 delete mode 100644 sci-libs/datasets/files/datasets-2.12.0-tests.patch

(limited to 'sci-libs/datasets')

diff --git a/sci-libs/datasets/Manifest b/sci-libs/datasets/Manifest
index 5f8ba17d48e0..e4d45d43ed46 100644
--- a/sci-libs/datasets/Manifest
+++ b/sci-libs/datasets/Manifest
@@ -1,12 +1,4 @@
-AUX datasets-2.11.0-tests.patch 8489 BLAKE2B fc53b22427f96255aa9d74ecc4eaa1e38d14893819927af523fccf496cf0fef7ffabd98a600d21a749eb6e03d8af591ec2bd2cd0bc16c427a6e4953c1d4b52a4 SHA512 dce48c7d93adee9c70278e48dc45c50877132205ce40476212d083043fed9ba0786841cd9df68f91e7df81499c1262ead1b0e923954ad63c97c7e9553f0202c1
-AUX datasets-2.12.0-tests.patch 8572 BLAKE2B 7be2fb4a6f39376749160ed3f73852e98ce5139629e28ac1d3dd8c5357e90221fcfead3ef9e23378a4781d19a1804cc433a086279b4c03d2cf37c4ae74947ebc SHA512 727a6e4f035060f40fe476e4d022d097c69f241044e25a216c85ca21352c45cee189622076f62de3c19d616328e7c68c5c77fc611db5d75505465cf9c35bf7d4
 AUX datasets-2.14.4-tests.patch 8616 BLAKE2B 8a65d1315b27658a5f741ebc022c83692252a4833ec4d7b79873799c2bb4bb68534a9e13e7fae1c9a6c051b3615fbb783e6e7885ed93968b31aea6629b4116c4 SHA512 2c6d27c297995466a0aebefa46e86113bdce7d84ea00bb1630549fc379fbb51d66f8f01a8d098d56ec2b26d5200f129460567abdbf6a63d4e2a61372fbfbc6a3
-DIST datasets-2.11.0.gh.tar.gz 2141289 BLAKE2B 0fb471dd6ee5de3831eb6586c4a15e67381262470b72d5ab02ee87dfc7977cb4d40e04da6507049d1e47cb8948cad11988bb7627293b48231e1cd413d2cfb885 SHA512 9ec2274d7978e3dde1b2f8ce78dd65bdf66742bbfee7b8672af46216aeaae3ef5c4604a8a5ea0bdee808f1c362cca9a122c16d2e9a161678148e581e4cd5c863
-DIST datasets-2.12.0.gh.tar.gz 2149274 BLAKE2B 8f188901dfe293ac2b673f37e0d135e01a8f131adf9030ef1815ce2faa7ba0b36faf64a002cae1ced2d3ed5b7f50f43ba5cda90ab9254fd5f66bbfaed6085f3f SHA512 7389a1c6ee8ff4cda39a2c3f52218aa6f4b1cd6b45f48f83bfa2191359a8999d54153120d968b3cf7e5e932f88822783578e3d859dcb20f38fb0d915d88220c9
-DIST datasets-2.13.1.gh.tar.gz 2166516 BLAKE2B 2269434b94145837e491ec6784218f6972df94a558b9067020076fb44dd937a103e3c57dd3761bb0a4cb3c3b6248299ec2a6c3f03c5bd016daaa8957591bf7b6 SHA512 3d2d1aad86b6a472cd6d0e6c661d4730cc0ed1a0fff55c739fc6a0ba68a8f53ae8789029553abd713d0b30648dd020f1880b2d8110c72b5c89a320c2b24f7752
-DIST datasets-2.14.4.gh.tar.gz 2142214 BLAKE2B d4c98a9f29ca748c3c20f32b9a89f053cf6327f56353341ba0073d3b5561ed9aea372d2fa74cadfa8b0f2ba0f6c2e9b3181cca9724719cfe3969f36bbb893f11 SHA512 c3a0701dd83474f4a0d839fe4ef56cfccc9f1d45b6506d44d0f9100bc9dbc90014d16c8e0090dc13f3b2d963bd96af45281bde6e3d7af230467ec7dd26204aa3
-EBUILD datasets-2.11.0-r2.ebuild 1367 BLAKE2B f129b98ae08b8005488fa53ee8d607da83d1a334ee660bda871557db3faab9d754e29b7e486d5dcf3bbb4cc1f042fc6b8e786c13b5cf42a70e8aad088e5642fb SHA512 0875cc6352ea35e3637870493cbc967f9195f6289bfb6bad0e45475a99fdebcab7b48a96a9e0bf23aa4ef5f9a42f916486808d4062ca5b12667ac207b13e667f
-EBUILD datasets-2.12.0.ebuild 1413 BLAKE2B 724c604de725621a71ca2d9b2ce9fc6dae60856ba03c443649e9ab991bfee8a8e5ec2f4b2ab9d0170a73864cd2b0ac297a4e59a702ed16cb37711a71246de306 SHA512 b323d37992f40d398f330547ea5f15419d002bf39014a207a297a3e8411615ec146367369be8d1e7366dcae22a0a785ddf61a2e8a823a4f069081d370bdcbc55
-EBUILD datasets-2.13.1.ebuild 1552 BLAKE2B 8a25859177dce337ab70c33c7828c43eeb50f7cb96e182c46aa983e5ead757e4b0caf142a65060466b7eea6dd088ed31f183b9e26e8d95117b56514b37317090 SHA512 ea272cf93cdfd2bbad01bca127051fa94243ae7ecaf026b85aed4b76424e702b03a768526f63aa88d837b0da413f37e68c1465194d478f14b4b018ec4148cc7f
-EBUILD datasets-2.14.4.ebuild 1544 BLAKE2B 31e31a85d2c483f90f86195828e5f3eaff531096c26e932d59d637f885fb638fc287cc6a6675ec98d07f135a489bd98159964052104e6049c37766328b71ae95 SHA512 c3b32db313a118c92d9ecb63720b490be5e37e6b936e603170683cee4e837294492a5c20ebf75662c9265dc6f50a28a0eba339b7330bd44b3f6ee65aba332ae0
+DIST datasets-2.14.7.gh.tar.gz 2145270 BLAKE2B b3196f75bd52432091052e63ccfc538072b30bead213c7ddc549724c8efedacdf6bb8934574220ee62e27a48240a769ad5e79c4e39cad92538dc6947f7f9bd2b SHA512 87ecaec34670af5b4879aaa85e730fc4ba376028e7ca033a556aec9ac55156f11252dd130c12dc160d5c3d5618fa8888072e46c7dcc01eed9c0e2e07657b0b74
+EBUILD datasets-2.14.7.ebuild 1847 BLAKE2B c73a345a7d2a304a687fef853a725e6edef6989576414ab15a0432021f1bd159ef734e987b72614dfb3e3a9a2c344a58494deef56f5e252fda2465d8ee7673c9 SHA512 d4f9d04848b740fd73b473399f38cd03c5803217d8ca91d742f4304127bf2994fb4bb2e5723e02c3a5380ea11989964d4ea9ea2fdd0005384878976476871fe5
 MISC metadata.xml 379 BLAKE2B 48ebb9e7bfa8b58b0d15b82c4146def465e08cf3212ab4af04129d09c153b67b00d0fa05b94d6af54f643ec3a202f2335d3254b966f49d1394d3c7b9e5da56a5 SHA512 99560decfaa0e438980f372d99257695e9ca9585167d9aba091e0b775c2f8384657ddc017841c8f06f8b568017a54fb9e31da736f3c875da717e154cdce876d1
diff --git a/sci-libs/datasets/datasets-2.11.0-r2.ebuild b/sci-libs/datasets/datasets-2.11.0-r2.ebuild
deleted file mode 100644
index a2f4ad26e65b..000000000000
--- a/sci-libs/datasets/datasets-2.11.0-r2.ebuild
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2023 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=8
-
-DISTUTILS_USE_PEP517=setuptools
-PYTHON_COMPAT=( python3_{9..11} )
-DISTUTILS_SINGLE_IMPL=1
-inherit distutils-r1
-
-DESCRIPTION="Access and share datasets for Audio, Computer Vision, and NLP tasks"
-HOMEPAGE="
-	https://pypi.org/project/datasets/
-"
-SRC_URI="https://github.com/huggingface/${PN}/archive/refs/tags/${PV}.tar.gz
-	-> ${P}.gh.tar.gz"
-IUSE="test"
-
-LICENSE="Apache-2.0"
-SLOT="0"
-KEYWORDS="~amd64"
-
-RDEPEND="
-	${PYTHON_DEPS}
-	sci-libs/pytorch[${PYTHON_SINGLE_USEDEP}]
-	$(python_gen_cond_dep '
-		dev-python/absl-py[${PYTHON_USEDEP}]
-		dev-python/aiohttp[${PYTHON_USEDEP}]
-		dev-python/fsspec[${PYTHON_USEDEP}]
-		dev-python/multiprocess[${PYTHON_USEDEP}]
-		dev-python/pandas[${PYTHON_USEDEP}]
-		dev-python/pyarrow[${PYTHON_USEDEP},parquet,snappy]
-		dev-python/tqdm[${PYTHON_USEDEP}]
-		dev-python/xxhash[${PYTHON_USEDEP}]
-		dev-python/zstandard[${PYTHON_USEDEP}]
-		sci-libs/huggingface_hub[${PYTHON_USEDEP}]
-		sci-libs/scikit-learn[${PYTHON_USEDEP}]
-	')
-"
-DEPEND="${RDEPEND}"
-BDEPEND="test? (
-	$(python_gen_cond_dep '
-		dev-python/pytest-datadir[${PYTHON_USEDEP}]
-		dev-python/decorator[${PYTHON_USEDEP}]
-		sci-libs/jiwer[${PYTHON_USEDEP}]
-		sci-libs/seqeval[${PYTHON_USEDEP}]
-	')
-)"
-
-PATCHES=( "${FILESDIR}"/${P}-tests.patch )
-
-distutils_enable_tests pytest
diff --git a/sci-libs/datasets/datasets-2.12.0.ebuild b/sci-libs/datasets/datasets-2.12.0.ebuild
deleted file mode 100644
index 66b609fd2b57..000000000000
--- a/sci-libs/datasets/datasets-2.12.0.ebuild
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright 2023 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=8
-
-DISTUTILS_USE_PEP517=setuptools
-PYTHON_COMPAT=( python3_{9..11} )
-DISTUTILS_SINGLE_IMPL=1
-inherit distutils-r1
-
-DESCRIPTION="Access and share datasets for Audio, Computer Vision, and NLP tasks"
-HOMEPAGE="
-	https://pypi.org/project/datasets/
-"
-SRC_URI="https://github.com/huggingface/${PN}/archive/refs/tags/${PV}.tar.gz
-	-> ${P}.gh.tar.gz"
-IUSE="test"
-
-LICENSE="Apache-2.0"
-SLOT="0"
-KEYWORDS="~amd64"
-
-RDEPEND="
-	${PYTHON_DEPS}
-	sci-libs/pytorch[${PYTHON_SINGLE_USEDEP}]
-	$(python_gen_cond_dep '
-		dev-python/absl-py[${PYTHON_USEDEP}]
-		dev-python/aiohttp[${PYTHON_USEDEP}]
-		dev-python/fsspec[${PYTHON_USEDEP}]
-		dev-python/multiprocess[${PYTHON_USEDEP}]
-		dev-python/pandas[${PYTHON_USEDEP}]
-		dev-python/pyarrow[${PYTHON_USEDEP},parquet,snappy]
-		dev-python/tqdm[${PYTHON_USEDEP}]
-		dev-python/xxhash[${PYTHON_USEDEP}]
-		dev-python/zstandard[${PYTHON_USEDEP}]
-		sci-libs/huggingface_hub[${PYTHON_USEDEP}]
-		sci-libs/scikit-learn[${PYTHON_USEDEP}]
-	')
-"
-DEPEND="${RDEPEND}"
-BDEPEND="test? (
-	$(python_gen_cond_dep '
-		dev-python/pytest-datadir[${PYTHON_USEDEP}]
-		dev-python/decorator[${PYTHON_USEDEP}]
-		=dev-python/sqlalchemy-1*[${PYTHON_USEDEP}]
-		sci-libs/jiwer[${PYTHON_USEDEP}]
-		sci-libs/seqeval[${PYTHON_USEDEP}]
-	')
-)"
-
-PATCHES=( "${FILESDIR}"/${P}-tests.patch )
-
-distutils_enable_tests pytest
diff --git a/sci-libs/datasets/datasets-2.13.1.ebuild b/sci-libs/datasets/datasets-2.13.1.ebuild
deleted file mode 100644
index 60a16a43e361..000000000000
--- a/sci-libs/datasets/datasets-2.13.1.ebuild
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright 2023 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=8
-
-DISTUTILS_USE_PEP517=setuptools
-PYTHON_COMPAT=( python3_{9..11} )
-DISTUTILS_SINGLE_IMPL=1
-inherit distutils-r1
-
-DESCRIPTION="Access and share datasets for Audio, Computer Vision, and NLP tasks"
-HOMEPAGE="
-	https://pypi.org/project/datasets/
-"
-SRC_URI="https://github.com/huggingface/${PN}/archive/refs/tags/${PV}.tar.gz
-	-> ${P}.gh.tar.gz"
-IUSE="test"
-
-LICENSE="Apache-2.0"
-SLOT="0"
-KEYWORDS="~amd64"
-
-RDEPEND="
-	${PYTHON_DEPS}
-	sci-libs/pytorch[${PYTHON_SINGLE_USEDEP}]
-	$(python_gen_cond_dep '
-		dev-python/absl-py[${PYTHON_USEDEP}]
-		dev-python/aiohttp[${PYTHON_USEDEP}]
-		dev-python/fsspec[${PYTHON_USEDEP}]
-		dev-python/multiprocess[${PYTHON_USEDEP}]
-		dev-python/pandas[${PYTHON_USEDEP}]
-		dev-python/pyarrow[${PYTHON_USEDEP},parquet,snappy]
-		dev-python/tqdm[${PYTHON_USEDEP}]
-		dev-python/xxhash[${PYTHON_USEDEP}]
-		dev-python/zstandard[${PYTHON_USEDEP}]
-		sci-libs/huggingface_hub[${PYTHON_USEDEP}]
-		sci-libs/scikit-learn[${PYTHON_USEDEP}]
-	')
-"
-DEPEND="${RDEPEND}"
-BDEPEND="test? (
-	$(python_gen_cond_dep '
-		dev-python/pytest-datadir[${PYTHON_USEDEP}]
-		dev-python/decorator[${PYTHON_USEDEP}]
-		=dev-python/sqlalchemy-1*[${PYTHON_USEDEP}]
-		sci-libs/jiwer[${PYTHON_USEDEP}]
-		sci-libs/seqeval[${PYTHON_USEDEP}]
-	')
-)"
-
-PATCHES=( "${FILESDIR}"/${PN}-2.12.0-tests.patch )
-
-distutils_enable_tests pytest
-
-src_prepare() {
-	distutils-r1_src_prepare
-	rm tests/packaged_modules/test_spark.py || die
-	rm tests/test_upstream_hub.py || die
-}
diff --git a/sci-libs/datasets/datasets-2.14.4.ebuild b/sci-libs/datasets/datasets-2.14.4.ebuild
deleted file mode 100644
index 08ed796e9c2d..000000000000
--- a/sci-libs/datasets/datasets-2.14.4.ebuild
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright 2023 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=8
-
-DISTUTILS_USE_PEP517=setuptools
-PYTHON_COMPAT=( python3_{9..11} )
-DISTUTILS_SINGLE_IMPL=1
-inherit distutils-r1
-
-DESCRIPTION="Access and share datasets for Audio, Computer Vision, and NLP tasks"
-HOMEPAGE="
-	https://pypi.org/project/datasets/
-"
-SRC_URI="https://github.com/huggingface/${PN}/archive/refs/tags/${PV}.tar.gz
-	-> ${P}.gh.tar.gz"
-IUSE="test"
-
-LICENSE="Apache-2.0"
-SLOT="0"
-KEYWORDS="~amd64"
-
-RDEPEND="
-	${PYTHON_DEPS}
-	sci-libs/pytorch[${PYTHON_SINGLE_USEDEP}]
-	$(python_gen_cond_dep '
-		dev-python/absl-py[${PYTHON_USEDEP}]
-		dev-python/aiohttp[${PYTHON_USEDEP}]
-		dev-python/fsspec[${PYTHON_USEDEP}]
-		dev-python/multiprocess[${PYTHON_USEDEP}]
-		dev-python/pandas[${PYTHON_USEDEP}]
-		dev-python/pyarrow[${PYTHON_USEDEP},parquet,snappy]
-		dev-python/tqdm[${PYTHON_USEDEP}]
-		dev-python/xxhash[${PYTHON_USEDEP}]
-		dev-python/zstandard[${PYTHON_USEDEP}]
-		sci-libs/huggingface_hub[${PYTHON_USEDEP}]
-		sci-libs/scikit-learn[${PYTHON_USEDEP}]
-	')
-"
-DEPEND="${RDEPEND}"
-BDEPEND="test? (
-	$(python_gen_cond_dep '
-		dev-python/pytest-datadir[${PYTHON_USEDEP}]
-		dev-python/decorator[${PYTHON_USEDEP}]
-		=dev-python/sqlalchemy-1*[${PYTHON_USEDEP}]
-		sci-libs/jiwer[${PYTHON_USEDEP}]
-		sci-libs/seqeval[${PYTHON_USEDEP}]
-	')
-)"
-
-PATCHES=( "${FILESDIR}"/${P}-tests.patch )
-
-distutils_enable_tests pytest
-
-src_prepare() {
-	distutils-r1_src_prepare
-	rm tests/packaged_modules/test_spark.py || die
-	rm tests/test_upstream_hub.py || die
-}
diff --git a/sci-libs/datasets/datasets-2.14.7.ebuild b/sci-libs/datasets/datasets-2.14.7.ebuild
new file mode 100644
index 000000000000..0fab7cd550c4
--- /dev/null
+++ b/sci-libs/datasets/datasets-2.14.7.ebuild
@@ -0,0 +1,66 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+DISTUTILS_USE_PEP517=setuptools
+PYTHON_COMPAT=( python3_{9..11} )
+DISTUTILS_SINGLE_IMPL=1
+inherit distutils-r1
+
+DESCRIPTION="Access and share datasets for Audio, Computer Vision, and NLP tasks"
+HOMEPAGE="
+	https://pypi.org/project/datasets/
+"
+SRC_URI="https://github.com/huggingface/${PN}/archive/refs/tags/${PV}.tar.gz
+	-> ${P}.gh.tar.gz"
+IUSE="test"
+
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+
+# For pin on fsspec see https://github.com/huggingface/datasets/issues/6333
+RDEPEND="
+	${PYTHON_DEPS}
+	sci-libs/pytorch[${PYTHON_SINGLE_USEDEP}]
+	$(python_gen_cond_dep '
+		dev-python/absl-py[${PYTHON_USEDEP}]
+		dev-python/aiohttp[${PYTHON_USEDEP}]
+		<=dev-python/fsspec-2023.10.0[${PYTHON_USEDEP}]
+		dev-python/multiprocess[${PYTHON_USEDEP}]
+		dev-python/packaging[${PYTHON_USEDEP}]
+		dev-python/pandas[${PYTHON_USEDEP}]
+		dev-python/pyarrow[${PYTHON_USEDEP},parquet,snappy]
+		dev-python/pyyaml[${PYTHON_USEDEP}]
+		dev-python/tqdm[${PYTHON_USEDEP}]
+		dev-python/xxhash[${PYTHON_USEDEP}]
+		dev-python/zstandard[${PYTHON_USEDEP}]
+		>=sci-libs/huggingface_hub-0.14.0[${PYTHON_USEDEP}]
+		sci-libs/scikit-learn[${PYTHON_USEDEP}]
+	')
+"
+DEPEND="${RDEPEND}"
+BDEPEND="test? (
+	$(python_gen_cond_dep '
+		dev-python/absl-py[${PYTHON_USEDEP}]
+		dev-python/pytest-datadir[${PYTHON_USEDEP}]
+		dev-python/decorator[${PYTHON_USEDEP}]
+		=dev-python/sqlalchemy-1*[${PYTHON_USEDEP}]
+		sci-libs/jiwer[${PYTHON_USEDEP}]
+		sci-libs/seqeval[${PYTHON_USEDEP}]
+	')
+)"
+
+PATCHES=( "${FILESDIR}"/${PN}-2.14.4-tests.patch )
+
+distutils_enable_tests pytest
+
+src_prepare() {
+	distutils-r1_src_prepare
+	rm tests/packaged_modules/test_spark.py || die
+	rm tests/test_upstream_hub.py || die
+	sed -i -e \
+		"/pyarrow_hotfix/d" \
+		src/datasets/features/features.py || die
+}
diff --git a/sci-libs/datasets/files/datasets-2.11.0-tests.patch b/sci-libs/datasets/files/datasets-2.11.0-tests.patch
deleted file mode 100644
index e105c01bc63b..000000000000
--- a/sci-libs/datasets/files/datasets-2.11.0-tests.patch
+++ /dev/null
@@ -1,242 +0,0 @@
---- a/tests/test_metric_common.py	2023-05-04 18:48:48.550861318 +0200
-+++ b/tests/test_metric_common.py	2023-05-04 18:50:25.787364577 +0200
-@@ -93,6 +93,7 @@
-     INTENSIVE_CALLS_PATCHER = {}
-     metric_name = None
- 
-+    @pytest.mark.skip(reason="disabling, depends on bert_score, bleurt, math_equivalence, coval, nltk, faiss, mauve, rouge_score, sacrebleu, sacremoses ...")
-     def test_load_metric(self, metric_name):
-         doctest.ELLIPSIS_MARKER = "[...]"
-         metric_module = importlib.import_module(
---- a/tests/test_hf_gcp.py	2023-05-04 19:33:31.150825303 +0200
-+++ b/tests/test_hf_gcp.py	2023-05-04 19:40:08.401759538 +0200
-@@ -69,6 +69,7 @@
-         self.assertTrue(os.path.exists(datset_info_path))
- 
- 
-+@pytest.mark.skip(reason="require apache_beam")
- @pytest.mark.integration
- def test_wikipedia_frr(tmp_path_factory):
-     tmp_dir = tmp_path_factory.mktemp("test_hf_gcp") / "test_wikipedia_simple"
---- a/tests/test_distributed.py	2023-05-04 19:43:09.861275030 +0200
-+++ b/tests/test_distributed.py	2023-05-04 19:44:17.608326722 +0200
-@@ -55,6 +55,7 @@
-     assert len({tuple(x.values()) for ds in datasets_per_rank for x in ds}) == full_size
- 
- 
-+@pytest.mark.skip(reason="require distributed torch")
- @pytest.mark.parametrize("streaming", [False, True])
- @require_torch
- @pytest.mark.skipif(os.name == "nt", reason="execute_subprocess_async doesn't support windows")
-@@ -76,6 +77,7 @@
-     execute_subprocess_async(cmd, env=os.environ.copy())
- 
- 
-+@pytest.mark.skip(reason="require distributed torch")
- @pytest.mark.parametrize(
-     "nproc_per_node, num_workers",
-     [
---- a/tests/utils.py	2023-05-06 08:43:16.251987543 +0200
-+++ b/tests/utils.py	2023-05-06 08:44:24.467952870 +0200
-@@ -54,8 +54,8 @@
- # Audio
- require_sndfile = pytest.mark.skipif(
-     # On Windows and OS X, soundfile installs sndfile
--    find_spec("soundfile") is None or version.parse(importlib_metadata.version("soundfile")) < version.parse("0.12.0"),
--    reason="test requires sndfile>=0.12.1: 'pip install \"soundfile>=0.12.1\"'; ",
-+    True,
-+    reason="test requires librosa",
- )
- 
- # Beam
---- a/tests/features/test_audio.py	2023-05-06 09:03:58.680108142 +0200
-+++ a/tests/features/test_audio.py	2023-05-06 09:05:50.463407967 +0200
-@@ -57,6 +57,7 @@
-     assert features.arrow_schema == pa.schema({"sequence_of_audios": pa.list_(Audio().pa_type)})
- 
- 
-+@pytest.mark.skip(reason="require librosa")
- @pytest.mark.parametrize(
-     "build_example",
-     [
-@@ -81,6 +82,7 @@
-     assert decoded_example.keys() == {"path", "array", "sampling_rate"}
- 
- 
-+@pytest.mark.skip(reason="require librosa")
- @pytest.mark.parametrize(
-     "build_example",
-     [
-@@ -148,6 +149,7 @@
-     assert decoded_example["sampling_rate"] == 48000
- 
- 
-+@pytest.mark.skip(reason="require librosa")
- @pytest.mark.parametrize("sampling_rate", [16_000, 48_000])
- def test_audio_decode_example_pcm(shared_datadir, sampling_rate):
-     audio_path = str(shared_datadir / "test_audio_16000.pcm")
-@@ -414,6 +417,7 @@
-     assert column[0]["sampling_rate"] == 16000
- 
- 
-+@pytest.mark.skip(reason="require librosa")
- @pytest.mark.parametrize(
-     "build_data",
-     [
-@@ -438,6 +442,7 @@
-     assert item["audio"].keys() == {"path", "array", "sampling_rate"}
- 
- 
-+@pytest.mark.skip(reason="require librosa")
- def test_dataset_concatenate_audio_features(shared_datadir):
-     # we use a different data structure between 1 and 2 to make sure they are compatible with each other
-     audio_path = str(shared_datadir / "test_audio_44100.wav")
-@@ -451,6 +456,7 @@
-     assert concatenated_dataset[1]["audio"]["array"].shape == dset2[0]["audio"]["array"].shape
- 
- 
-+@pytest.mark.skip(reason="require librosa")
- def test_dataset_concatenate_nested_audio_features(shared_datadir):
-     # we use a different data structure between 1 and 2 to make sure they are compatible with each other
-     audio_path = str(shared_datadir / "test_audio_44100.wav")
-@@ -610,6 +616,7 @@
-     assert isinstance(ds, Dataset)
- 
- 
-+@require_sndfile
- def test_dataset_with_audio_feature_undecoded(shared_datadir):
-     audio_path = str(shared_datadir / "test_audio_44100.wav")
-     data = {"audio": [audio_path]}
-@@ -627,6 +634,7 @@
-     assert column[0] == {"path": audio_path, "bytes": None}
- 
- 
-+@require_sndfile
- def test_formatted_dataset_with_audio_feature_undecoded(shared_datadir):
-     audio_path = str(shared_datadir / "test_audio_44100.wav")
-     data = {"audio": [audio_path]}
-@@ -658,6 +666,7 @@
-     assert column[0] == {"path": audio_path, "bytes": None}
- 
- 
-+@require_sndfile
- def test_dataset_with_audio_feature_map_undecoded(shared_datadir):
-     audio_path = str(shared_datadir / "test_audio_44100.wav")
-     data = {"audio": [audio_path]}
---- a/tests/packaged_modules/test_audiofolder.py	2023-05-06 14:00:39.560876163 +0200
-+++ b/tests/packaged_modules/test_audiofolder.py	2023-05-06 14:01:26.005212423 +0200
-@@ -1,10 +1,8 @@
- import shutil
- import textwrap
- 
--import librosa
- import numpy as np
- import pytest
--import soundfile as sf
- 
- from datasets import Audio, ClassLabel, Features, Value
- from datasets.data_files import DataFilesDict, get_data_patterns_locally
-@@ -192,8 +190,11 @@
-     return data_files_with_two_splits_and_metadata
- 
- 
-+@pytest.mark.skip(reason="require soundfile")
- @pytest.fixture
- def data_files_with_zip_archives(tmp_path, audio_file):
-+    import soundfile as sf
-+    import librosa
-     data_dir = tmp_path / "audiofolder_data_dir_with_zip_archives"
-     data_dir.mkdir(parents=True, exist_ok=True)
-     archive_dir = data_dir / "archive"
---- a/tests/test_arrow_dataset.py	2023-05-06 15:36:11.080459079 +0200
-+++ b/tests/test_arrow_dataset.py	2023-05-06 15:38:07.452828528 +0200
-@@ -3928,6 +3928,7 @@
-         )
-         self.assertDictEqual(features_after_cast, dset.features)
- 
-+    @pytest.mark.skip(reason="require soundfile")
-     def test_task_automatic_speech_recognition(self):
-         # Include a dummy extra column `dummy` to test we drop it correctly
-         features_before_cast = Features(
---- a/tests/test_streaming_download_manager.py	2023-05-15 23:06:59.146379973 +0200
-+++ b/tests/test_streaming_download_manager.py	2023-05-15 23:11:32.441363757 +0200
-@@ -217,6 +217,7 @@
-     assert output_path == _readd_double_slash_removed_by_path(Path(expected_path).as_posix())
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, exists",
-     [
-@@ -299,6 +300,7 @@
-         assert list(f) == TEST_URL_CONTENT.splitlines(keepends=True)
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, expected_paths",
-     [
-@@ -328,6 +330,7 @@
-         xlistdir(root_url, use_auth_token=hf_token)
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, isdir",
-     [
-@@ -355,6 +358,7 @@
-         xisdir(root_url, use_auth_token=hf_token)
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, isfile",
-     [
-@@ -378,6 +382,7 @@
-     assert xisfile(root_url + "qwertyuiop", use_auth_token=hf_token) is False
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, size",
-     [
-@@ -402,6 +407,7 @@
-         xgetsize(root_url + "qwertyuiop", use_auth_token=hf_token)
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, expected_paths",
-     [
-@@ -444,6 +450,7 @@
-     assert len(xglob("zip://qwertyuiop/*::" + root_url, use_auth_token=hf_token)) == 0
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, expected_outputs",
-     [
-@@ -533,6 +540,7 @@
-     def test_xpath_as_posix(self, input_path, expected_path):
-         assert xPath(input_path).as_posix() == expected_path
- 
-+    @pytest.mark.skip(reason="not working in sandbox")
-     @pytest.mark.parametrize(
-         "input_path, exists",
-         [
-@@ -548,6 +556,7 @@
-         (tmp_path / "file.txt").touch()
-         assert xexists(input_path) is exists
- 
-+    @pytest.mark.skip(reason="not working in sandbox")
-     @pytest.mark.parametrize(
-         "input_path, pattern, expected_paths",
-         [
-@@ -586,6 +595,7 @@
-         output_paths = sorted(xPath(input_path).glob(pattern))
-         assert output_paths == expected_paths
- 
-+    @pytest.mark.skip(reason="not working in sandbox")
-     @pytest.mark.parametrize(
-         "input_path, pattern, expected_paths",
-         [
diff --git a/sci-libs/datasets/files/datasets-2.12.0-tests.patch b/sci-libs/datasets/files/datasets-2.12.0-tests.patch
deleted file mode 100644
index 6be3156bb70d..000000000000
--- a/sci-libs/datasets/files/datasets-2.12.0-tests.patch
+++ /dev/null
@@ -1,242 +0,0 @@
---- a/tests/test_metric_common.py	2023-05-04 18:48:48.550861318 +0200
-+++ b/tests/test_metric_common.py	2023-05-04 18:50:25.787364577 +0200
-@@ -93,6 +93,7 @@
-     INTENSIVE_CALLS_PATCHER = {}
-     metric_name = None
- 
-+    @pytest.mark.skip(reason="disabling, depends on bert_score, bleurt, math_equivalence, coval, nltk, faiss, mauve, rouge_score, sacrebleu, sacremoses ...")
-     @pytest.mark.filterwarnings("ignore:metric_module_factory is deprecated:FutureWarning")
-     @pytest.mark.filterwarnings("ignore:load_metric is deprecated:FutureWarning")
-     def test_load_metric(self, metric_name):
---- a/tests/test_hf_gcp.py	2023-05-04 19:33:31.150825303 +0200
-+++ b/tests/test_hf_gcp.py	2023-05-04 19:40:08.401759538 +0200
-@@ -75,6 +75,7 @@
-         self.assertTrue(os.path.exists(datset_info_path))
- 
- 
-+@pytest.mark.skip(reason="require apache_beam")
- @pytest.mark.integration
- def test_as_dataset_from_hf_gcs(tmp_path_factory):
-     tmp_dir = tmp_path_factory.mktemp("test_hf_gcp") / "test_wikipedia_simple"
---- a/tests/test_distributed.py	2023-05-04 19:43:09.861275030 +0200
-+++ b/tests/test_distributed.py	2023-05-04 19:44:17.608326722 +0200
-@@ -74,6 +74,7 @@
-         split_dataset_by_node(full_ds.shuffle(), rank=0, world_size=world_size)
- 
- 
-+@pytest.mark.skip(reason="require distributed torch")
- @pytest.mark.parametrize("streaming", [False, True])
- @require_torch
- @pytest.mark.skipif(os.name == "nt", reason="execute_subprocess_async doesn't support windows")
-@@ -95,6 +96,7 @@
-     execute_subprocess_async(cmd, env=os.environ.copy())
- 
- 
-+@pytest.mark.skip(reason="require distributed torch")
- @pytest.mark.parametrize(
-     "nproc_per_node, num_workers",
-     [
---- a/tests/utils.py	2023-05-06 08:43:16.251987543 +0200
-+++ b/tests/utils.py	2023-05-06 08:44:24.467952870 +0200
-@@ -55,8 +55,8 @@
- # Audio
- require_sndfile = pytest.mark.skipif(
-     # On Windows and OS X, soundfile installs sndfile
--    find_spec("soundfile") is None or version.parse(importlib_metadata.version("soundfile")) < version.parse("0.12.0"),
--    reason="test requires sndfile>=0.12.1: 'pip install \"soundfile>=0.12.1\"'; ",
-+    True,
-+    reason="test requires librosa",
- )
- 
- # Beam
---- a/tests/features/test_audio.py	2023-05-06 09:03:58.680108142 +0200
-+++ a/tests/features/test_audio.py	2023-05-06 09:05:50.463407967 +0200
-@@ -57,6 +57,7 @@
-     assert features.arrow_schema == pa.schema({"sequence_of_audios": pa.list_(Audio().pa_type)})
- 
- 
-+@pytest.mark.skip(reason="require librosa")
- @pytest.mark.parametrize(
-     "build_example",
-     [
-@@ -81,6 +82,7 @@
-     assert decoded_example.keys() == {"path", "array", "sampling_rate"}
- 
- 
-+@pytest.mark.skip(reason="require librosa")
- @pytest.mark.parametrize(
-     "build_example",
-     [
-@@ -148,6 +149,7 @@
-     assert decoded_example["sampling_rate"] == 48000
- 
- 
-+@pytest.mark.skip(reason="require librosa")
- @pytest.mark.parametrize("sampling_rate", [16_000, 48_000])
- def test_audio_decode_example_pcm(shared_datadir, sampling_rate):
-     audio_path = str(shared_datadir / "test_audio_16000.pcm")
-@@ -414,6 +417,7 @@
-     assert column[0]["sampling_rate"] == 16000
- 
- 
-+@pytest.mark.skip(reason="require librosa")
- @pytest.mark.parametrize(
-     "build_data",
-     [
-@@ -438,6 +442,7 @@
-     assert item["audio"].keys() == {"path", "array", "sampling_rate"}
- 
- 
-+@pytest.mark.skip(reason="require librosa")
- def test_dataset_concatenate_audio_features(shared_datadir):
-     # we use a different data structure between 1 and 2 to make sure they are compatible with each other
-     audio_path = str(shared_datadir / "test_audio_44100.wav")
-@@ -451,6 +456,7 @@
-     assert concatenated_dataset[1]["audio"]["array"].shape == dset2[0]["audio"]["array"].shape
- 
- 
-+@pytest.mark.skip(reason="require librosa")
- def test_dataset_concatenate_nested_audio_features(shared_datadir):
-     # we use a different data structure between 1 and 2 to make sure they are compatible with each other
-     audio_path = str(shared_datadir / "test_audio_44100.wav")
-@@ -610,6 +616,7 @@
-     assert isinstance(ds, Dataset)
- 
- 
-+@require_sndfile
- def test_dataset_with_audio_feature_undecoded(shared_datadir):
-     audio_path = str(shared_datadir / "test_audio_44100.wav")
-     data = {"audio": [audio_path]}
-@@ -627,6 +634,7 @@
-     assert column[0] == {"path": audio_path, "bytes": None}
- 
- 
-+@require_sndfile
- def test_formatted_dataset_with_audio_feature_undecoded(shared_datadir):
-     audio_path = str(shared_datadir / "test_audio_44100.wav")
-     data = {"audio": [audio_path]}
-@@ -658,6 +666,7 @@
-     assert column[0] == {"path": audio_path, "bytes": None}
- 
- 
-+@require_sndfile
- def test_dataset_with_audio_feature_map_undecoded(shared_datadir):
-     audio_path = str(shared_datadir / "test_audio_44100.wav")
-     data = {"audio": [audio_path]}
---- a/tests/packaged_modules/test_audiofolder.py	2023-05-06 14:00:39.560876163 +0200
-+++ b/tests/packaged_modules/test_audiofolder.py	2023-05-06 14:01:26.005212423 +0200
-@@ -1,10 +1,8 @@
- import shutil
- import textwrap
- 
--import librosa
- import numpy as np
- import pytest
--import soundfile as sf
- 
- from datasets import Audio, ClassLabel, Features, Value
- from datasets.data_files import DataFilesDict, get_data_patterns_locally
-@@ -192,8 +190,11 @@
-     return data_files_with_two_splits_and_metadata
- 
- 
-+@pytest.mark.skip(reason="require soundfile")
- @pytest.fixture
- def data_files_with_zip_archives(tmp_path, audio_file):
-+    import soundfile as sf
-+    import librosa
-     data_dir = tmp_path / "audiofolder_data_dir_with_zip_archives"
-     data_dir.mkdir(parents=True, exist_ok=True)
-     archive_dir = data_dir / "archive"
---- a/tests/test_arrow_dataset.py	2023-05-06 15:36:11.080459079 +0200
-+++ b/tests/test_arrow_dataset.py	2023-05-06 15:38:07.452828528 +0200
-@@ -3983,6 +3983,7 @@
-         )
-         self.assertDictEqual(features_after_cast, dset.features)
- 
-+    @pytest.mark.skip(reason="require soundfile")
-     def test_task_automatic_speech_recognition(self):
-         # Include a dummy extra column `dummy` to test we drop it correctly
-         features_before_cast = Features(
---- a/tests/test_streaming_download_manager.py	2023-05-15 23:06:59.146379973 +0200
-+++ b/tests/test_streaming_download_manager.py	2023-05-15 23:11:32.441363757 +0200
-@@ -217,6 +217,7 @@
-     assert output_path == _readd_double_slash_removed_by_path(Path(expected_path).as_posix())
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, exists",
-     [
-@@ -299,6 +300,7 @@
-         assert list(f) == TEST_URL_CONTENT.splitlines(keepends=True)
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, expected_paths",
-     [
-@@ -328,6 +330,7 @@
-         xlistdir(root_url, use_auth_token=hf_token)
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, isdir",
-     [
-@@ -355,6 +358,7 @@
-         xisdir(root_url, use_auth_token=hf_token)
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, isfile",
-     [
-@@ -378,6 +382,7 @@
-     assert xisfile(root_url + "qwertyuiop", use_auth_token=hf_token) is False
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, size",
-     [
-@@ -402,6 +407,7 @@
-         xgetsize(root_url + "qwertyuiop", use_auth_token=hf_token)
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, expected_paths",
-     [
-@@ -444,6 +450,7 @@
-     assert len(xglob("zip://qwertyuiop/*::" + root_url, use_auth_token=hf_token)) == 0
- 
- 
-+@pytest.mark.skip(reason="not working in sandbox")
- @pytest.mark.parametrize(
-     "input_path, expected_outputs",
-     [
-@@ -533,6 +540,7 @@
-     def test_xpath_as_posix(self, input_path, expected_path):
-         assert xPath(input_path).as_posix() == expected_path
- 
-+    @pytest.mark.skip(reason="not working in sandbox")
-     @pytest.mark.parametrize(
-         "input_path, exists",
-         [
-@@ -548,6 +556,7 @@
-         (tmp_path / "file.txt").touch()
-         assert xexists(input_path) is exists
- 
-+    @pytest.mark.skip(reason="not working in sandbox")
-     @pytest.mark.parametrize(
-         "input_path, pattern, expected_paths",
-         [
-@@ -586,6 +595,7 @@
-         output_paths = sorted(xPath(input_path).glob(pattern))
-         assert output_paths == expected_paths
- 
-+    @pytest.mark.skip(reason="not working in sandbox")
-     @pytest.mark.parametrize(
-         "input_path, pattern, expected_paths",
-         [