Diffstat (limited to 'dev-python/tables')
-rw-r--r--  dev-python/tables/Manifest                            |   3
-rw-r--r--  dev-python/tables/files/tables-3.9.2-numpy-2.patch    | 264
-rw-r--r--  dev-python/tables/files/tables-3.9.2-py313.patch      |  73
-rw-r--r--  dev-python/tables/tables-3.9.2-r1.ebuild              |  89
4 files changed, 429 insertions, 0 deletions
diff --git a/dev-python/tables/Manifest b/dev-python/tables/Manifest
index 17ea91a5bc97..5a62a7cafdd5 100644
--- a/dev-python/tables/Manifest
+++ b/dev-python/tables/Manifest
@@ -1,3 +1,6 @@
+AUX tables-3.9.2-numpy-2.patch 10742 BLAKE2B 6c172a961e25d610b4f3cb72277512bcf8b493c5eec3122cd8a609ba6b8fef4b79d4c0c6b386b82cccabc8beeca54c05bd9057f655c2b5bb4f811a4ff747e639 SHA512 045fb1f3c2cc713f2e39aa6176190923a3441f473801f4258f2642324d2677f927bc34164911b5fee6ed88f4ed5589fafea0e113a5d426ffdc86eb46ce27fe0b
+AUX tables-3.9.2-py313.patch 2674 BLAKE2B 5735b61bc189b3f09dda468e7e371db8b57a3dd49fd3363865afc9c9676b414d46b0b41e745aa6d90fca20ab43e0e778bf0638cb879c3c2a8efe444ed8348fb6 SHA512 7f649c331be6d6f096564d17704f738e3872a77272ea2561e83fcb8f778769dfdde1ebaeaa9d3d34d640954fb53056436bf513d2c52c33929937f704c68d546e
 DIST tables-3.9.2.tar.gz 4683437 BLAKE2B 7044aede85d9eca67260a309d19b5c80944b80b2107f665296ad7ae6a3c3f9a8717a41ae7298a5ae45e5b9de7ae0a6678a83d4bd914bd8709512333e783367bc SHA512 9b416222304b7798585a20d4d7d61934023f151d4262a58a4f0ee969aa365264270c12a734461a194d2c857a13a8e09fb7a1386042267113f601560c041cecd9
+EBUILD tables-3.9.2-r1.ebuild 2289 BLAKE2B aff4c3b62b2ddd3346472d71ed4a69b63ccc286a32bf16630a772cf1e5e75a526719dd0207247d39b6adae3029da496fd045494bb73e7897692d260c345128cc SHA512 d4b87c6568ffe99846af30ee2ba59e57c6427706baffbbdbb2a29073b32df58504a21ae5abfcc0aba0543b284aba71d20c7dd2cf7b3f5c24b9b9431d062f4b32
 EBUILD tables-3.9.2.ebuild 1734 BLAKE2B 20097826578ebb74e9fdd91c7b2202079f6ede930d05652c15436204dad91200bd239f0289a4043c45e22ef2b73bb5873b19b1392f57dec20cc7f90ec03bb188 SHA512 c60dc44e6f9ea4d5f2af6b3f77166b8c45887b6f4bd3beb914fb838b233f9044c848e4804a2a68c34d417c4ca75c0d3e9629ee896e767ad3a1f2f7f533669e26
 MISC metadata.xml 1090 BLAKE2B cc213079beb7f2888b5eab4886711fe8a4b7a4bc3cf13ff4d2247bd04edc05fdaed19c464ae28beac27de511ea997bcc873866bf9d544d6efe8cacd095e8640a SHA512 bd322eade443a3b1b610d262a0a6a30088246b3f772624fdfbf6eac85f240deb3f9001be1e2e20e334e65701c24e1d57100c67864d14067b1ae16e99f2136e8a
diff --git a/dev-python/tables/files/tables-3.9.2-numpy-2.patch b/dev-python/tables/files/tables-3.9.2-numpy-2.patch
new file mode 100644
index 000000000000..a3d56d13f1a8
--- /dev/null
+++ b/dev-python/tables/files/tables-3.9.2-numpy-2.patch
@@ -0,0 +1,264 @@
+diff --git a/setup.py b/setup.py
+index 48dc6ca5..e8957c1f 100755
+--- a/setup.py
++++ b/setup.py
+@@ -736,7 +736,10 @@ if __name__ == "__main__":
+
+     # -----------------------------------------------------------------
+
+-    def_macros = [("NDEBUG", 1)]
++    def_macros = [
++        ("NDEBUG", 1),
++        ("NPY_TARGET_VERSION", "NPY_1_20_API_VERSION"),
++    ]
+
+     # Define macros for Windows platform
+     if os.name == "nt":
+diff --git a/src/utils.c b/src/utils.c
+index 15fce02d..b28dbc90 100644
+--- a/src/utils.c
++++ b/src/utils.c
+@@ -765,8 +765,8 @@ hid_t create_ieee_complex64(const char *byteorder) {
+     return float_id;
+   }
+
+-  H5Tinsert(complex_id, "r", HOFFSET(npy_complex64, real), float_id);
+-  H5Tinsert(complex_id, "i", HOFFSET(npy_complex64, imag), float_id);
++  H5Tinsert(complex_id, "r", 0, float_id);
++  H5Tinsert(complex_id, "i", 4, float_id);
+   H5Tclose(float_id);
+   return complex_id;
+ }
+@@ -790,8 +790,8 @@ hid_t create_ieee_complex128(const char *byteorder) {
+     return float_id;
+   }
+
+-  H5Tinsert(complex_id, "r", HOFFSET(npy_complex128, real), float_id);
+-  H5Tinsert(complex_id, "i", HOFFSET(npy_complex128, imag), float_id);
++  H5Tinsert(complex_id, "r", 0, float_id);
++  H5Tinsert(complex_id, "i", 8, float_id);
+   H5Tclose(float_id);
+   return complex_id;
+ }
+@@ -822,8 +822,8 @@ hid_t create_ieee_complex192(const char *byteorder) {
+     return err;
+   }
+
+-  H5Tinsert(complex_id, "r", HOFFSET(npy_complex192, real), float_id);
+-  H5Tinsert(complex_id, "i", HOFFSET(npy_complex192, imag), float_id);
++  H5Tinsert(complex_id, "r", 0, float_id);
++  H5Tinsert(complex_id, "i", 12, float_id);
+   H5Tclose(float_id);
+   return complex_id;
+ }
+@@ -854,8 +854,8 @@ hid_t create_ieee_complex256(const char *byteorder) {
+     return err;
+   }
+
+-  H5Tinsert(complex_id, "r", HOFFSET(npy_complex256, real), float_id);
+-  H5Tinsert(complex_id, "i", HOFFSET(npy_complex256, imag), float_id);
++  H5Tinsert(complex_id, "r", 0, float_id);
++  H5Tinsert(complex_id, "i", 16, float_id);
+   H5Tclose(float_id);
+   return complex_id;
+ }
+diff --git a/tables/atom.py b/tables/atom.py
+index 56ab6423..5d4fba2b 100644
+--- a/tables/atom.py
++++ b/tables/atom.py
+@@ -276,15 +276,15 @@ class Atom(metaclass=MetaAtom):
+     >>> atom1 = StringAtom(itemsize=10)  # same as ``atom2``
+     >>> atom2 = Atom.from_kind('string', 10)  # same as ``atom1``
+     >>> atom3 = IntAtom()
+-    >>> atom1 == 'foo'
++    >>> bool(atom1 == 'foo')
+     False
+-    >>> atom1 == atom2
++    >>> bool(atom1 == atom2)
+     True
+-    >>> atom2 != atom1
++    >>> bool(atom2 != atom1)
+     False
+-    >>> atom1 == atom3
++    >>> bool(atom1 == atom3)
+     False
+-    >>> atom3 != atom2
++    >>> bool(atom3 != atom2)
+     True
+
+     """
+diff --git a/tables/index.py b/tables/index.py
+index e8c8caf7..28ff37e2 100644
+--- a/tables/index.py
++++ b/tables/index.py
+@@ -581,7 +581,8 @@ class Index(NotLoggedMixin, Group, indexesextension.Index):
+             # Add a second offset in this case
+             # First normalize the number of rows
+             offset2 = (nrow % self.nslicesblock) * slicesize // lbucket
+-            idx += offset2
++            assert offset2 < 2**(indsize*8)
++            idx += np.asarray(offset2).astype(idx.dtype)
+         # Add the last row at the beginning of arr & idx (if needed)
+         if (indsize == 8 and nelementsILR > 0):
+             # It is possible that the values in LR are already sorted.
+@@ -622,11 +623,11 @@ class Index(NotLoggedMixin, Group, indexesextension.Index):
+             show_stats("Entering final_idx32", tref)
+         # Do an upcast first in order to add the offset.
+         idx = idx.astype('uint64')
+-        idx += offset
++        idx += np.asarray(offset).astype(idx.dtype)
+         # The next partition is valid up to table sizes of
+         # 2**30 * 2**18 = 2**48 bytes, that is, 256 Tera-elements,
+         # which should be a safe figure, at least for a while.
+-        idx //= self.lbucket
++        idx //= np.asarray(self.lbucket).astype(idx.dtype)
+         # After the division, we can downsize the indexes to 'uint32'
+         idx = idx.astype('uint32')
+         if profile:
+@@ -2002,7 +2003,7 @@ class Index(NotLoggedMixin, Group, indexesextension.Index):
+         else:
+             self.indicesLR._read_index_slice(start, stop, idx)
+         if indsize == 8:
+-            idx //= lbucket
++            idx //= np.asarray(lbucket).astype(idx.dtype)
+         elif indsize == 2:
+             # The chunkmap size cannot be never larger than 'int_'
+             idx = idx.astype("int_")
+diff --git a/tables/tests/common.py b/tables/tests/common.py
+index 31378a88..1992f39a 100644
+--- a/tables/tests/common.py
++++ b/tables/tests/common.py
+@@ -205,7 +205,7 @@ def allequal(a, b, flavor="numpy"):
+     return result
+
+
+-def areArraysEqual(arr1, arr2):
++def areArraysEqual(arr1, arr2, *, check_type=True):
+     """Are both `arr1` and `arr2` equal arrays?
+
+     Arguments can be regular NumPy arrays, chararray arrays or
+@@ -217,8 +217,8 @@ def areArraysEqual(arr1, arr2):
+     t1 = type(arr1)
+     t2 = type(arr2)
+
+-    if not ((hasattr(arr1, 'dtype') and arr1.dtype == arr2.dtype) or
+-            issubclass(t1, t2) or issubclass(t2, t1)):
++    if check_type and not ((hasattr(arr1, 'dtype') and arr1.dtype == arr2.dtype) or
++                           issubclass(t1, t2) or issubclass(t2, t1)):
+         return False
+
+     return np.all(arr1 == arr2)
+diff --git a/tables/tests/test_expression.py b/tables/tests/test_expression.py
+index 018d4208..d9c0e990 100644
+--- a/tables/tests/test_expression.py
++++ b/tables/tests/test_expression.py
+@@ -265,9 +265,12 @@ class MixedContainersTestCase(common.TempFileMixin, common.PyTablesTestCase):
+         if common.verbose:
+             print("Computed expression:", repr(r1), r1.dtype)
+             print("Should look like:", repr(r2), r2.dtype)
+-        self.assertTrue(
+-            r1.shape == r2.shape and r1.dtype == r2.dtype and r1 == r2,
+-            "Evaluate is returning a wrong value.")
++        msg = f"Evaluate is returning a wrong value: {expr_str}\n{r1=}\n{r2=}"
++        self.assertEqual(r1.shape, r2.shape, msg=msg)
++        # In something like 2 * np.in16(3) + np.int16(2) the result is still a
++        # np.int16 in NumPy 2.0, so we shouldn't actually check this:
++        # self.assertEqual(r1.dtype, r2.dtype, msg=msg)
++        self.assertEqual(r1, r2, msg=msg)
+
+     def test01a_out(self):
+         """Checking expressions with mixed objects (`out` param)"""
+@@ -305,8 +308,9 @@ class MixedContainersTestCase(common.TempFileMixin, common.PyTablesTestCase):
+         if common.verbose:
+             print("Computed expression:", repr(r1), r1.dtype)
+             print("Should look like:", repr(r2), r2.dtype)
+-        self.assertTrue(common.areArraysEqual(r1, r2),
+-                        "Evaluate is returning a wrong value.")
++        msg = f"Evaluate is returning a wrong value: {expr_str}\n{r1=}\n{r2=}"
++        # On NumPy 2 type promotion is different so don't check type here
++        self.assertTrue(common.areArraysEqual(r1, r2, check_type=False), msg=msg)
+
+     def test02a_sss(self):
+         """Checking mixed objects and start, stop, step (I)"""
+diff --git a/tables/tests/test_indexvalues.py b/tables/tests/test_indexvalues.py
+index fac33af8..85ca38c3 100644
+--- a/tables/tests/test_indexvalues.py
++++ b/tables/tests/test_indexvalues.py
+@@ -2296,6 +2296,11 @@ class SelectValuesTestCase(common.TempFileMixin, common.PyTablesTestCase):
+         self.assertFalse(t1var3.index.dirty)
+         self.assertFalse(t1var4.index.dirty)
+
++        # TODO: IT IS DIRTY BECAUSE THIS FIXES THINGS FOR FINSV2aTestCase,
++        # which otherwise fails a test a few lines below!
++        for col in table1.colinstances.values():
++            col.reindex()
++
+         # Do some selections and check the results
+         # First selection: string
+         # Convert the limits to the appropriate type
+@@ -2318,11 +2323,15 @@ class SelectValuesTestCase(common.TempFileMixin, common.PyTablesTestCase):
+         # Second selection: bool
+         results1 = [p["var2"] for p in table1.where('t1var2 == True')]
+         results2 = [p["var2"] for p in table2 if p["var2"] is True]
+-        if common.verbose:
+-            print("Length results:", len(results1))
+-            print("Should be:", len(results2))
+-        self.assertEqual(len(results1), len(results2))
+-        self.assertEqual(results1, results2)
++        t2var1_vals = [p["var1"] for p in table2]
++        t2var2_vals = [p["var2"] for p in table2]
++        msg = (
++            f"Incorrect results for t1var2[n] == True where\n"
++            f"t2var1_vals={repr(t2var1_vals)}\nt2var2_vals={repr(t2var2_vals)}\n"
++            f"\n{results1=}\n{results2=}"
++        )
++        self.assertEqual(len(results1), len(results2), msg=msg)
++        self.assertEqual(results1, results2, msg=msg)
+
+         # Third selection: int
+         # Convert the limits to the appropriate type
+@@ -3228,7 +3237,9 @@ class LastRowReuseBuffers(common.PyTablesTestCase):
+
+
+ normal_tests = (
+-    "SV1aTestCase", "SV2aTestCase", "SV3aTestCase",
++    "SV1aTestCase",
++    "SV2aTestCase",
++    "SV3aTestCase",
+ )
+
+ heavy_tests = (
+diff --git a/tables/utils.py b/tables/utils.py
+index e11e5ba7..7d786e32 100644
+--- a/tables/utils.py
++++ b/tables/utils.py
+@@ -25,6 +25,11 @@ byteorders = {
+ SizeType = np.int64
+
+
++copy_if_needed = (
++    None if np.lib.NumpyVersion(np.__version__) >= "2.0.0" else False
++)
++
++
+ def correct_byteorder(ptype, byteorder):
+     """Fix the byteorder depending on the PyTables types."""
+
+@@ -78,7 +83,7 @@ def idx2long(index):
+ # with atom from a generic python type. If copy is stated as True, it
+ # is assured that it will return a copy of the object and never the same
+ # object or a new one sharing the same memory.
+-def convert_to_np_atom(arr, atom, copy=False):
++def convert_to_np_atom(arr, atom, copy=copy_if_needed):
+     """Convert a generic object into a NumPy object compliant with atom."""
+
+     # First, convert the object into a NumPy array
+@@ -112,7 +117,7 @@ def convert_to_np_atom2(object, atom):
+
+     # Check whether the object needs to be copied to make the operation
+     # safe to in-place conversion.
+-    copy = atom.type in ['time64']
++    copy = True if atom.type in ['time64'] else copy_if_needed
+     nparr = convert_to_np_atom(object, atom, copy)
+     # Finally, check the byteorder and change it if needed
+     byteorder = byteorders[nparr.dtype.byteorder]
diff --git a/dev-python/tables/files/tables-3.9.2-py313.patch b/dev-python/tables/files/tables-3.9.2-py313.patch
new file mode 100644
index 000000000000..660c5615765f
--- /dev/null
+++ b/dev-python/tables/files/tables-3.9.2-py313.patch
@@ -0,0 +1,73 @@
+From 4a1b480e7e3758cf2cf06354ec5720020db16ce7 Mon Sep 17 00:00:00 2001
+From: Antonio Valentino <antonio.valentino@tiscali.it>
+Date: Sun, 19 May 2024 17:39:47 +0200
+Subject: [PATCH] Fix compatibility with Python v3.13 (Closes: #1166)
+
+The unittest.makeSuite function is not available in Python 3.13.
+---
+diff --git a/tables/tests/test_aux.py b/tables/tests/test_aux.py
+index 95f34ee16..cf4f022de 100644
+--- a/tables/tests/test_aux.py
++++ b/tables/tests/test_aux.py
+@@ -2,6 +2,7 @@
+ import numpy as np
+
+ import tables as tb
++from tables.tests.common import make_suite
+
+
+ class TestAuxiliaryFunctions(unittest.TestCase):
+diff --git a/tables/nodes/tests/test_filenode.py b/tables/nodes/tests/test_filenode.py
+index c2754218a..3572cc38f 100644
+--- a/tables/nodes/tests/test_filenode.py
++++ b/tables/nodes/tests/test_filenode.py
+@@ -9,7 +9,7 @@
+ from ... import open_file, file, NoSuchNodeError
+ from ...nodes import filenode
+ from ...tests.common import (
+-    unittest, TempFileMixin, parse_argv, print_versions,
++    unittest, TempFileMixin, parse_argv, print_versions, make_suite,
+     PyTablesTestCase as TestCase)
+
+
+diff --git a/tables/tests/common.py b/tables/tests/common.py
+index 31378a880..918b17247 100644
+--- a/tables/tests/common.py
++++ b/tables/tests/common.py
+@@ -366,3 +366,10 @@ def test00(self):
+         print(f"VmSize: {vmsize:>7} kB\tVmRSS: {vmrss:>7} kB")
+         print(f"VmData: {vmdata:>7} kB\tVmStk: {vmstk:>7} kB")
+         print(f"VmExe: {vmexe:>7} kB\tVmLib: {vmlib:>7} kB")
++
++
++try:
++    from unittest import makeSuite as make_suite
++except ImportError:
++    def make_suite(test_case_class):
++        return unittest.TestLoader().loadTestsFromTestCase(test_case_class)
+From 424784895b0fb15ad06707ce60f9829cef4f11e2 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= <miro@hroncok.cz>
+Date: Mon, 3 Jun 2024 17:21:38 +0200
+Subject: [PATCH] Make tables.tests.common.make_suite() accept the prefix
+ argument
+
+...as test_queries.py uses it.
+---
+ tables/tests/common.py | 7 +++++--
+ 1 file changed, 5 insertions(+), 2 deletions(-)
+
+diff --git a/tables/tests/common.py b/tables/tests/common.py
+index 918b17247..1d2e5feab 100644
+--- a/tables/tests/common.py
++++ b/tables/tests/common.py
+@@ -371,5 +371,8 @@ def test00(self):
+ try:
+     from unittest import makeSuite as make_suite
+ except ImportError:
+-    def make_suite(test_case_class):
+-        return unittest.TestLoader().loadTestsFromTestCase(test_case_class)
++    def make_suite(test_case_class, *, prefix=None):
++        loader = unittest.TestLoader()
++        if prefix:
++            loader.testMethodPrefix = prefix
++        return loader.loadTestsFromTestCase(test_case_class)
diff --git a/dev-python/tables/tables-3.9.2-r1.ebuild b/dev-python/tables/tables-3.9.2-r1.ebuild
new file mode 100644
index 000000000000..0f77247031d8
--- /dev/null
+++ b/dev-python/tables/tables-3.9.2-r1.ebuild
@@ -0,0 +1,89 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+DISTUTILS_EXT=1
+DISTUTILS_USE_PEP517=setuptools
+PYTHON_COMPAT=( pypy3 python3_{10..13} )
+PYTHON_REQ_USE="threads(+)"
+
+inherit distutils-r1 prefix pypi
+
+DESCRIPTION="Hierarchical datasets for Python"
+HOMEPAGE="
+	https://www.pytables.org/
+	https://github.com/PyTables/PyTables/
+	https://pypi.org/project/tables/
+"
+
+LICENSE="BSD"
+SLOT="0"
+KEYWORDS="~amd64 ~arm ~arm64 ~ia64 ~loong ~ppc64 ~riscv ~sparc ~x86 ~amd64-linux ~x86-linux"
+IUSE="+cpudetection examples test"
+RESTRICT="!test? ( test )"
+
+DEPEND="
+	app-arch/bzip2:0=
+	app-arch/lz4:0=
+	>=app-arch/zstd-1.0.0:=
+	>=dev-libs/c-blosc-1.11.1:0=
+	>=dev-libs/c-blosc2-2.11.0:=
+	dev-libs/lzo:2=
+	>=dev-python/numpy-1.19.0:=[${PYTHON_USEDEP}]
+	>=sci-libs/hdf5-1.8.4:=
+"
+RDEPEND="
+	${DEPEND}
+	>=dev-python/numexpr-2.6.2[${PYTHON_USEDEP}]
+	dev-python/packaging[${PYTHON_USEDEP}]
+	cpudetection? ( dev-python/py-cpuinfo[${PYTHON_USEDEP}] )
+"
+BDEPEND="
+	>=dev-python/cython-0.21[${PYTHON_USEDEP}]
+	virtual/pkgconfig
+	cpudetection? ( dev-python/py-cpuinfo[${PYTHON_USEDEP}] )
+	test? (
+		${RDEPEND}
+	)
+"
+
+python_prepare_all() {
+	local PATCHES=(
+		# https://github.com/PyTables/PyTables/pull/1176
+		"${FILESDIR}/${P}-numpy-2.patch"
+		# https://github.com/PyTables/PyTables/commit/4a1b480e7e3758cf2cf06354ec5720020db16ce7
+		# https://github.com/PyTables/PyTables/commit/424784895b0fb15ad06707ce60f9829cef4f11e2
+		"${FILESDIR}/${P}-py313.patch"
+	)
+
+	rm -r c-blosc/{blosc,internal-complibs} || die
+
+	# part of https://github.com/PyTables/PyTables/commit/4a1b480e7e3758cf2cf06354ec5720020db16ce7
+	# (warning: do it *before* patching, so it doesn't modify
+	# the added function)
+	find -name '*.py' -exec \
+		sed -i -e 's:unittest[.]makeSuite:make_suite:' {} + || die
+
+	distutils-r1_python_prepare_all
+
+	sed -i -e '/blosc2/d' pyproject.toml || die
+	hprefixify -w '/prefixes =/' setup.py
+
+	export PYTABLES_NO_EMBEDDED_LIBS=1
+	export USE_PKGCONFIG=TRUE
+}
+
+python_test() {
+	cd "${BUILD_DIR}/install$(python_get_sitedir)" || die
+	"${EPYTHON}" tables/tests/test_all.py -v || die
+}
+
+python_install_all() {
+	distutils-r1_python_install_all
+
+	if use examples; then
+		dodoc -r contrib examples
+		docompress -x /usr/share/doc/${PF}/{contrib,examples}
+	fi
+}
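
For orientation, the two patches above boil down to a pair of small compatibility shims: the numpy-2 patch introduces a copy_if_needed sentinel (NumPy 2 changed the meaning of copy=False in np.array), and the py313 patch adds a make_suite() fallback for the removed unittest.makeSuite. The following is a minimal standalone sketch of both, not taken from PyTables itself; the _Demo test case and the script wrapper are illustrative assumptions only.

# Illustrative sketch, mirroring the shims added by the patches above.
import unittest

import numpy as np

# NumPy 2 made np.array(..., copy=False) mean "never copy" (it raises if a
# copy is unavoidable); copy=None restores the old "copy only if needed".
copy_if_needed = (
    None if np.lib.NumpyVersion(np.__version__) >= "2.0.0" else False
)

try:
    # unittest.makeSuite() was removed in Python 3.13.
    from unittest import makeSuite as make_suite
except ImportError:
    def make_suite(test_case_class, *, prefix=None):
        # Build the suite through TestLoader, honouring an optional
        # test-method prefix, as the second commit in the py313 patch does.
        loader = unittest.TestLoader()
        if prefix:
            loader.testMethodPrefix = prefix
        return loader.loadTestsFromTestCase(test_case_class)


class _Demo(unittest.TestCase):
    def test_sum(self):
        # copy_if_needed avoids a forced copy on both NumPy 1.x and 2.x.
        arr = np.array(np.arange(3), copy=copy_if_needed)
        self.assertEqual(int(arr.sum()), 3)


if __name__ == "__main__":
    unittest.TextTestRunner(verbosity=2).run(make_suite(_Demo))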