author     V3n3RiX <venerix@koprulu.sector>  2023-11-06 16:19:28 +0000
committer  V3n3RiX <venerix@koprulu.sector>  2023-11-06 16:19:28 +0000
commit     9afce155a599e5f4518f3c7913b6424ac13be12e (patch)
tree       60420eafdcf940c0e4555d9aa8e4e9017a646344 /sci-libs/scikit-optimize
parent     eb7aa327b218d640c8bda63ba0fd1ace2bd2d17b (diff)
gentoo auto-resync : 06:11:2023 - 16:19:27
Diffstat (limited to 'sci-libs/scikit-optimize')
-rw-r--r--  sci-libs/scikit-optimize/Manifest                                                 3
-rw-r--r--  sci-libs/scikit-optimize/files/scikit-optimize-0.9.0-numpy-1.24.patch            22
-rw-r--r--  sci-libs/scikit-optimize/files/scikit-optimize-0.9.0-scikit-learn-1.2.0.patch   104
-rw-r--r--  sci-libs/scikit-optimize/scikit-optimize-0.9.0-r1.ebuild                         39
4 files changed, 168 insertions, 0 deletions
diff --git a/sci-libs/scikit-optimize/Manifest b/sci-libs/scikit-optimize/Manifest
index 2c0a66e4db19..b8d2f5462f1d 100644
--- a/sci-libs/scikit-optimize/Manifest
+++ b/sci-libs/scikit-optimize/Manifest
@@ -1,3 +1,6 @@
+AUX scikit-optimize-0.9.0-numpy-1.24.patch 892 BLAKE2B c06e68b47aa051546ede619ef5cb910b15ae2eb4f8b3a79058759ad6d7b0f29fe357670e2b6ec46d519e5e5dd1dce934336eee2dceec11cde471ed99d569049b SHA512 0d8d037b8a27e44709b27780f49089c17273d43bb90b102e62427c8847e3cd2b0020379e072c525540a3316d6fa7af0e9566880cb9826531213dda96cdded972
+AUX scikit-optimize-0.9.0-scikit-learn-1.2.0.patch 5047 BLAKE2B eb393b5a3f82478da2d58997dc0a8521a8c3f37c3de05df76d583b9bb6f0d18a149f14b90cc885cacd458c0aeb7e8de55cd1accfe8f16f85491423005fbc8830 SHA512 b501680cf6722ec60fea590f9ea966767108411c22b0ded6f3eb15e5f29d95e57f1f8842e91815b08403fb1e27424cbb2bcfc343ff7e5641a075e1217d8fb19e
DIST scikit-optimize-0.9.0.tar.gz 275570 BLAKE2B ab481bf1cfc2b8c7cff213ae0ce2fa937de8f6269b491cf63ae115eea5c936c8a5c26b7fb339fa6cd2927c5105068635c008d6dc8b3f99b4b5d3abfed1a1c5a2 SHA512 a4c1bd589686dbbabcc5de38a4eb581c040cc2c3f83bc250ddcbe66314f03fc68b7b12d7679049da34c42445b446e1af3873f7ce90bec2a5361f0077ff3e9b74
+EBUILD scikit-optimize-0.9.0-r1.ebuild 1072 BLAKE2B 22c2666059968510416e9b3b8323829cddcc7107d94ba52e54ef5984b676f558067d103e75cc3741346c2f6e465700c061dd4eb8de0075b4f745166195a44323 SHA512 a5705395464c4c000ea9ef8a6fa917de831eb8d4339eabd7e7ef6073d737366c57e3c2a0db60051362107c3d5de89c3877aa5a36597d6a92c37d1dbf2bdc6beb
EBUILD scikit-optimize-0.9.0.ebuild 810 BLAKE2B 4547d60f4efbb1a35da5b878ee2de8ba956dbb84ddad8d23f8deb9c61b9b47221aa169b42b96d582c6aca9ffdb8027ea99d1631704cc90aa458a45e5e166b73d SHA512 8c31bc0322d7ba807a3cf09a59d33b877e043cbf8be6aa841165166a8729d25b68e507a8525b51e47c617a74316e79bb1213a7e80db96c7f160cd3dcd835ebcd
MISC metadata.xml 415 BLAKE2B 3bfa58da8f117a7b62399a17e5259dbfb0e74b9b9acd16e4515bcceaafc2928733f047f229c58bc437907cddf3b8a93c9576a9645e0c910129900072bed94aff SHA512 6343c76ca9a28f321c3fd8c94dfbb912f305ce43025ab6d666ed0aa5a496f08f258e1ab4e11c14844baa3c04c63a43c1d79bc8067a0d02a4eccf0e37c0c686f7
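
The Manifest entries above pair each AUX/DIST/EBUILD file with its size in bytes plus BLAKE2B and SHA-512 digests, which Portage verifies before using the file. A minimal sketch of how such digests can be reproduced, assuming a local checkout of this directory (the path below is simply one of the files added by this commit):

    # Recompute the size and digests recorded in a Manifest entry for one file.
    import hashlib

    path = "files/scikit-optimize-0.9.0-numpy-1.24.patch"
    with open(path, "rb") as fh:
        data = fh.read()

    print(len(data))                          # size column (892 bytes for this AUX file)
    print(hashlib.blake2b(data).hexdigest())  # BLAKE2B column (64-byte digest, 128 hex chars)
    print(hashlib.sha512(data).hexdigest())   # SHA512 column
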
diff --git a/sci-libs/scikit-optimize/files/scikit-optimize-0.9.0-numpy-1.24.patch b/sci-libs/scikit-optimize/files/scikit-optimize-0.9.0-numpy-1.24.patch
new file mode 100644
index 000000000000..65fc26f3eed1
--- /dev/null
+++ b/sci-libs/scikit-optimize/files/scikit-optimize-0.9.0-numpy-1.24.patch
@@ -0,0 +1,22 @@
+diff --git a/skopt/space/transformers.py b/skopt/space/transformers.py
+index 68892952..87cc3b68 100644
+--- a/skopt/space/transformers.py
++++ b/skopt/space/transformers.py
+@@ -259,7 +259,7 @@ def transform(self, X):
+ if (self.high - self.low) == 0.:
+ return X * 0.
+ if self.is_int:
+- return (np.round(X).astype(np.int) - self.low) /\
++ return (np.round(X).astype(np.int64) - self.low) /\
+ (self.high - self.low)
+ else:
+ return (X - self.low) / (self.high - self.low)
+@@ -272,7 +272,7 @@ def inverse_transform(self, X):
+ raise ValueError("All values should be greater than 0.0")
+ X_orig = X * (self.high - self.low) + self.low
+ if self.is_int:
+- return np.round(X_orig).astype(np.int)
++ return np.round(X_orig).astype(np.int64)
+ return X_orig
+
+
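
The hunks above track NumPy 1.24, which removed the long-deprecated np.int alias that scikit-optimize 0.9.0 still used in its space transformers. An illustrative sketch of the failure and of the replacement spelling, assuming NumPy >= 1.24:

    # On NumPy >= 1.24 the removed alias fails:
    #   np.round(X).astype(np.int)  ->  AttributeError: module 'numpy' has no attribute 'int'
    import numpy as np

    X = np.array([1.2, 3.7, 5.5])
    # Explicit fixed-width integer type, as used in the patched transformers:
    print(np.round(X).astype(np.int64))   # [1 4 6]
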
diff --git a/sci-libs/scikit-optimize/files/scikit-optimize-0.9.0-scikit-learn-1.2.0.patch b/sci-libs/scikit-optimize/files/scikit-optimize-0.9.0-scikit-learn-1.2.0.patch
new file mode 100644
index 000000000000..8cf8cff9479f
--- /dev/null
+++ b/sci-libs/scikit-optimize/files/scikit-optimize-0.9.0-scikit-learn-1.2.0.patch
@@ -0,0 +1,104 @@
+diff --git a/skopt/learning/forest.py b/skopt/learning/forest.py
+index 096770c1d..ebde568f5 100644
+--- a/skopt/learning/forest.py
++++ b/skopt/learning/forest.py
+@@ -27,7 +27,7 @@ def _return_std(X, trees, predictions, min_variance):
+ -------
+ std : array-like, shape=(n_samples,)
+ Standard deviation of `y` at `X`. If criterion
+- is set to "mse", then `std[i] ~= std(y | X[i])`.
++ is set to "squared_error", then `std[i] ~= std(y | X[i])`.
+
+ """
+ # This derives std(y | x) as described in 4.3.2 of arXiv:1211.0906
+@@ -61,9 +61,9 @@ class RandomForestRegressor(_sk_RandomForestRegressor):
+ n_estimators : integer, optional (default=10)
+ The number of trees in the forest.
+
+- criterion : string, optional (default="mse")
++ criterion : string, optional (default="squared_error")
+ The function to measure the quality of a split. Supported criteria
+- are "mse" for the mean squared error, which is equal to variance
++ are "squared_error" for the mean squared error, which is equal to variance
+ reduction as feature selection criterion, and "mae" for the mean
+ absolute error.
+
+@@ -194,7 +194,7 @@ class RandomForestRegressor(_sk_RandomForestRegressor):
+ .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.
+
+ """
+- def __init__(self, n_estimators=10, criterion='mse', max_depth=None,
++ def __init__(self, n_estimators=10, criterion='squared_error', max_depth=None,
+ min_samples_split=2, min_samples_leaf=1,
+ min_weight_fraction_leaf=0.0, max_features='auto',
+ max_leaf_nodes=None, min_impurity_decrease=0.,
+@@ -228,20 +228,20 @@ def predict(self, X, return_std=False):
+ Returns
+ -------
+ predictions : array-like of shape = (n_samples,)
+- Predicted values for X. If criterion is set to "mse",
++ Predicted values for X. If criterion is set to "squared_error",
+ then `predictions[i] ~= mean(y | X[i])`.
+
+ std : array-like of shape=(n_samples,)
+ Standard deviation of `y` at `X`. If criterion
+- is set to "mse", then `std[i] ~= std(y | X[i])`.
++ is set to "squared_error", then `std[i] ~= std(y | X[i])`.
+
+ """
+ mean = super(RandomForestRegressor, self).predict(X)
+
+ if return_std:
+- if self.criterion != "mse":
++ if self.criterion != "squared_error":
+ raise ValueError(
+- "Expected impurity to be 'mse', got %s instead"
++ "Expected impurity to be 'squared_error', got %s instead"
+ % self.criterion)
+ std = _return_std(X, self.estimators_, mean, self.min_variance)
+ return mean, std
+@@ -257,9 +257,9 @@ class ExtraTreesRegressor(_sk_ExtraTreesRegressor):
+ n_estimators : integer, optional (default=10)
+ The number of trees in the forest.
+
+- criterion : string, optional (default="mse")
++ criterion : string, optional (default="squared_error")
+ The function to measure the quality of a split. Supported criteria
+- are "mse" for the mean squared error, which is equal to variance
++ are "squared_error" for the mean squared error, which is equal to variance
+ reduction as feature selection criterion, and "mae" for the mean
+ absolute error.
+
+@@ -390,7 +390,7 @@ class ExtraTreesRegressor(_sk_ExtraTreesRegressor):
+ .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.
+
+ """
+- def __init__(self, n_estimators=10, criterion='mse', max_depth=None,
++ def __init__(self, n_estimators=10, criterion='squared_error', max_depth=None,
+ min_samples_split=2, min_samples_leaf=1,
+ min_weight_fraction_leaf=0.0, max_features='auto',
+ max_leaf_nodes=None, min_impurity_decrease=0.,
+@@ -425,19 +425,19 @@ def predict(self, X, return_std=False):
+ Returns
+ -------
+ predictions : array-like of shape=(n_samples,)
+- Predicted values for X. If criterion is set to "mse",
++ Predicted values for X. If criterion is set to "squared_error",
+ then `predictions[i] ~= mean(y | X[i])`.
+
+ std : array-like of shape=(n_samples,)
+ Standard deviation of `y` at `X`. If criterion
+- is set to "mse", then `std[i] ~= std(y | X[i])`.
++ is set to "squared_error", then `std[i] ~= std(y | X[i])`.
+ """
+ mean = super(ExtraTreesRegressor, self).predict(X)
+
+ if return_std:
+- if self.criterion != "mse":
++ if self.criterion != "squared_error":
+ raise ValueError(
+- "Expected impurity to be 'mse', got %s instead"
++ "Expected impurity to be 'squared_error', got %s instead"
+ % self.criterion)
+ std = _return_std(X, self.estimators_, mean, self.min_variance)
+ return mean, std
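
This patch follows scikit-learn 1.2, which dropped the deprecated criterion="mse" spelling in favour of "squared_error"; the skopt forest wrappers both default to the new name and compare self.criterion against it before computing return_std. An illustrative sketch of the renamed parameter, assuming scikit-learn >= 1.2:

    # criterion="mse" is rejected at fit time on scikit-learn >= 1.2;
    # "squared_error" is the spelling the patched wrappers now use.
    from sklearn.ensemble import RandomForestRegressor

    reg = RandomForestRegressor(n_estimators=10, criterion="squared_error")
    print(reg.criterion)   # squared_error
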
diff --git a/sci-libs/scikit-optimize/scikit-optimize-0.9.0-r1.ebuild b/sci-libs/scikit-optimize/scikit-optimize-0.9.0-r1.ebuild
new file mode 100644
index 000000000000..694cd3ffafeb
--- /dev/null
+++ b/sci-libs/scikit-optimize/scikit-optimize-0.9.0-r1.ebuild
@@ -0,0 +1,39 @@
+# Copyright 2020-2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+DISTUTILS_USE_PEP517=setuptools
+PYPI_NO_NORMALIZE=1
+PYTHON_COMPAT=( python3_{10..11} )
+inherit distutils-r1 pypi
+
+DESCRIPTION="Sequential model-based optimization library"
+HOMEPAGE="https://scikit-optimize.github.io/"
+
+LICENSE="BSD"
+SLOT="0"
+KEYWORDS="~amd64"
+
+RDEPEND="
+ >=dev-python/joblib-0.11[${PYTHON_USEDEP}]
+ dev-python/pyyaml[${PYTHON_USEDEP}]
+ >=dev-python/matplotlib-2.0.0[${PYTHON_USEDEP}]
+ >=dev-python/numpy-1.13.3[${PYTHON_USEDEP}]
+ >=dev-python/scipy-0.19.1[${PYTHON_USEDEP}]
+ >=sci-libs/scikit-learn-0.20.0[${PYTHON_USEDEP}]
+"
+
+PATCHES=(
+ # https://github.com/scikit-optimize/scikit-optimize/pull/1187
+ "${FILESDIR}/${P}-numpy-1.24.patch"
+ # https://github.com/scikit-optimize/scikit-optimize/pull/1184/files
+ "${FILESDIR}/${P}-scikit-learn-1.2.0.patch"
+)
+
+distutils_enable_tests pytest
+# No such file or directory: image/logo.png
+#distutils_enable_sphinx doc \
+# dev-python/numpydoc \
+# dev-python/sphinx-issues \
+# dev-python/sphinx-gallery