Update to work with new versions of scipy and scikit

master
tgsmith61591 2019-05-10 07:32:44 -05:00
parent e67c3221ac
commit cdc8cbad4c
16 changed files with 11 additions and 14 deletions

Binary file not shown.

Before

Width:  |  Height:  |  Size: 21 KiB

After

Width:  |  Height:  |  Size: 21 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 123 KiB

After

Width:  |  Height:  |  Size: 122 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 50 KiB

After

Width:  |  Height:  |  Size: 51 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 175 KiB

After

Width:  |  Height:  |  Size: 176 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 174 KiB

After

Width:  |  Height:  |  Size: 175 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 25 KiB

After

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 19 KiB

After

Width:  |  Height:  |  Size: 19 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 19 KiB

After

Width:  |  Height:  |  Size: 19 KiB

View File

@@ -3,8 +3,7 @@
from __future__ import absolute_import
from abc import ABCMeta, abstractmethod
from sklearn.externals import six
import six
__all__ = [
'BaseSimpleEstimator'

View File

@@ -103,4 +103,4 @@ class KNNClassifier(BaseSimpleEstimator):
# We want the most common along the rows as the predictions
# I.e:
# array([1, ..., 0])
return mode(predicted_labels, axis=-1)[0].ravel()
return mode(predicted_labels, axis=1)[0].ravel()

View File

@@ -2,7 +2,7 @@
from __future__ import absolute_import
from sklearn.externals import six
import six
from abc import ABCMeta, abstractmethod
import numpy as np

View File

@@ -2,7 +2,7 @@
from __future__ import absolute_import
from sklearn.externals import six
import six
from abc import ABCMeta, abstractmethod
__all__ = [

View File

@@ -70,7 +70,7 @@ class SimpleLinearRegression(BaseSimpleEstimator):
# Let's compute the least squares on X wrt y
# Least squares solves the equation `a x = b` by computing a
# vector `x` that minimizes the Euclidean 2-norm `|| b - a x ||^2`.
theta, _, rank, singular_values = lstsq(X, y)
theta, _, rank, singular_values = lstsq(X, y, rcond=None)
# finally, we compute the intercept values as the mean of the target
# variable MINUS the inner product of the X_means and the coefficients

View File

@@ -5,7 +5,6 @@ from __future__ import absolute_import
from sklearn.datasets import load_iris
from packtml.utils import linalg
from numpy.testing import assert_array_almost_equal
import numpy as np
iris = load_iris()
@@ -17,7 +16,7 @@ def test_row_norms():
X_centered = X - means
norms = linalg.l2_norm(X_centered, axis=0)
assert_array_almost_equal(
assert np.allclose(
norms,
np.array([ 10.10783524, 5.29269308,
21.53749599, 9.31556404]))
np.array([10.10783524, 5.29269308, 21.53749599, 9.31556404]),
rtol=0.01)

View File

@@ -3,7 +3,6 @@
from __future__ import absolute_import
from sklearn.externals import six
from sklearn.utils.validation import check_random_state
from sklearn.model_selection import ShuffleSplit
import numpy as np

View File

@@ -1,5 +1,5 @@
numpy>=0.11
numpy>=0.15
scipy>=0.19
scikit-learn>=0.18
pandas
scikit-learn>=0.19
pandas>=0.23
matplotlib