Commit c15f1c3

Drop support for python 2 and python 3.5 (#291)
* Drop Python2 support
* Fix test failures and pyflakes error
* Fix pep8 issue
* Stop testing py3.5
* Update README
1 parent 2d5a942 commit c15f1c3

20 files changed (+58, -146 lines)

.travis.yml

Lines changed: 2 additions & 13 deletions
@@ -7,23 +7,12 @@ env:
   - SKGGM_VERSION=a0ed406586c4364ea3297a658f415e13b5cbdaf8
 matrix:
   include:
-    - name: "Pytest python 2.7 with skggm"
-      python: "2.7"
+    - name: "Pytest python 3.6 without skggm"
+      python: "3.6"
       before_install:
         - sudo apt-get install liblapack-dev
         - pip install --upgrade pip pytest
         - pip install wheel cython numpy scipy codecov pytest-cov scikit-learn
-        - pip install git+https://github.com/skggm/skggm.git@${SKGGM_VERSION};
-      script:
-        - pytest test --cov;
-      after_success:
-        - bash <(curl -s https://codecov.io/bash)
-    - name: "Pytest python 3.4 without skggm"
-      python: "3.4"
-      before_install:
-        - sudo apt-get install liblapack-dev
-        - pip install --upgrade pip "pytest<5"
-        - pip install wheel cython numpy scipy codecov pytest-cov scikit-learn
       script:
         - pytest test --cov;
       after_success:

README.rst

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ metric-learn contains efficient Python implementations of several popular superv
 
 **Dependencies**
 
-- Python 2.7+, 3.4+
+- Python 3.6+
 - numpy, scipy, scikit-learn>=0.20.3
 
 **Optional dependencies**

metric_learn/__init__.py

Lines changed: 0 additions & 2 deletions
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 from .constraints import Constraints
 from .covariance import Covariance
 from .itml import ITML, ITML_Supervised
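Note: the `from __future__ import absolute_import` lines removed here and in several modules below are no-ops on Python 3, where absolute imports (like `print_function` and `division`) are built into the language. A quick standalone check, as a sketch:

import __future__

# Each __future__ feature records the release in which it became mandatory;
# at or past that release, importing it changes nothing.
for name in ('absolute_import', 'print_function', 'division'):
  feature = getattr(__future__, name)
  print(name, '->', feature.getMandatoryRelease())  # each prints a (3, 0, ...) tuple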

metric_learn/_util.py

Lines changed: 1 addition & 2 deletions
@@ -1,5 +1,4 @@
 import numpy as np
-import six
 from numpy.linalg import LinAlgError
 from sklearn.datasets import make_spd_matrix
 from sklearn.decomposition import PCA
@@ -283,7 +282,7 @@ def make_name(estimator):
     if a string is given
   """
   if estimator is not None:
-    if isinstance(estimator, six.string_types):
+    if isinstance(estimator, str):
       estimator_name = estimator
     else:
       estimator_name = estimator.__class__.__name__
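`six.string_types` existed because Python 2 had two text types (`str` and `unicode`); Python 3 has only `str`, so the plain `isinstance` check is an exact replacement. A minimal runnable sketch, condensed from the hunk above (not the library's full code):

def make_name(estimator):
  """Return a display name: the string itself, or else the class name."""
  if estimator is None:
    return None
  if isinstance(estimator, str):  # was: isinstance(estimator, six.string_types)
    return estimator
  return estimator.__class__.__name__

assert make_name('NCA') == 'NCA'
assert make_name(dict()) == 'dict'
assert make_name(None) is None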

metric_learn/base_metric.py

Lines changed: 4 additions & 5 deletions
@@ -8,12 +8,11 @@
 from sklearn.metrics import roc_auc_score, roc_curve, precision_recall_curve
 import numpy as np
 from abc import ABCMeta, abstractmethod
-import six
 from ._util import ArrayIndexer, check_input, validate_vector
 import warnings
 
 
-class BaseMetricLearner(six.with_metaclass(ABCMeta, BaseEstimator)):
+class BaseMetricLearner(BaseEstimator, metaclass=ABCMeta):
   """
   Base class for all metric-learners.
 
@@ -145,7 +144,7 @@ def get_metric(self):
   """
 
 
-class MetricTransformer(six.with_metaclass(ABCMeta)):
+class MetricTransformer(metaclass=ABCMeta):
 
   @abstractmethod
   def transform(self, X):
@@ -163,8 +162,8 @@ def transform(self, X):
     """
 
 
-class MahalanobisMixin(six.with_metaclass(ABCMeta, BaseMetricLearner,
-                                          MetricTransformer)):
+class MahalanobisMixin(BaseMetricLearner, MetricTransformer,
+                       metaclass=ABCMeta):
   r"""Mahalanobis metric learning algorithms.
 
   Algorithm that learns a Mahalanobis (pseudo) distance :math:`d_M(x, x')`,
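`six.with_metaclass(ABCMeta, ...)` built a throwaway intermediate class so one spelling worked on both Python 2 and 3; with Python 3 only, the `metaclass=` keyword in the class statement expresses this directly, as the three class declarations above now do. A self-contained sketch of the pattern (class names here are illustrative, not the library's):

from abc import ABCMeta, abstractmethod


class Base:
  pass


class Learner(Base, metaclass=ABCMeta):  # was: six.with_metaclass(ABCMeta, Base)
  @abstractmethod
  def fit(self, X):
    ...


class Concrete(Learner):
  def fit(self, X):
    return self


Concrete().fit(None)  # fine: the abstract method is implemented
try:
  Learner()  # TypeError: can't instantiate abstract class
except TypeError as err:
  print(err)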

metric_learn/constraints.py

Lines changed: 1 addition & 2 deletions
@@ -4,7 +4,6 @@
 """
 import numpy as np
 import warnings
-from six.moves import xrange
 from sklearn.utils import check_random_state
 from sklearn.neighbors import NearestNeighbors
 
@@ -245,7 +244,7 @@ def chunks(self, num_chunks=100, chunk_size=2, random_state=None):
     chunks = -np.ones_like(self.partial_labels, dtype=int)
     uniq, lookup = np.unique(self.partial_labels, return_inverse=True)
     unknown_uniq = np.where(uniq < 0)[0]
-    all_inds = [set(np.where(lookup == c)[0]) for c in xrange(len(uniq))
+    all_inds = [set(np.where(lookup == c)[0]) for c in range(len(uniq))
                 if c not in unknown_uniq]
     max_chunks = int(np.sum([len(s) // chunk_size for s in all_inds]))
     if max_chunks < num_chunks:
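This is the first of many `xrange` -> `range` renames in this commit (itml.py, lfda.py, lmnn.py, lsml.py, mmc.py, rca.py, and the tests below). Python 3's `range` is a lazy sequence with the constant memory footprint of Python 2's `xrange`, so the rename is behavior-preserving. A quick sketch:

import sys

squares = [c * c for c in range(5)]  # was: xrange(5)
assert squares == [0, 1, 4, 9, 16]

# A range object is not a materialized list; its size does not grow with its length.
assert sys.getsizeof(range(10 ** 9)) == sys.getsizeof(range(10))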

metric_learn/covariance.py

Lines changed: 0 additions & 1 deletion
@@ -2,7 +2,6 @@
 Covariance metric (baseline method)
 """
 
-from __future__ import absolute_import
 import numpy as np
 import scipy
 from sklearn.base import TransformerMixin

metric_learn/itml.py

Lines changed: 1 addition & 3 deletions
@@ -2,10 +2,8 @@
 Information Theoretic Metric Learning (ITML)
 """
 
-from __future__ import print_function, absolute_import
 import warnings
 import numpy as np
-from six.moves import xrange
 from sklearn.exceptions import ChangedBehaviorWarning
 from sklearn.metrics import pairwise_distances
 from sklearn.utils.validation import check_array
@@ -69,7 +67,7 @@ def _fit(self, pairs, y, bounds=None):
     pos_vv = pos_pairs[:, 0, :] - pos_pairs[:, 1, :]
     neg_vv = neg_pairs[:, 0, :] - neg_pairs[:, 1, :]
 
-    for it in xrange(self.max_iter):
+    for it in range(self.max_iter):
       # update positives
       for i, v in enumerate(pos_vv):
         wtw = v.dot(A).dot(v)  # scalar

metric_learn/lfda.py

Lines changed: 1 addition & 3 deletions
@@ -1,11 +1,9 @@
 """
 Local Fisher Discriminant Analysis (LFDA)
 """
-from __future__ import division, absolute_import
 import numpy as np
 import scipy
 import warnings
-from six.moves import xrange
 from sklearn.metrics import pairwise_distances
 from sklearn.base import TransformerMixin
 
@@ -127,7 +125,7 @@ def fit(self, X, y):
     tSb = np.zeros((d, d))
     tSw = np.zeros((d, d))
 
-    for c in xrange(num_classes):
+    for c in range(num_classes):
       Xc = X[y == c]
       nc = Xc.shape[0]

metric_learn/lmnn.py

Lines changed: 2 additions & 4 deletions
@@ -1,11 +1,9 @@
 """
 Large Margin Nearest Neighbor Metric learning (LMNN)
 """
-from __future__ import print_function, absolute_import
 import numpy as np
 import warnings
 from collections import Counter
-from six.moves import xrange
 from sklearn.exceptions import ChangedBehaviorWarning
 from sklearn.metrics import euclidean_distances
 from sklearn.base import TransformerMixin
@@ -229,7 +227,7 @@ def fit(self, X, y):
             "| learning rate")
 
     # main loop
-    for it in xrange(2, self.max_iter):
+    for it in range(2, self.max_iter):
       # then at each iteration, we try to find a value of L that has better
       # objective than the previous L, following the gradient:
       while True:
@@ -293,7 +291,7 @@ def _loss_grad(self, X, L, dfG, k, reg, target_neighbors, label_inds):
     # compute the gradient
     total_active = 0
     df = np.zeros((X.shape[1], X.shape[1]))
-    for nn_idx in reversed(xrange(k)):  # note: reverse not useful here
+    for nn_idx in reversed(range(k)):  # note: reverse not useful here
      act1 = g0 < g1[:, nn_idx]
      act2 = g0 < g2[:, nn_idx]
      total_active += act1.sum() + act2.sum()

metric_learn/lsml.py

Lines changed: 1 addition & 3 deletions
@@ -2,11 +2,9 @@
 Metric Learning from Relative Comparisons by Minimizing Squared Residual (LSML)
 """
 
-from __future__ import print_function, absolute_import, division
 import warnings
 import numpy as np
 import scipy.linalg
-from six.moves import xrange
 from sklearn.base import TransformerMixin
 from sklearn.exceptions import ChangedBehaviorWarning
 
@@ -66,7 +64,7 @@ def _fit(self, quadruplets, weights=None):
     s_best = self._total_loss(M, vab, vcd, prior_inv)
     if self.verbose:
       print('initial loss', s_best)
-    for it in xrange(1, self.max_iter + 1):
+    for it in range(1, self.max_iter + 1):
       grad = self._gradient(M, vab, vcd, prior_inv)
       grad_norm = scipy.linalg.norm(grad)
       if grad_norm < self.tol:

metric_learn/mlkr.py

Lines changed: 0 additions & 1 deletion
@@ -1,7 +1,6 @@
 """
 Metric Learning for Kernel Regression (MLKR)
 """
-from __future__ import division, print_function
 import time
 import sys
 import warnings

metric_learn/mmc.py

Lines changed: 2 additions & 4 deletions
@@ -1,8 +1,6 @@
 """Mahalanobis Metric for Clustering (MMC)"""
-from __future__ import print_function, absolute_import, division
 import warnings
 import numpy as np
-from six.moves import xrange
 from sklearn.base import TransformerMixin
 from sklearn.utils.validation import assert_all_finite
 from sklearn.exceptions import ChangedBehaviorWarning
@@ -110,12 +108,12 @@ def _fit_full(self, pairs, y):
 
     A_old = A.copy()
 
-    for cycle in xrange(self.max_iter):
+    for cycle in range(self.max_iter):
 
       # projection of constraints C1 and C2
       satisfy = False
 
-      for it in xrange(self.max_proj):
+      for it in range(self.max_proj):
 
         # First constraint:
         # f(A) = \sum_{i,j \in S} d_ij' A d_ij <= t (1)

metric_learn/nca.py

Lines changed: 0 additions & 1 deletion
@@ -2,7 +2,6 @@
 Neighborhood Components Analysis (NCA)
 """
 
-from __future__ import absolute_import
 import warnings
 import time
 import sys

metric_learn/rca.py

Lines changed: 1 addition & 3 deletions
@@ -2,10 +2,8 @@
 Relative Components Analysis (RCA)
 """
 
-from __future__ import absolute_import
 import numpy as np
 import warnings
-from six.moves import xrange
 from sklearn.base import TransformerMixin
 from sklearn.exceptions import ChangedBehaviorWarning
 
@@ -22,7 +20,7 @@ def _chunk_mean_centering(data, chunks):
   # mean on it
   chunk_data = data[chunk_mask].astype(float, copy=False)
   chunk_labels = chunks[chunk_mask]
-  for c in xrange(num_chunks):
+  for c in range(num_chunks):
     mask = chunk_labels == c
     chunk_data[mask] -= chunk_data[mask].mean(axis=0)

metric_learn/sdml.py

Lines changed: 0 additions & 1 deletion
@@ -2,7 +2,6 @@
 Sparse High-Dimensional Metric Learning (SDML)
 """
 
-from __future__ import absolute_import
 import warnings
 import numpy as np
 from sklearn.base import TransformerMixin

setup.py

Lines changed: 1 addition & 2 deletions
@@ -29,7 +29,7 @@
       classifiers=[
           'Development Status :: 4 - Beta',
           'License :: OSI Approved :: MIT License',
-          'Programming Language :: Python',
+          'Programming Language :: Python :: 3',
          'Operating System :: OS Independent',
          'Intended Audience :: Science/Research',
          'Topic :: Scientific/Engineering'
@@ -39,7 +39,6 @@
          'numpy',
          'scipy',
          'scikit-learn',
-         'six'
       ],
       extras_require=dict(
           docs=['sphinx', 'shinx_rtd_theme', 'numpydoc'],
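For reference, a sketch of how the touched `setup()` fields read after this commit, reconstructed from the two hunks above (all other arguments omitted):

from setuptools import setup

setup(
    name='metric-learn',
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',  # was: 'Programming Language :: Python'
        'Operating System :: OS Independent',
        'Intended Audience :: Science/Research',
        'Topic :: Scientific/Engineering'
    ],
    install_requires=[
        'numpy',
        'scipy',
        'scikit-learn',  # 'six' is gone: no more Py2/Py3 compatibility shims
    ],
)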

test/metric_learn_test.py

Lines changed: 6 additions & 7 deletions
@@ -4,7 +4,6 @@
 import numpy as np
 import scipy
 from scipy.optimize import check_grad, approx_fprime
-from six.moves import xrange
 from sklearn.metrics import pairwise_distances, euclidean_distances
 from sklearn.datasets import (load_iris, make_classification, make_regression,
                               make_spd_matrix)
@@ -32,7 +31,7 @@
 def class_separation(X, labels):
   unique_labels, label_inds = np.unique(labels, return_inverse=True)
   ratio = 0
-  for li in xrange(len(unique_labels)):
+  for li in range(len(unique_labels)):
     Xc = X[label_inds == li]
     Xnc = X[label_inds != li]
     ratio += pairwise_distances(Xc).mean() / pairwise_distances(Xc, Xnc).mean()
@@ -385,15 +384,15 @@ def loss_fn(L, X, y, target_neighbors, reg):
       for j in target_neighbors[i]:
         loss += (1 - reg) * np.sum((Lx[i] - Lx[j]) ** 2)
         grad += (1 - reg) * np.outer(Lx[i] - Lx[j], X[i] - X[j])
-        for l in range(X.shape[0]):
-          if y[i] != y[l]:
+        for k in range(X.shape[0]):
+          if y[i] != y[k]:
            hin, active = hinge(1 + np.sum((Lx[i] - Lx[j])**2) -
-                               np.sum((Lx[i] - Lx[l])**2))
+                               np.sum((Lx[i] - Lx[k])**2))
            total_active += active
            if active:
              loss += reg * hin
              grad += (reg * (np.outer(Lx[i] - Lx[j], X[i] - X[j]) -
-                             np.outer(Lx[i] - Lx[l], X[i] - X[l])))
+                             np.outer(Lx[i] - Lx[k], X[i] - X[k])))
       grad = 2 * grad
       return grad, loss, total_active
 
@@ -521,7 +520,7 @@ def test_toy_ex_lmnn(X, y, loss):
     # storage
     a1 = [None] * k
     a2 = [None] * k
-    for nn_idx in xrange(k):
+    for nn_idx in range(k):
      a1[nn_idx] = np.array([])
      a2[nn_idx] = np.array([])
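Besides dropping `xrange`, this diff renames the loop variable `l` to `k`; that is the pep8 fix named in the commit message, since pycodestyle's E741 check flags `l` as an ambiguous name (easily misread as `1` or `I`). The rename is purely cosmetic, as a sketch:

# pycodestyle flags single-letter 'l' as ambiguous (E741); 'k' reads unambiguously.
for k in range(3):  # was: for l in range(3)
  print(k)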
