
Commit 932de85

Callidior authored and perimosocordiae committed
Implementation of MMC (#61)
* Implementation of PGDM
* Python2 compatibility
* Speed up PGDM on high-dimensional data
* Optimized some summations using `np.einsum`
* Addressed requests from review by perimosocordiae
* Renamed PGDM to MMC
* Addressed 2nd review by perimosocordiae
1 parent 5c1cc66 commit 932de85

9 files changed: +526, -18 lines changed
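
One of the commit notes above mentions optimizing summations with `np.einsum`. The MMC code itself (metric_learn/mmc.py) is not included in this excerpt, but the general loop-to-einsum rewrite is illustrated by the minimal sketch below; the data and shapes are made up for illustration and are not taken from the MMC code.

import numpy as np

# Hypothetical data: 1000 difference vectors of dimension 5.
diffs = np.random.randn(1000, 5)

# Loop version: accumulate the sum of outer products diffs[i] * diffs[i].T
outer_sum_loop = np.zeros((5, 5))
for d in diffs:
  outer_sum_loop += np.outer(d, d)

# einsum version: the same sum in a single vectorized call.
outer_sum_einsum = np.einsum('ij,ik->jk', diffs, diffs)

assert np.allclose(outer_sum_loop, outer_sum_einsum)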

README.rst

Lines changed: 1 addition & 0 deletions

@@ -15,6 +15,7 @@ Metric Learning algorithms in Python.
 - Local Fisher Discriminant Analysis (LFDA)
 - Relative Components Analysis (RCA)
 - Metric Learning for Kernel Regression (MLKR)
+- Mahalanobis Metric for Clustering (MMC)
 
 **Dependencies**
 

metric_learn/__init__.py

Lines changed: 1 addition & 0 deletions

@@ -10,3 +10,4 @@
 from .lfda import LFDA
 from .rca import RCA, RCA_Supervised
 from .mlkr import MLKR
+from .mmc import MMC, MMC_Supervised
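
With this export in place, the new estimators are importable from the package top level. A hedged usage sketch, assuming MMC_Supervised follows the same fit(X, y) and metric() conventions as the other *_Supervised classes in the package (the exact MMC parameters are not shown in this excerpt):

from sklearn.datasets import load_iris
from metric_learn import MMC_Supervised

X, y = load_iris(return_X_y=True)
mmc = MMC_Supervised()      # default settings; exact parameters not shown in this excerpt
mmc.fit(X, y)               # assumed fit(X, y) convention of the *_Supervised classes
print(mmc.metric().shape)   # learned Mahalanobis matrix, per the metric() convention in itml.py below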

metric_learn/_util.py

Lines changed: 12 additions & 0 deletions

@@ -0,0 +1,12 @@
+import numpy as np
+
+
+# hack around lack of axis kwarg in older numpy versions
+try:
+  np.linalg.norm([[4]], axis=1)
+except TypeError:
+  def vector_norm(X):
+    return np.apply_along_axis(np.linalg.norm, 1, X)
+else:
+  def vector_norm(X):
+    return np.linalg.norm(X, axis=1)
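
The helper above computes one Euclidean norm per row: it uses `np.linalg.norm(..., axis=1)` when available and falls back to `np.apply_along_axis` on older NumPy releases that lack the `axis` keyword. A quick sanity check of the expected behaviour, assuming the module layout above:

import numpy as np
from metric_learn._util import vector_norm

X = np.array([[3.0, 4.0],
              [0.0, 0.0]])
print(vector_norm(X))  # -> [5. 0.], one Euclidean norm per row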

metric_learn/itml.py

Lines changed: 5 additions & 14 deletions

@@ -21,6 +21,7 @@
 
 from .base_metric import BaseMetricLearner
 from .constraints import Constraints
+from ._util import vector_norm
 
 
 class ITML(BaseMetricLearner):
@@ -54,10 +55,10 @@ def _process_inputs(self, X, constraints, bounds):
     self.X_ = X = check_array(X)
     # check to make sure that no two constrained vectors are identical
     a,b,c,d = constraints
-    ident = _vector_norm(X[a] - X[b]) > 1e-9
-    a, b = a[ident], b[ident]
-    ident = _vector_norm(X[c] - X[d]) > 1e-9
-    c, d = c[ident], d[ident]
+    no_ident = vector_norm(X[a] - X[b]) > 1e-9
+    a, b = a[no_ident], b[no_ident]
+    no_ident = vector_norm(X[c] - X[d]) > 1e-9
+    c, d = c[no_ident], d[no_ident]
     # init bounds
     if bounds is None:
       self.bounds_ = np.percentile(pairwise_distances(X), (5, 95))
@@ -138,16 +139,6 @@ def fit(self, X, constraints, bounds=None):
   def metric(self):
     return self.A_
 
-# hack around lack of axis kwarg in older numpy versions
-try:
-  np.linalg.norm([[4]], axis=1)
-except TypeError:
-  def _vector_norm(X):
-    return np.apply_along_axis(np.linalg.norm, 1, X)
-else:
-  def _vector_norm(X):
-    return np.linalg.norm(X, axis=1)
-
 
 class ITML_Supervised(ITML):
   """Information Theoretic Metric Learning (ITML)"""
