Commit d6678d1

REF: de-duplicate IntervalIndex compat code (#36372)
1 parent 234f5ac commit d6678d1

3 files changed: +17, -36 lines

pandas/core/indexes/base.py

Lines changed: 13 additions & 4 deletions
@@ -3316,7 +3316,7 @@ def _can_reindex(self, indexer):
         ValueError if its a duplicate axis
         """
         # trying to reindex on an axis with duplicates
-        if not self.is_unique and len(indexer):
+        if not self._index_as_unique and len(indexer):
             raise ValueError("cannot reindex from a duplicate axis")
 
     def reindex(self, target, method=None, level=None, limit=None, tolerance=None):
@@ -3360,8 +3360,7 @@ def reindex(self, target, method=None, level=None, limit=None, tolerance=None):
         if self.equals(target):
            indexer = None
         else:
-            # check is_overlapping for IntervalIndex compat
-            if self.is_unique and not getattr(self, "is_overlapping", False):
+            if self._index_as_unique:
                 indexer = self.get_indexer(
                     target, method=method, limit=limit, tolerance=tolerance
                 )
@@ -4759,11 +4758,21 @@ def get_indexer_for(self, target, **kwargs):
         numpy.ndarray
             List of indices.
         """
-        if self.is_unique:
+        if self._index_as_unique:
             return self.get_indexer(target, **kwargs)
         indexer, _ = self.get_indexer_non_unique(target, **kwargs)
         return indexer
 
+    @property
+    def _index_as_unique(self):
+        """
+        Whether we should treat this as unique for the sake of
+        get_indexer vs get_indexer_non_unique.
+
+        For IntervalIndex compat.
+        """
+        return self.is_unique
+
     def _maybe_promote(self, other: "Index"):
         """
         When dealing with an object-dtype Index and a non-object Index, see
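
The base-class hook simply re-exposes is_unique, so the call sites above behave exactly as before for ordinary indexes; only subclasses that override _index_as_unique change the dispatch. A minimal sketch of the resulting behavior on a plain Index (illustrative only: _index_as_unique is a private attribute, and the printed values assume a pandas build that includes this commit):

import pandas as pd

dup_idx = pd.Index(["a", "b", "b"])

# is_unique is False, so the default _index_as_unique is also False and
# get_indexer_for falls back to get_indexer_non_unique.
print(dup_idx._index_as_unique)        # False
print(dup_idx.get_indexer_for(["b"]))  # [1 2], one position per matching label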

pandas/core/indexes/interval.py

Lines changed: 3 additions & 31 deletions
@@ -516,22 +516,6 @@ def is_overlapping(self) -> bool:
         # GH 23309
         return self._engine.is_overlapping
 
-    def _can_reindex(self, indexer: np.ndarray) -> None:
-        """
-        Check if we are allowing reindexing with this particular indexer.
-
-        Parameters
-        ----------
-        indexer : an integer indexer
-
-        Raises
-        ------
-        ValueError if its a duplicate axis
-        """
-        # trying to reindex on an axis with duplicates
-        if self.is_overlapping and len(indexer):
-            raise ValueError("cannot reindex from an overlapping axis")
-
     def _needs_i8_conversion(self, key) -> bool:
         """
         Check if a given key needs i8 conversion. Conversion is necessary for
@@ -839,21 +823,9 @@ def get_indexer_non_unique(
 
         return ensure_platform_int(indexer), ensure_platform_int(missing)
 
-    def get_indexer_for(self, target: AnyArrayLike, **kwargs) -> np.ndarray:
-        """
-        Guaranteed return of an indexer even when overlapping.
-
-        This dispatches to get_indexer or get_indexer_non_unique
-        as appropriate.
-
-        Returns
-        -------
-        numpy.ndarray
-            List of indices.
-        """
-        if self.is_overlapping:
-            return self.get_indexer_non_unique(target)[0]
-        return self.get_indexer(target, **kwargs)
+    @property
+    def _index_as_unique(self):
+        return not self.is_overlapping
 
     def _convert_slice_indexer(self, key: slice, kind: str):
         if not (key.step is None or key.step == 1):
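
The override is the whole point of the hook: an IntervalIndex can be free of duplicates yet still overlapping, and in that case get_indexer cannot return a single position per key. A short sketch of that case using only public API (output assumes the documented behavior of is_overlapping and get_indexer_for):

import pandas as pd

idx = pd.IntervalIndex.from_tuples([(0, 2), (1, 3)])  # unique, but overlapping
print(idx.is_unique)       # True
print(idx.is_overlapping)  # True

# 1.5 lies in both (0, 2] and (1, 3], so _index_as_unique is False and
# get_indexer_for dispatches to get_indexer_non_unique.
print(idx.get_indexer_for([1.5]))  # [0 1]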

pandas/core/indexing.py

Lines changed: 1 addition & 1 deletion
@@ -1256,7 +1256,7 @@ def _get_listlike_indexer(self, key, axis: int, raise_missing: bool = False):
             )
             return ax[indexer], indexer
 
-        if ax.is_unique and not getattr(ax, "is_overlapping", False):
+        if ax._index_as_unique:
             indexer = ax.get_indexer_for(keyarr)
             keyarr = ax.reindex(keyarr)[0]
         else:
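
With the property in place, the indexing machinery no longer needs the getattr fallback for non-interval indexes. A hedged usage sketch of the path this touches (assuming, as the overlapping case is designed to allow, that a list-like .loc lookup returns every matching row):

import pandas as pd

s = pd.Series([10, 20], index=pd.IntervalIndex.from_tuples([(0, 2), (1, 3)]))

# The axis is overlapping, so _get_listlike_indexer takes the non-unique
# branch and both containing intervals are returned for 1.5.
print(s.loc[[1.5]])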
