TYP: @final for more Index methods #39333

Merged · 1 commit · Jan 22, 2021
20 changes: 14 additions & 6 deletions pandas/core/indexes/base.py
@@ -822,6 +822,7 @@ def take(self, indices, axis=0, allow_fill=True, fill_value=None, **kwargs):
)
return self._shallow_copy(taken)

@final
def _maybe_disallow_fill(self, allow_fill: bool, fill_value, indices) -> bool:
"""
We only use pandas-style take when allow_fill is True _and_
@@ -960,6 +961,7 @@ def __deepcopy__(self, memo=None):
# --------------------------------------------------------------------
# Rendering Methods

@final
def __repr__(self) -> str_t:
"""
Return a string representation for this object.
@@ -1710,6 +1712,7 @@ def droplevel(self, level=0):

return self._drop_level_numbers(levnums)

@final
def _drop_level_numbers(self, levnums: List[int]):
"""
Drop MultiIndex levels by level _number_, not name.
@@ -1827,6 +1830,7 @@ def is_monotonic_decreasing(self) -> bool:
"""
return self._engine.is_monotonic_decreasing

@final
@property
def _is_strictly_monotonic_increasing(self) -> bool:
"""
@@ -1844,6 +1848,7 @@ def _is_strictly_monotonic_increasing(self) -> bool:
"""
return self.is_unique and self.is_monotonic_increasing

@final
@property
def _is_strictly_monotonic_decreasing(self) -> bool:
"""
@@ -1868,6 +1873,7 @@ def is_unique(self) -> bool:
"""
return self._engine.is_unique

@final
@property
def has_duplicates(self) -> bool:
"""
@@ -2239,6 +2245,7 @@ def _is_all_dates(self) -> bool:
return is_datetime_array(ensure_object(self._values))

@cache_readonly
@final
def is_all_dates(self):
"""
Whether or not the index values only consist of dates.
@@ -3287,6 +3294,7 @@ def get_loc(self, key, method=None, tolerance=None):
"""

@Appender(_index_shared_docs["get_indexer"] % _index_doc_kwargs)
@final
def get_indexer(
self, target, method=None, limit=None, tolerance=None
) -> np.ndarray:
@@ -3346,6 +3354,7 @@ def _get_indexer(

return ensure_platform_int(indexer)

@final
def _check_indexing_method(self, method):
"""
Raise if we have a get_indexer `method` that is not supported or valid.
@@ -4560,7 +4569,7 @@ def putmask(self, mask, value):
np.putmask(values, mask, converted)
return self._shallow_copy(values)

def equals(self, other: object) -> bool:
def equals(self, other: Any) -> bool:
"""
Determine if two Index object are equal.

@@ -5122,6 +5131,7 @@ def get_indexer_for(self, target, **kwargs):
indexer, _ = self.get_indexer_non_unique(target)
return indexer

@final
def _get_indexer_non_comparable(self, target: Index, method, unique: bool = True):
"""
Called from get_indexer or get_indexer_non_unique when the target
@@ -5160,7 +5170,7 @@ def _get_indexer_non_comparable(self, target: Index, method, unique: bool = True
return no_matches, missing

@property
def _index_as_unique(self):
def _index_as_unique(self) -> bool:
"""
Whether we should treat this as unique for the sake of
get_indexer vs get_indexer_non_unique.
@@ -5196,6 +5206,7 @@ def _maybe_promote(self, other: Index):

return self, other

@final
def _should_compare(self, other: Index) -> bool:
"""
Check if `self == other` can ever have non-False entries.
@@ -5804,10 +5815,6 @@ def _cmp_method(self, other, op):
with np.errstate(all="ignore"):
result = ops.comp_method_OBJECT_ARRAY(op, self._values, other)

elif is_interval_dtype(self.dtype):
with np.errstate(all="ignore"):
result = op(self._values, np.asarray(other))

else:
with np.errstate(all="ignore"):
result = ops.comparison_op(self._values, other, op)
@@ -5826,6 +5833,7 @@ def _arith_method(self, other, op):
return (Index(result[0]), Index(result[1]))
return Index(result)

@final
def _unary_method(self, op):
result = op(self._values)
return Index(result, name=self.name)
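For context: `typing.final` has no runtime effect; it only marks a method or property so that type checkers such as mypy report an error when a subclass overrides it. A minimal sketch of the idea, using hypothetical class names rather than the pandas hierarchy:

    from typing import final

    class Base:
        @final
        def has_duplicates(self) -> bool:
            # Subclasses are meant to customize is_unique instead of
            # overriding this method.
            return not self.is_unique()

        def is_unique(self) -> bool:
            return True

    class Child(Base):
        # A type checker flags this override (mypy reports something like
        # "Cannot override final attribute ... declared in base class");
        # at runtime nothing is enforced.
        def has_duplicates(self) -> bool:
            return False

Marking these Index helpers `@final` documents that subclasses (RangeIndex, IntervalIndex, the datetime-like indexes, ...) are not expected to override them.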
17 changes: 6 additions & 11 deletions pandas/core/indexes/datetimelike.py
@@ -162,7 +162,7 @@ def __array_wrap__(self, result, context=None):

# ------------------------------------------------------------------------

def equals(self, other: object) -> bool:
def equals(self, other: Any) -> bool:
"""
Determines if two Index objects contain the same elements.
"""
@@ -508,7 +508,7 @@ def _partial_date_slice(
__truediv__ = make_wrapped_arith_op("__truediv__")
__rtruediv__ = make_wrapped_arith_op("__rtruediv__")

def shift(self, periods=1, freq=None):
def shift(self: _T, periods: int = 1, freq=None) -> _T:
"""
Shift index by desired number of time frequency increments.

@@ -567,7 +567,7 @@ def _get_delete_freq(self, loc: int):
freq = self.freq
return freq

def _get_insert_freq(self, loc, item):
def _get_insert_freq(self, loc: int, item):
"""
Find the `freq` for self.insert(loc, item).
"""
@@ -593,7 +593,7 @@ def _get_insert_freq(self, loc, item):
return freq

@doc(NDArrayBackedExtensionIndex.delete)
def delete(self, loc):
def delete(self: _T, loc) -> _T:
result = super().delete(loc)
result._data._freq = self._get_delete_freq(loc)
return result
@@ -710,6 +710,7 @@ def _intersection(self, other: Index, sort=False) -> Index:
return self._wrap_setop_result(other, result)

def _can_fast_intersect(self: _T, other: _T) -> bool:
# Note: we only get here with len(self) > 0 and len(other) > 0
if self.freq is None:
return False

@@ -725,9 +726,6 @@ def _can_fast_intersect(self: _T, other: _T) -> bool:
# so intersection will preserve freq
return True

elif not len(self) or not len(other):
return False

elif isinstance(self.freq, Tick):
# We "line up" if and only if the difference between two of our points
# is a multiple of our freq
@@ -741,9 +739,6 @@ def _can_fast_union(self: _T, other: _T) -> bool:
# Assumes that type(self) == type(other), as per the annotation
# The ability to fast_union also implies that `freq` should be
# retained on union.
if not isinstance(other, type(self)):
return False

freq = self.freq

if freq is None or freq != other.freq:
@@ -769,7 +764,7 @@ def _can_fast_union(self: _T, other: _T) -> bool:
# Only need to "adjoin", not overlap
return (right_start == left_end + freq) or right_start in left

def _fast_union(self, other, sort=None):
def _fast_union(self: _T, other: _T, sort=None) -> _T:
if len(other) == 0:
return self.view(type(self))

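The `self: _T ... -> _T` annotations introduced on `shift`, `delete`, and `_fast_union` use a bound TypeVar so that type checkers see the concrete subclass (e.g. a DatetimeIndex) coming back rather than the shared mixin base. A rough sketch of the pattern, with made-up names rather than the actual pandas classes:

    from typing import TypeVar

    _T = TypeVar("_T", bound="DatetimeLikeBase")

    class DatetimeLikeBase:
        def shift(self: _T, periods: int = 1, freq=None) -> _T:
            # Placeholder body; the point is the annotation: whatever
            # subclass `self` is, the checker infers that same type back.
            return self

    class FakeDatetimeIndex(DatetimeLikeBase):
        pass

    shifted = FakeDatetimeIndex().shift(2)  # inferred as FakeDatetimeIndex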
2 changes: 1 addition & 1 deletion pandas/core/indexes/interval.py
@@ -729,7 +729,7 @@ def _get_indexer_pointwise(self, target: Index) -> Tuple[np.ndarray, np.ndarray]
return ensure_platform_int(indexer), ensure_platform_int(missing)

@property
def _index_as_unique(self):
def _index_as_unique(self) -> bool:
return not self.is_overlapping

_requires_unique_msg = (
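`_index_as_unique` returning `not self.is_overlapping` feeds the `get_indexer` vs `get_indexer_non_unique` dispatch seen in base.py above: an overlapping IntervalIndex is treated as non-unique even when its elements are distinct, because a single lookup position can be ambiguous. A small illustrative example (assuming pandas of roughly this era; exact output and error wording may differ between versions):

    import pandas as pd

    idx = pd.IntervalIndex.from_tuples([(0, 2), (1, 3)])
    print(idx.is_overlapping)  # True: (0, 2] and (1, 3] overlap

    # Both intervals contain 1.5, so a unique indexer is impossible and
    # callers are routed to get_indexer_non_unique instead of get_indexer.
    indexer, missing = idx.get_indexer_non_unique([1.5])
    print(indexer)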
4 changes: 0 additions & 4 deletions pandas/core/indexes/range.py
@@ -337,10 +337,6 @@ def is_monotonic_increasing(self) -> bool:
def is_monotonic_decreasing(self) -> bool:
return self._range.step < 0 or len(self) <= 1

@property
def has_duplicates(self) -> bool:
return False

def __contains__(self, key: Any) -> bool:
hash(key)
try:
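Removing `RangeIndex.has_duplicates` works because the base-class property, now `@final` in base.py above, already derives the answer from `is_unique`, and a RangeIndex can never hold repeated values. A quick illustration of the observable behavior (not the pandas source):

    import pandas as pd

    ridx = pd.RangeIndex(5)
    print(ridx.is_unique)       # True: a range never repeats values
    print(ridx.has_duplicates)  # False, effectively `not ridx.is_unique`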