REF: docstrings that dont need to go into shared_docs #31408

Merged 5 commits on Jan 31, 2020
156 changes: 42 additions & 114 deletions pandas/core/indexes/base.py
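The diff below applies one mechanical pattern throughout: each docstring that was stored in the module-level `_index_shared_docs` dict and attached with the `@Appender` decorator, but is only ever used by a single method, is moved to a plain inline docstring on that method. A minimal, self-contained sketch of the before/after shape — the stub `Appender`, the shortened docstring text, and the class names are illustrative stand-ins, not the real pandas internals:

```python
# Minimal sketch of the refactor. Assumed names and shortened docstring;
# the real code uses pandas.util._decorators.Appender and the full
# docstrings shown in the diff below.
_index_shared_docs: dict = {}


def Appender(addendum: str):
    """Tiny stand-in for pandas' Appender: append text to __doc__."""

    def decorator(func):
        func.__doc__ = (func.__doc__ or "") + addendum
        return func

    return decorator


# Before: the docstring lives in a shared dict and is attached by decorator.
_index_shared_docs["fillna"] = """
    Fill NA/NaN values with the specified value.
    """


class IndexBefore:
    @Appender(_index_shared_docs["fillna"])
    def fillna(self, value=None, downcast=None):
        ...


# After: the docstring is used by this one method only, so it is inlined.
class IndexAfter:
    def fillna(self, value=None, downcast=None):
        """
        Fill NA/NaN values with the specified value.
        """
        ...


assert "Fill NA/NaN" in IndexBefore.fillna.__doc__
assert "Fill NA/NaN" in IndexAfter.fillna.__doc__
```

Inlining keeps the text next to the method it documents and drops one level of indirection; only entries that are actually reused elsewhere would need to stay in the shared dict.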
@@ -505,9 +505,8 @@ def _get_attributes_dict(self):
"""
return {k: getattr(self, k, None) for k in self._attributes}

_index_shared_docs[
"_shallow_copy"
] = """
def _shallow_copy(self, values=None, **kwargs):
"""
Create a new Index with the same class as the caller, don't copy the
data, use the same object attributes with passed in attributes taking
precedence.
@@ -519,9 +518,6 @@ def _get_attributes_dict(self):
values : the values to create the new Index, optional
kwargs : updates the default attributes for this Index
"""

@Appender(_index_shared_docs["_shallow_copy"])
def _shallow_copy(self, values=None, **kwargs):
if values is None:
values = self.values

@@ -659,9 +655,8 @@ def view(self, cls=None):
result._id = self._id
return result

_index_shared_docs[
"astype"
] = """
def astype(self, dtype, copy=True):
"""
Create an Index with values cast to dtypes. The class of a new Index
is determined by dtype. When conversion is impossible, a ValueError
exception is raised.
@@ -683,9 +678,6 @@ def view(self, cls=None):
Index
Index with values cast to specified dtype.
"""

@Appender(_index_shared_docs["astype"])
def astype(self, dtype, copy=True):
if is_dtype_equal(self.dtype, dtype):
return self.copy() if copy else self

@@ -823,9 +815,8 @@ def repeat(self, repeats, axis=None):
# --------------------------------------------------------------------
# Copying Methods

_index_shared_docs[
"copy"
] = """
def copy(self, name=None, deep=False, dtype=None, **kwargs):
"""
Make a copy of this object. Name and dtype sets those attributes on
the new object.

@@ -844,9 +835,6 @@ def repeat(self, repeats, axis=None):
In most cases, there should be no functional difference from using
``deep``, but if ``deep`` is passed it will attempt to deepcopy.
"""

@Appender(_index_shared_docs["copy"])
def copy(self, name=None, deep=False, dtype=None, **kwargs):
if deep:
new_index = self._shallow_copy(self._data.copy())
else:
@@ -1537,9 +1525,8 @@ def droplevel(self, level=0):
verify_integrity=False,
)

_index_shared_docs[
"_get_grouper_for_level"
] = """
def _get_grouper_for_level(self, mapper, level=None):
"""
Get index grouper corresponding to an index level

Parameters
@@ -1558,9 +1545,6 @@ def droplevel(self, level=0):
uniques : Index or None
Index of unique values for level.
"""

@Appender(_index_shared_docs["_get_grouper_for_level"])
def _get_grouper_for_level(self, mapper, level=None):
assert level is None or level == 0
if mapper is None:
grouper = self
@@ -2156,9 +2140,8 @@ def notna(self):

notnull = notna

_index_shared_docs[
"fillna"
] = """
def fillna(self, value=None, downcast=None):
"""
Fill NA/NaN values with the specified value.

Parameters
@@ -2175,9 +2158,6 @@ def notna(self):
-------
filled : Index
"""

@Appender(_index_shared_docs["fillna"])
def fillna(self, value=None, downcast=None):
self._assert_can_do_op(value)
if self.hasnans:
result = self.putmask(self._isnan, value)
@@ -2187,9 +2167,8 @@ def fillna(self, value=None, downcast=None):
return Index(result, name=self.name)
return self._shallow_copy()

_index_shared_docs[
"dropna"
] = """
def dropna(self, how="any"):
"""
Return Index without NA/NaN values.

Parameters
@@ -2202,9 +2181,6 @@ def fillna(self, value=None, downcast=None):
-------
valid : Index
"""

@Appender(_index_shared_docs["dropna"])
def dropna(self, how="any"):
if how not in ("any", "all"):
raise ValueError(f"invalid how option: {how}")

@@ -2215,9 +2191,8 @@ def dropna(self, how="any"):
# --------------------------------------------------------------------
# Uniqueness Methods

_index_shared_docs[
"index_unique"
] = """
def unique(self, level=None):
"""
Return unique values in the index. Uniques are returned in order
of appearance, this does NOT sort.

@@ -2237,9 +2212,6 @@ def dropna(self, how="any"):
unique
Series.unique
"""

@Appender(_index_shared_docs["index_unique"] % _index_doc_kwargs)
def unique(self, level=None):
if level is not None:
self._validate_index_level(level)
result = super().unique()
@@ -2625,9 +2597,9 @@ def _union(self, other, sort):
def _wrap_setop_result(self, other, result):
return self._constructor(result, name=get_op_result_name(self, other))

_index_shared_docs[
"intersection"
] = """
# TODO: standardize return type of non-union setops type(self vs other)
def intersection(self, other, sort=False):
"""
Form the intersection of two Index objects.

This returns a new Index with elements common to the index and `other`.
@@ -2661,10 +2633,6 @@ def _wrap_setop_result(self, other, result):
>>> idx1.intersection(idx2)
Int64Index([3, 4], dtype='int64')
"""

# TODO: standardize return type of non-union setops type(self vs other)
@Appender(_index_shared_docs["intersection"])
def intersection(self, other, sort=False):
self._validate_sort_keyword(sort)
self._assert_can_do_setop(other)
other = ensure_index(other)
@@ -2868,9 +2836,8 @@ def _convert_can_do_setop(self, other):
# --------------------------------------------------------------------
# Indexing Methods

_index_shared_docs[
"get_loc"
] = """
def get_loc(self, key, method=None, tolerance=None):
"""
Get integer location, slice or boolean mask for requested label.

Parameters
@@ -2907,9 +2874,6 @@ def _convert_can_do_setop(self, other):
>>> non_monotonic_index.get_loc('b')
array([False, True, False, True], dtype=bool)
"""

@Appender(_index_shared_docs["get_loc"])
def get_loc(self, key, method=None, tolerance=None):
if method is None:
if tolerance is not None:
raise ValueError(
@@ -3121,19 +3085,15 @@ def _filter_indexer_tolerance(
# --------------------------------------------------------------------
# Indexer Conversion Methods

_index_shared_docs[
"_convert_scalar_indexer"
] = """
def _convert_scalar_indexer(self, key, kind=None):
"""
Convert a scalar indexer.

Parameters
----------
key : label of the slice bound
kind : {'loc', 'getitem', 'iloc'} or None
"""

@Appender(_index_shared_docs["_convert_scalar_indexer"])
def _convert_scalar_indexer(self, key, kind=None):
"""
assert kind in ["loc", "getitem", "iloc", None]

if kind == "iloc":
@@ -3171,9 +3131,8 @@ def _convert_scalar_indexer(self, key, kind=None):

return key

_index_shared_docs[
"_convert_slice_indexer"
] = """
def _convert_slice_indexer(self, key: slice, kind=None):
"""
Convert a slice indexer.

By definition, these are labels unless 'iloc' is passed in.
@@ -3183,10 +3142,7 @@ def _convert_scalar_indexer(self, key, kind=None):
----------
key : label of the slice bound
kind : {'loc', 'getitem', 'iloc'} or None
"""

@Appender(_index_shared_docs["_convert_slice_indexer"])
def _convert_slice_indexer(self, key: slice, kind=None):
"""
assert kind in ["loc", "getitem", "iloc", None]

# validate iloc
@@ -3266,9 +3222,8 @@ def _convert_listlike_indexer(self, keyarr, kind=None):
indexer = self._convert_list_indexer(keyarr, kind=kind)
return indexer, keyarr

_index_shared_docs[
"_convert_arr_indexer"
] = """
def _convert_arr_indexer(self, keyarr):
"""
Convert an array-like indexer to the appropriate dtype.

Parameters
@@ -3279,16 +3234,12 @@ def _convert_listlike_indexer(self, keyarr, kind=None):
Returns
-------
converted_keyarr : array-like
"""

@Appender(_index_shared_docs["_convert_arr_indexer"])
def _convert_arr_indexer(self, keyarr):
"""
keyarr = com.asarray_tuplesafe(keyarr)
return keyarr

_index_shared_docs[
"_convert_index_indexer"
] = """
def _convert_index_indexer(self, keyarr):
"""
Convert an Index indexer to the appropriate dtype.

Parameters
@@ -3299,15 +3250,11 @@ def _convert_arr_indexer(self, keyarr):
Returns
-------
converted_keyarr : Index (or sub-class)
"""

@Appender(_index_shared_docs["_convert_index_indexer"])
def _convert_index_indexer(self, keyarr):
"""
return keyarr

_index_shared_docs[
"_convert_list_indexer"
] = """
def _convert_list_indexer(self, keyarr, kind=None):
"""
Convert a list-like indexer to the appropriate dtype.

Parameters
@@ -3319,10 +3266,7 @@ def _convert_index_indexer(self, keyarr):
Returns
-------
positional indexer or None
"""

@Appender(_index_shared_docs["_convert_list_indexer"])
def _convert_list_indexer(self, keyarr, kind=None):
"""
if (
kind in [None, "iloc"]
and is_integer_dtype(keyarr)
@@ -3502,9 +3446,8 @@ def _reindex_non_unique(self, target):
# --------------------------------------------------------------------
# Join Methods

_index_shared_docs[
"join"
] = """
def join(self, other, how="left", level=None, return_indexers=False, sort=False):
"""
Compute join_index and indexers to conform data
structures to the new index.

@@ -3522,9 +3465,6 @@ def _reindex_non_unique(self, target):
-------
join_index, (left_indexer, right_indexer)
"""

@Appender(_index_shared_docs["join"])
def join(self, other, how="left", level=None, return_indexers=False, sort=False):
self_is_mi = isinstance(self, ABCMultiIndex)
other_is_mi = isinstance(other, ABCMultiIndex)

@@ -4033,9 +3973,8 @@ def memory_usage(self, deep: bool = False) -> int:
result += self._engine.sizeof(deep=deep)
return result

_index_shared_docs[
"where"
] = """
def where(self, cond, other=None):
"""
Return an Index of same shape as self and whose corresponding
entries are from self where cond is True and otherwise are from
other.
@@ -4049,9 +3988,6 @@ def memory_usage(self, deep: bool = False) -> int:
-------
Index
"""

@Appender(_index_shared_docs["where"])
def where(self, cond, other=None):
if other is None:
other = self._na_value

@@ -4146,9 +4082,8 @@ def is_type_compatible(self, kind) -> bool:
"""
return kind == self.inferred_type

_index_shared_docs[
"contains"
] = """
def __contains__(self, key: Any) -> bool:
"""
Return a boolean indicating whether the provided key is in the index.

Parameters
@@ -4177,9 +4112,6 @@ def is_type_compatible(self, kind) -> bool:
>>> 6 in idx
False
"""

@Appender(_index_shared_docs["contains"] % _index_doc_kwargs)
def __contains__(self, key: Any) -> bool:
hash(key)
try:
return key in self._engine
@@ -5020,9 +4952,8 @@ def _validate_indexer(self, form: str_t, key, kind: str_t):
else:
self._invalid_indexer(form, key)

_index_shared_docs[
"_maybe_cast_slice_bound"
] = """
def _maybe_cast_slice_bound(self, label, side: str_t, kind):
"""
This function should be overloaded in subclasses that allow non-trivial
casting on label-slice bounds, e.g. datetime-like indices allowing
strings containing formatted datetimes.
@@ -5041,9 +4972,6 @@ def _validate_indexer(self, form: str_t, key, kind: str_t):
-----
Value of `side` parameter should be validated in caller.
"""

@Appender(_index_shared_docs["_maybe_cast_slice_bound"])
def _maybe_cast_slice_bound(self, label, side: str_t, kind):
assert kind in ["loc", "getitem", None]

# We are a plain index here (sub-class override this method if they
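A couple of the removed entries — `index_unique` (attached to `unique`) and `contains` (attached to `__contains__`) — were additionally run through `% _index_doc_kwargs` before being appended, i.e. the stored text is a %-style template. A small sketch of that variant; the substitution key and docstring text below are made up to illustrate the mechanism and are not the exact pandas `_index_doc_kwargs` contents:

```python
# Sketch of the templated variant: the shared docstring holds %(...)s
# placeholders that are filled from a kwargs dict before being appended.
# The "klass" key is illustrative, not necessarily a real pandas key.
_index_doc_kwargs = {"klass": "Index"}

_index_shared_docs = {
    "contains": """
    Return a boolean indicating whether the provided key is in the %(klass)s.
    """
}


def Appender(addendum: str):
    """Tiny stand-in for pandas' Appender: append text to __doc__."""

    def decorator(func):
        func.__doc__ = (func.__doc__ or "") + addendum
        return func

    return decorator


class Index:
    @Appender(_index_shared_docs["contains"] % _index_doc_kwargs)
    def __contains__(self, key) -> bool:
        return True


assert "in the Index" in Index.__contains__.__doc__
```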