CLN: D202 No blank lines allowed after function docstring #31895

Merged
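
For context, D202 is the pydocstyle check that forbids a blank line immediately after a function docstring. This PR deletes exactly those blank lines across the code base; every hunk below removes a single blank line following a closing `"""` (rendered as a bare `-` line). As a minimal sketch of the pattern being cleaned up (hypothetical functions, not taken from the diff; such violations can be listed with, e.g., `pydocstyle --select=D202`):

```python
def before(key: str) -> str:
    """Return the key unchanged."""

    # D202 flags the blank line between the docstring and this comment.
    return key


def after(key: str) -> str:
    """Return the key unchanged."""
    # No blank line after the docstring, so D202 passes.
    return key
```

The file-by-file changes follow.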
10 changes: 0 additions & 10 deletions pandas/_config/config.py
@@ -550,7 +550,6 @@ def _select_options(pat: str) -> List[str]:

    if pat=="all", returns all registered options
    """
-
    # short-circuit for exact key
    if pat in _registered_options:
        return [pat]
@@ -573,7 +572,6 @@ def _get_root(key: str) -> Tuple[Dict[str, Any], str]:

def _is_deprecated(key: str) -> bool:
    """ Returns True if the given option has been deprecated """
-
    key = key.lower()
    return key in _deprecated_options

@@ -586,7 +584,6 @@ def _get_deprecated_option(key):
    -------
    DeprecatedOption (namedtuple) if key is deprecated, None otherwise
    """
-
    try:
        d = _deprecated_options[key]
    except KeyError:
@@ -611,7 +608,6 @@ def _translate_key(key: str) -> str:
    if key id deprecated and a replacement key defined, will return the
    replacement key, otherwise returns `key` as - is
    """
-
    d = _get_deprecated_option(key)
    if d:
        return d.rkey or key
@@ -627,7 +623,6 @@ def _warn_if_deprecated(key: str) -> bool:
    -------
    bool - True if `key` is deprecated, False otherwise.
    """
-
    d = _get_deprecated_option(key)
    if d:
        if d.msg:
@@ -649,7 +644,6 @@ def _warn_if_deprecated(key: str) -> bool:

def _build_option_description(k: str) -> str:
    """ Builds a formatted description of a registered option and prints it """
-
    o = _get_registered_option(k)
    d = _get_deprecated_option(k)

@@ -674,7 +668,6 @@ def _build_option_description(k: str) -> str:

def pp_options_list(keys: Iterable[str], width=80, _print: bool = False):
    """ Builds a concise listing of available options, grouped by prefix """
-
    from textwrap import wrap
    from itertools import groupby

@@ -738,7 +731,6 @@ def config_prefix(prefix):
    will register options "display.font.color", "display.font.size", set the
    value of "display.font.size"... and so on.
    """
-
    # Note: reset_option relies on set_option, and on key directly
    # it does not fit in to this monkey-patching scheme

@@ -801,7 +793,6 @@ def is_instance_factory(_type) -> Callable[[Any], None]:
    ValueError if x is not an instance of `_type`

    """
-
    if isinstance(_type, (tuple, list)):
        _type = tuple(_type)
        type_repr = "|".join(map(str, _type))
@@ -848,7 +839,6 @@ def is_nonnegative_int(value: Optional[int]) -> None:
    ValueError
        When the value is not None or is a negative integer
    """
-
    if value is None:
        return

1 change: 0 additions & 1 deletion pandas/_config/localization.py
@@ -61,7 +61,6 @@ def can_set_locale(lc: str, lc_var: int = locale.LC_ALL) -> bool:
    bool
        Whether the passed locale can be set
    """
-
    try:
        with set_locale(lc, lc_var=lc_var):
            pass

6 changes: 0 additions & 6 deletions pandas/_testing.py
@@ -1508,7 +1508,6 @@ def assert_sp_array_equal(
        create a new BlockIndex for that array, with consolidated
        block indices.
    """
-
    _check_isinstance(left, right, pd.arrays.SparseArray)

    assert_numpy_array_equal(left.sp_values, right.sp_values, check_dtype=check_dtype)
@@ -1876,7 +1875,6 @@ def makeCustomIndex(

        if unspecified, string labels will be generated.
    """
-
    if ndupe_l is None:
        ndupe_l = [1] * nlevels
    assert is_sequence(ndupe_l) and len(ndupe_l) <= nlevels
@@ -2025,7 +2023,6 @@ def makeCustomDataframe(

    >> a=mkdf(5,3,r_idx_nlevels=2,c_idx_nlevels=4)
    """
-
    assert c_idx_nlevels > 0
    assert r_idx_nlevels > 0
    assert r_idx_type is None or (
@@ -2229,7 +2226,6 @@ def can_connect(url, error_classes=None):
    Return True if no IOError (unable to connect) or URLError (bad url) was
    raised
    """
-
    if error_classes is None:
        error_classes = _get_default_network_errors()

@@ -2603,7 +2599,6 @@ def test_parallel(num_threads=2, kwargs_list=None):
    https://github.com/scikit-image/scikit-image/pull/1519

    """
-
    assert num_threads > 0
    has_kwargs_list = kwargs_list is not None
    if has_kwargs_list:
@@ -2685,7 +2680,6 @@ def set_timezone(tz: str):
    ...
    'EDT'
    """
-
    import os
    import time

5 changes: 0 additions & 5 deletions pandas/compat/numpy/function.py
@@ -99,7 +99,6 @@ def validate_argmin_with_skipna(skipna, args, kwargs):
    'skipna' parameter is either an instance of ndarray or
    is None, since 'skipna' itself should be a boolean
    """
-
    skipna, args = process_skipna(skipna, args)
    validate_argmin(args, kwargs)
    return skipna
@@ -113,7 +112,6 @@ def validate_argmax_with_skipna(skipna, args, kwargs):
    'skipna' parameter is either an instance of ndarray or
    is None, since 'skipna' itself should be a boolean
    """
-
    skipna, args = process_skipna(skipna, args)
    validate_argmax(args, kwargs)
    return skipna
@@ -151,7 +149,6 @@ def validate_argsort_with_ascending(ascending, args, kwargs):
    either integer type or is None, since 'ascending' itself should
    be a boolean
    """
-
    if is_integer(ascending) or ascending is None:
        args = (ascending,) + args
        ascending = True
@@ -173,7 +170,6 @@ def validate_clip_with_axis(axis, args, kwargs):
    so check if the 'axis' parameter is an instance of ndarray, since
    'axis' itself should either be an integer or None
    """
-
    if isinstance(axis, ndarray):
        args = (axis,) + args
        axis = None
@@ -298,7 +294,6 @@ def validate_take_with_convert(convert, args, kwargs):
    ndarray or 'None', so check if the 'convert' parameter is either
    an instance of ndarray or is None
    """
-
    if isinstance(convert, ndarray) or convert is None:
        args = (convert,) + args
        convert = True

1 change: 0 additions & 1 deletion pandas/compat/pickle_compat.py
@@ -229,7 +229,6 @@ def load(fh, encoding: Optional[str] = None, is_verbose: bool = False):
    encoding : an optional encoding
    is_verbose : show exception output
    """
-
    try:
        fh.seek(0)
        if encoding is not None:

2 changes: 0 additions & 2 deletions pandas/conftest.py
@@ -118,7 +118,6 @@ def ip():

    Will raise a skip if IPython is not installed.
    """
-
    pytest.importorskip("IPython", minversion="6.0.0")
    from IPython.core.interactiveshell import InteractiveShell

@@ -679,7 +678,6 @@ def any_nullable_int_dtype(request):
    * 'UInt64'
    * 'Int64'
    """
-
    return request.param


4 changes: 0 additions & 4 deletions pandas/core/algorithms.py
@@ -85,7 +85,6 @@ def _ensure_data(values, dtype=None):
    values : ndarray
    pandas_dtype : str or dtype
    """
-
    # we check some simple dtypes first
    if is_object_dtype(dtype):
        return ensure_object(np.asarray(values)), "object"
@@ -182,7 +181,6 @@ def _reconstruct_data(values, dtype, original):
    -------
    Index for extension types, otherwise ndarray casted to dtype
    """
-
    if is_extension_array_dtype(dtype):
        values = dtype.construct_array_type()._from_sequence(values)
    elif is_bool_dtype(dtype):
@@ -368,7 +366,6 @@ def unique(values):
    >>> pd.unique([('a', 'b'), ('b', 'a'), ('a', 'c'), ('b', 'a')])
    array([('a', 'b'), ('b', 'a'), ('a', 'c')], dtype=object)
    """
-
    values = _ensure_arraylike(values)

    if is_extension_array_dtype(values):
@@ -796,7 +793,6 @@ def duplicated(values, keep="first") -> np.ndarray:
    -------
    duplicated : ndarray
    """
-
    values, _ = _ensure_data(values)
    ndtype = values.dtype.name
    f = getattr(htable, f"duplicated_{ndtype}")

5 changes: 0 additions & 5 deletions pandas/core/apply.py
@@ -35,7 +35,6 @@ def frame_apply(
    kwds=None,
):
    """ construct and return a row or column based frame apply object """
-
    axis = obj._get_axis_number(axis)
    klass: Type[FrameApply]
    if axis == 0:
@@ -144,7 +143,6 @@ def agg_axis(self) -> "Index":

    def get_result(self):
        """ compute the results """
-
        # dispatch to agg
        if is_list_like(self.f) or is_dict_like(self.f):
            return self.obj.aggregate(self.f, axis=self.axis, *self.args, **self.kwds)
@@ -193,7 +191,6 @@ def apply_empty_result(self):
        we will try to apply the function to an empty
        series in order to see if this is a reduction function
        """
-
        # we are not asked to reduce or infer reduction
        # so just return a copy of the existing object
        if self.result_type not in ["reduce", None]:
@@ -396,7 +393,6 @@ def wrap_results_for_axis(
        self, results: ResType, res_index: "Index"
    ) -> "DataFrame":
        """ return the results for the rows """
-
        result = self.obj._constructor(data=results)

        if not isinstance(results[0], ABCSeries):
@@ -457,7 +453,6 @@

    def infer_to_same_shape(self, results: ResType, res_index: "Index") -> "DataFrame":
        """ infer the results to the same shape as the input object """
-
        result = self.obj._constructor(data=results)
        result = result.T

6 changes: 0 additions & 6 deletions pandas/core/arrays/categorical.py
@@ -695,7 +695,6 @@ def _set_categories(self, categories, fastpath=False):
        [a, c]
        Categories (2, object): [a, c]
        """
-
        if fastpath:
            new_dtype = CategoricalDtype._from_fastpath(categories, self.ordered)
        else:
@@ -1221,7 +1220,6 @@ def shape(self):
        -------
        shape : tuple
        """
-
        return tuple([len(self._codes)])

    def shift(self, periods, fill_value=None):
@@ -1378,7 +1376,6 @@ def isna(self):
        Categorical.notna : Boolean inverse of Categorical.isna.

        """
-
        ret = self._codes == -1
        return ret

@@ -1928,7 +1925,6 @@ def _repr_categories_info(self) -> str:
        """
        Returns a string representation of the footer.
        """
-
        category_strs = self._repr_categories()
        dtype = str(self.categories.dtype)
        levheader = f"Categories ({len(self.categories)}, {dtype}): "
@@ -2254,7 +2250,6 @@ def unique(self):
        Series.unique

        """
-
        # unlike np.unique, unique1d does not sort
        unique_codes = unique1d(self.codes)
        cat = self.copy()
@@ -2314,7 +2309,6 @@ def is_dtype_equal(self, other):
        -------
        bool
        """
-
        try:
            return hash(self.dtype) == hash(other.dtype)
        except (AttributeError, TypeError):

2 changes: 0 additions & 2 deletions pandas/core/arrays/datetimelike.py
@@ -500,7 +500,6 @@ def __getitem__(self, key):
        This getitem defers to the underlying array, which by-definition can
        only handle list-likes, slices, and integer scalars
        """
-
        is_int = lib.is_integer(key)
        if lib.is_scalar(key) and not is_int:
            raise IndexError(
@@ -892,7 +891,6 @@ def _maybe_mask_results(self, result, fill_value=iNaT, convert=None):

        This is an internal routine.
        """
-
        if self._hasnans:
            if convert:
                result = result.astype(convert)

2 changes: 0 additions & 2 deletions pandas/core/arrays/integer.py
@@ -147,7 +147,6 @@ def safe_cast(values, dtype, copy: bool):
    ints.

    """
-
    try:
        return values.astype(dtype, casting="safe", copy=copy)
    except TypeError:
@@ -601,7 +600,6 @@ def _maybe_mask_result(self, result, mask, other, op_name):
        other : scalar or array-like
        op_name : str
        """
-
        # if we have a float operand we are by-definition
        # a float result
        # or our op is a divide

1 change: 0 additions & 1 deletion pandas/core/arrays/period.py
@@ -624,7 +624,6 @@ def _addsub_int_array(
        -------
        result : PeriodArray
        """
-
        assert op in [operator.add, operator.sub]
        if op is operator.sub:
            other = -other

1 change: 0 additions & 1 deletion pandas/core/arrays/sparse/array.py
@@ -1503,7 +1503,6 @@ def make_sparse(arr, kind="block", fill_value=None, dtype=None, copy=False):
    -------
    (sparse_values, index, fill_value) : (ndarray, SparseIndex, Scalar)
    """
-
    arr = com.values_from_object(arr)

    if arr.ndim > 1:

2 changes: 0 additions & 2 deletions pandas/core/arrays/sparse/scipy_sparse.py
@@ -31,7 +31,6 @@ def _to_ijv(ss, row_levels=(0,), column_levels=(1,), sort_labels=False):

    def get_indexers(levels):
        """ Return sparse coords and dense labels for subset levels """
-
        # TODO: how to do this better? cleanly slice nonnull_labels given the
        # coord
        values_ilabels = [tuple(x[i] for i in levels) for x in nonnull_labels.index]
@@ -90,7 +89,6 @@ def _sparse_series_to_coo(ss, row_levels=(0,), column_levels=(1,), sort_labels=F
    levels row_levels, column_levels as the row and column
    labels respectively. Returns the sparse_matrix, row and column labels.
    """
-
    import scipy.sparse

    if ss.index.nlevels < 2:

2 changes: 0 additions & 2 deletions pandas/core/common.py
@@ -337,7 +337,6 @@ def apply_if_callable(maybe_callable, obj, **kwargs):
    obj : NDFrame
    **kwargs
    """
-
    if callable(maybe_callable):
        return maybe_callable(obj, **kwargs)

@@ -412,7 +411,6 @@ def random_state(state=None):
    -------
    np.random.RandomState
    """
-
    if is_integer(state):
        return np.random.RandomState(state)
    elif isinstance(state, np.random.RandomState):
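
As a closing note, the same pattern can be located programmatically with just the standard library. The sketch below is only an illustration under stated assumptions (Python 3.8+ for `end_lineno`; the helper name `d202_violations` is made up), not the pydocstyle implementation this PR was checked against:

```python
import ast
import sys


def d202_violations(source: str):
    """Yield 1-based line numbers of blank lines that follow a docstring."""
    lines = source.splitlines()
    for node in ast.walk(ast.parse(source)):
        if not isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
            continue
        if ast.get_docstring(node) is None or len(node.body) < 2:
            continue
        doc_end = node.body[0].end_lineno  # last line of the docstring
        next_start = node.body[1].lineno   # first line of the next statement
        # Any physical blank line between the two is what D202 forbids.
        for lineno in range(doc_end + 1, next_start):
            if not lines[lineno - 1].strip():
                yield lineno


if __name__ == "__main__":
    for path in sys.argv[1:]:
        with open(path) as fh:
            for lineno in d202_violations(fh.read()):
                print(f"{path}:{lineno}: D202 blank line after docstring")
```

pydocstyle handles more edge cases (comments between the docstring and the first statement, decorators, overloads), so treat this as a rough cross-check rather than a replacement for the linter.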