CLN: assorted follow-ups #49489

Merged: 2 commits, Nov 4, 2022

2 changes: 1 addition & 1 deletion doc/source/getting_started/install.rst
@@ -417,7 +417,7 @@ Dependency Minimum Version optional_extra Notes
PyTables 3.6.1 hdf5 HDF5-based reading / writing
blosc 1.21.0 hdf5 Compression for HDF5
zlib hdf5 Compression for HDF5
fastparquet 0.4.0 - Parquet reading / writing (pyarrow is default)
fastparquet 0.6.3 - Parquet reading / writing (pyarrow is default)
pyarrow 6.0.0 parquet, feather Parquet, ORC, and feather reading / writing
pyreadstat 1.1.2 spss SPSS files (.sav) reading
odfpy 1.4.1 excel Open document format (.odf, .ods, .odt) reading / writing
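
A short, hedged sketch of what the bumped minimum affects: pyarrow stays the default Parquet engine, and fastparquet (now >= 0.6.3) is only used when requested explicitly. The file names below are hypothetical.

```python
import pandas as pd

# pyarrow is the default engine; fastparquet is opted into explicitly
df = pd.read_parquet("data.parquet", engine="fastparquet")
df.to_parquet("out.parquet", engine="fastparquet")
```
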
1 change: 1 addition & 0 deletions pandas/core/dtypes/astype.py
@@ -215,6 +215,7 @@ def astype_array(values: ArrayLike, dtype: DtypeObj, copy: bool = False) -> Arra
# Series.astype behavior pre-2.0 did
# values.tz_localize("UTC").tz_convert(dtype.tz)
# which did not match the DTA/DTI behavior.
# We special-case here to give a Series-specific exception message.
raise TypeError(
"Cannot use .astype to convert from timezone-naive dtype to "
"timezone-aware dtype. Use ser.dt.tz_localize instead."
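
The added comment explains why the cast raises here with a Series-specific message. A minimal sketch of the behavior the message points users to:

```python
import pandas as pd

ser = pd.Series(pd.date_range("2022-01-01", periods=3))  # timezone-naive

# ser.astype("datetime64[ns, UTC]") raises the TypeError above;
# the recommended spelling localizes explicitly instead:
localized = ser.dt.tz_localize("UTC")
```
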
2 changes: 1 addition & 1 deletion pandas/core/dtypes/cast.py
@@ -1414,7 +1414,7 @@ def _ensure_nanosecond_dtype(dtype: DtypeObj) -> DtypeObj:


# TODO: other value-dependent functions to standardize here include
# dtypes.concat.cast_to_common_type and Index._find_common_type_compat
# Index._find_common_type_compat
def find_result_type(left: ArrayLike, right: Any) -> DtypeObj:
"""
Find the type/dtype for the result of an operation between these objects.
48 changes: 6 additions & 42 deletions pandas/core/dtypes/concat.py
@@ -3,30 +3,20 @@
"""
from __future__ import annotations

from typing import (
TYPE_CHECKING,
cast,
)
from typing import TYPE_CHECKING
import warnings

import numpy as np

from pandas._typing import (
ArrayLike,
AxisInt,
DtypeObj,
)
from pandas._typing import AxisInt
from pandas.util._exceptions import find_stack_level

from pandas.core.dtypes.astype import astype_array
from pandas.core.dtypes.cast import (
common_dtype_categorical_compat,
find_common_type,
)
from pandas.core.dtypes.common import (
is_dtype_equal,
is_sparse,
)
from pandas.core.dtypes.common import is_dtype_equal
from pandas.core.dtypes.dtypes import (
DatetimeTZDtype,
ExtensionDtype,
@@ -39,34 +29,6 @@

if TYPE_CHECKING:
from pandas.core.arrays import Categorical
from pandas.core.arrays.sparse import SparseArray


def cast_to_common_type(arr: ArrayLike, dtype: DtypeObj) -> ArrayLike:
"""
Helper function for `arr.astype(common_dtype)` but handling all special
cases.
"""
if is_dtype_equal(arr.dtype, dtype):
return arr

if is_sparse(arr) and not is_sparse(dtype):
# TODO(2.0): remove special case once SparseArray.astype deprecation
# is enforced.
# problem case: SparseArray.astype(dtype) doesn't follow the specified
# dtype exactly, but converts this to Sparse[dtype] -> first manually
# convert to dense array

# error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has incompatible type
# "Union[dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], None, type, _
# SupportsDType[dtype[Any]], str, Union[Tuple[Any, int], Tuple[Any,
# Union[SupportsIndex, Sequence[SupportsIndex]]], List[Any], _DTypeDict,
# Tuple[Any, Any]]]" [arg-type]
arr = cast("SparseArray", arr)
return arr.to_dense().astype(dtype, copy=False) # type: ignore[arg-type]

# astype_array includes ensure_wrapped_if_datetimelike
return astype_array(arr, dtype=dtype, copy=False)


def concat_compat(to_concat, axis: AxisInt = 0, ea_compat_axis: bool = False):
@@ -126,7 +88,9 @@ def is_nonempty(x) -> bool:
if not single_dtype:
target_dtype = find_common_type([x.dtype for x in to_concat])
target_dtype = common_dtype_categorical_compat(to_concat, target_dtype)
to_concat = [cast_to_common_type(arr, target_dtype) for arr in to_concat]
to_concat = [
astype_array(arr, target_dtype, copy=False) for arr in to_concat
]

if isinstance(to_concat[0], ABCExtensionArray):
# TODO: what about EA-backed Index?
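
With cast_to_common_type removed, concatenation casts each piece through astype_array directly. A small sketch of the new path using plain NumPy inputs (these are internal pandas APIs and may change):

```python
import numpy as np

from pandas.core.dtypes.astype import astype_array
from pandas.core.dtypes.cast import find_common_type

to_concat = [
    np.array([1, 2, 3], dtype="int64"),
    np.array([1.5, 2.5], dtype="float64"),
]

# find the shared dtype, then cast every piece to it before concatenating
target_dtype = find_common_type([arr.dtype for arr in to_concat])
casted = [astype_array(arr, target_dtype, copy=False) for arr in to_concat]
result = np.concatenate(casted)  # float64
```
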
1 change: 0 additions & 1 deletion pandas/core/frame.py
@@ -5046,7 +5046,6 @@ def align(
broadcast_axis=broadcast_axis,
)

# error: Signature of "set_axis" incompatible with supertype "NDFrame"
@Appender(
"""
Examples
2 changes: 0 additions & 2 deletions pandas/core/indexes/category.py
@@ -4,7 +4,6 @@
Any,
Hashable,
)
import warnings

import numpy as np

@@ -18,7 +17,6 @@
cache_readonly,
doc,
)
from pandas.util._exceptions import find_stack_level

from pandas.core.dtypes.common import (
is_categorical_dtype,
22 changes: 1 addition & 21 deletions pandas/core/indexes/multi.py
@@ -14,7 +14,6 @@
Sequence,
Tuple,
cast,
overload,
)
import warnings

@@ -3739,28 +3738,9 @@ def isin(self, values, level=None) -> npt.NDArray[np.bool_]:
return np.zeros(len(levs), dtype=np.bool_)
return levs.isin(values)

@overload
def set_names(
self, names, *, level=..., inplace: Literal[False] = ...
) -> MultiIndex:
...

@overload
def set_names(self, names, *, level=..., inplace: Literal[True]) -> None:
...

@overload
def set_names(self, names, *, level=..., inplace: bool = ...) -> MultiIndex | None:
...

def set_names(
self, names, *, level=None, inplace: bool = False
) -> MultiIndex | None:
return super().set_names(names=names, level=level, inplace=inplace)

# error: Incompatible types in assignment (expression has type overloaded function,
# base class "Index" defined the type as "Callable[[Index, Any, bool], Any]")
rename = set_names # type: ignore[assignment]
rename = Index.set_names # type: ignore[assignment]

# ---------------------------------------------------------------
# Arithmetic/Numeric Methods - Disabled
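
With the overloads gone, MultiIndex.rename is simply Index.set_names, so the two spellings below behave identically (a small usage sketch):

```python
import pandas as pd

mi = pd.MultiIndex.from_tuples([("a", 1), ("b", 2)], names=["letter", "number"])

renamed = mi.rename(["first", "second"])
same = mi.set_names(["first", "second"])
assert list(renamed.names) == ["first", "second"] and renamed.equals(same)
```
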
8 changes: 3 additions & 5 deletions pandas/core/internals/concat.py
@@ -24,6 +24,7 @@
)
from pandas.util._decorators import cache_readonly

from pandas.core.dtypes.astype import astype_array
from pandas.core.dtypes.cast import (
ensure_dtype_can_hold_na,
find_common_type,
@@ -34,10 +35,7 @@
is_scalar,
needs_i8_conversion,
)
from pandas.core.dtypes.concat import (
cast_to_common_type,
concat_compat,
)
from pandas.core.dtypes.concat import concat_compat
from pandas.core.dtypes.dtypes import (
DatetimeTZDtype,
ExtensionDtype,
@@ -153,7 +151,7 @@ def concat_arrays(to_concat: list) -> ArrayLike:
to_concat = [
arr.to_array(target_dtype)
if isinstance(arr, NullArrayProxy)
else cast_to_common_type(arr, target_dtype)
else astype_array(arr, target_dtype, copy=False)
for arr in to_concat
]

1 change: 0 additions & 1 deletion pandas/core/series.py
@@ -4925,7 +4925,6 @@ def rename(
else:
return self._set_name(index, inplace=inplace)

# error: Signature of "set_axis" incompatible with supertype "NDFrame"
@Appender(
"""
Examples
10 changes: 1 addition & 9 deletions pandas/core/sorting.py
@@ -11,7 +11,6 @@
Sequence,
cast,
)
import warnings

import numpy as np

@@ -341,14 +340,7 @@ def lexsort_indexer(
keys = [ensure_key_mapped(k, key) for k in keys]

for k, order in zip(keys, orders):
with warnings.catch_warnings():
# TODO(2.0): unnecessary once deprecation is enforced
# GH#45618 don't issue warning user can't do anything about
warnings.filterwarnings(
"ignore", ".*(SparseArray|SparseDtype).*", category=FutureWarning
)

cat = Categorical(k, ordered=True)
cat = Categorical(k, ordered=True)

if na_position not in ["last", "first"]:
raise ValueError(f"invalid na_position: {na_position}")
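
With the SparseArray deprecation enforced, the warning filter around the Categorical construction is gone. A minimal sketch of the step it wrapped, assuming a plain string key:

```python
import numpy as np

from pandas import Categorical

key = np.array(["b", "a", "c", "a"])
cat = Categorical(key, ordered=True)
# the ordered integer codes are what lexsort_indexer ultimately feeds to np.lexsort
codes = cat.codes  # array([1, 0, 2, 0], dtype=int8)
```
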
14 changes: 4 additions & 10 deletions pandas/io/formats/style.py
@@ -3604,15 +3604,11 @@ def _background_gradient(
rng = smax - smin
# extend lower / upper bounds, compresses color range
norm = mpl.colors.Normalize(smin - (rng * low), smax + (rng * high))
from pandas.plotting._matplotlib.compat import mpl_ge_3_6_0

if mpl_ge_3_6_0():
if cmap is None:
rgbas = mpl.colormaps[mpl.rcParams["image.cmap"]](norm(gmap))
else:
rgbas = mpl.colormaps.get_cmap(cmap)(norm(gmap))
if cmap is None:
rgbas = mpl.colormaps[mpl.rcParams["image.cmap"]](norm(gmap))
else:
rgbas = plt.cm.get_cmap(cmap)(norm(gmap))
rgbas = mpl.colormaps.get_cmap(cmap)(norm(gmap))

def relative_luminance(rgba) -> float:
"""
@@ -3891,10 +3887,8 @@ def css_calc(x, left: float, right: float, align: str, color: str | list | tuple
if cmap is not None:
# use the matplotlib colormap input
with _mpl(Styler.bar) as (plt, mpl):
from pandas.plotting._matplotlib.compat import mpl_ge_3_6_0

cmap = (
(mpl.colormaps[cmap] if mpl_ge_3_6_0() else mpl.cm.get_cmap(cmap))
mpl.colormaps[cmap]
if isinstance(cmap, str)
else cmap # assumed to be a Colormap instance as documented
)
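
Both gradient paths now assume matplotlib >= 3.6 and go through the colormap registry rather than the removed version gate. A short sketch of the two lookups, assuming matplotlib >= 3.6 is installed:

```python
import matplotlib as mpl

# the default colormap comes from rcParams; named colormaps resolve via get_cmap
default_cmap = mpl.colormaps[mpl.rcParams["image.cmap"]]
named_cmap = mpl.colormaps.get_cmap("viridis")
```
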
7 changes: 0 additions & 7 deletions pandas/plotting/_matplotlib/compat.py
@@ -1,8 +1,6 @@
# being a bit too dynamic
from __future__ import annotations

import operator

from pandas.util.version import Version


@@ -15,8 +13,3 @@ def inner():
return op(Version(mpl.__version__), Version(version))

return inner


mpl_ge_3_4_0 = _mpl_version("3.4.0", operator.ge)
mpl_ge_3_5_0 = _mpl_version("3.5.0", operator.ge)
mpl_ge_3_6_0 = _mpl_version("3.6.0", operator.ge)
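
The removed constants were thin wrappers around a version comparison; a caller that still needs such a gate can inline it. A sketch of the equivalent check, using the vendored Version class this module already imports:

```python
import matplotlib as mpl

from pandas.util.version import Version

# equivalent of the removed mpl_ge_3_6_0() gate, inlined at a call site
if Version(mpl.__version__) >= Version("3.6.0"):
    cmap = mpl.colormaps["viridis"]
```
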
16 changes: 3 additions & 13 deletions pandas/plotting/_matplotlib/core.py
@@ -55,7 +55,6 @@
from pandas.core.frame import DataFrame

from pandas.io.formats.printing import pprint_thing
from pandas.plotting._matplotlib.compat import mpl_ge_3_6_0
from pandas.plotting._matplotlib.converter import register_pandas_matplotlib_converters
from pandas.plotting._matplotlib.groupby import reconstruct_data_with_by
from pandas.plotting._matplotlib.misc import unpack_single_str_list
@@ -1229,19 +1228,13 @@ def _make_plot(self):
c_values = c

if self.colormap is not None:
if mpl_ge_3_6_0():
cmap = mpl.colormaps.get_cmap(self.colormap)
else:
cmap = self.plt.cm.get_cmap(self.colormap)
cmap = mpl.colormaps.get_cmap(self.colormap)
else:
# cmap is only used if c_values are integers, otherwise UserWarning
if is_integer_dtype(c_values):
# pandas uses colormap, matplotlib uses cmap.
cmap = "Greys"
if mpl_ge_3_6_0():
cmap = mpl.colormaps[cmap]
else:
cmap = self.plt.cm.get_cmap(cmap)
cmap = mpl.colormaps[cmap]
else:
cmap = None

@@ -1309,10 +1302,7 @@ def _make_plot(self) -> None:
ax = self.axes[0]
# pandas uses colormap, matplotlib uses cmap.
cmap = self.colormap or "BuGn"
if mpl_ge_3_6_0():
cmap = mpl.colormaps.get_cmap(cmap)
else:
cmap = self.plt.cm.get_cmap(cmap)
cmap = mpl.colormaps.get_cmap(cmap)
cb = self.kwds.pop("colorbar", True)

if C is None:
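
mpl.colormaps.get_cmap accepts either a registered name or an existing Colormap instance, which is why the scatter and hexbin paths can drop the old version gate and pass self.colormap straight through (a sketch, assuming matplotlib >= 3.6):

```python
import matplotlib as mpl

cmap = mpl.colormaps.get_cmap("BuGn")  # lookup by name
same = mpl.colormaps.get_cmap(cmap)    # Colormap instances pass through unchanged
assert same is cmap
```
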
8 changes: 1 addition & 7 deletions pandas/plotting/_matplotlib/style.py
@@ -10,7 +10,6 @@
import warnings

import matplotlib as mpl
from matplotlib import cm
import matplotlib.colors
import numpy as np

@@ -21,8 +20,6 @@

import pandas.core.common as com

from pandas.plotting._matplotlib.compat import mpl_ge_3_6_0

if TYPE_CHECKING:
from matplotlib.colors import Colormap

@@ -153,10 +150,7 @@ def _get_cmap_instance(colormap: str | Colormap) -> Colormap:
"""Get instance of matplotlib colormap."""
if isinstance(colormap, str):
cmap = colormap
if mpl_ge_3_6_0():
colormap = mpl.colormaps[colormap]
else:
colormap = cm.get_cmap(colormap)
colormap = mpl.colormaps[colormap]
if colormap is None:
raise ValueError(f"Colormap {cmap} is not recognized")
return colormap
12 changes: 2 additions & 10 deletions pandas/plotting/_matplotlib/tools.py
@@ -22,8 +22,6 @@
ABCSeries,
)

from pandas.plotting._matplotlib import compat

if TYPE_CHECKING:
from matplotlib.axes import Axes
from matplotlib.axis import Axis
@@ -396,10 +394,7 @@ def handle_shared_axes(
row_num = lambda x: x.get_subplotspec().rowspan.start
col_num = lambda x: x.get_subplotspec().colspan.start

if compat.mpl_ge_3_4_0():
is_first_col = lambda x: x.get_subplotspec().is_first_col()
else:
is_first_col = lambda x: x.is_first_col()
is_first_col = lambda x: x.get_subplotspec().is_first_col()

if nrows > 1:
try:
@@ -421,10 +416,7 @@
except IndexError:
# if gridspec is used, ax.rowNum and ax.colNum may be different
# from layout shape. in this case, use last_row logic
if compat.mpl_ge_3_4_0():
is_last_row = lambda x: x.get_subplotspec().is_last_row()
else:
is_last_row = lambda x: x.is_last_row()
is_last_row = lambda x: x.get_subplotspec().is_last_row()
for ax in axarr:
if is_last_row(ax):
continue
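
With matplotlib >= 3.4 guaranteed, the subplot-position checks always go through the SubplotSpec. A small sketch of the calls now used unconditionally when hiding shared tick labels:

```python
import matplotlib.pyplot as plt

fig, axes = plt.subplots(2, 2, sharex=True, sharey=True)
for ax in axes.flat:
    ss = ax.get_subplotspec()
    if not ss.is_first_col():
        ax.yaxis.set_tick_params(labelleft=False)    # only label the first column
    if not ss.is_last_row():
        ax.xaxis.set_tick_params(labelbottom=False)  # only label the last row
```
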