Commit 663185e

TYP: sparse.pyi (#43514)
1 parent 2c4739e commit 663185e

2 files changed (+59, -12 lines)

pandas/_libs/sparse.pyi

Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
+from typing import (
+    Sequence,
+    TypeVar,
+)
+
+import numpy as np
+
+from pandas._typing import npt
+
+SparseIndexT = TypeVar("SparseIndexT", bound="SparseIndex")
+
+class SparseIndex:
+    length: int
+    npoints: int
+    def __init__(self): ...
+    @property
+    def ngaps(self) -> int: ...
+    @property
+    def nbytes(self) -> int: ...
+    def equals(self, other) -> bool: ...
+    def lookup(self, index: int) -> np.int32: ...
+    def lookup_array(self, indexer: npt.NDArray[np.int32]) -> npt.NDArray[np.int32]: ...
+    def to_int_index(self) -> IntIndex: ...
+    def to_block_index(self) -> BlockIndex: ...
+    def intersect(self: SparseIndexT, y_: SparseIndex) -> SparseIndexT: ...
+    def make_union(self: SparseIndexT, y_: SparseIndex) -> SparseIndexT: ...
+
+class IntIndex(SparseIndex):
+    indices: npt.NDArray[np.int32]
+    def __init__(
+        self, length: int, indices: Sequence[int], check_integrity: bool = True
+    ): ...
+
+class BlockIndex(SparseIndex):
+    nblocks: int
+    blocs: np.ndarray
+    blengths: np.ndarray
+    def __init__(self, length: int, blocs: np.ndarray, blengths: np.ndarray): ...
+
+def make_mask_object_ndarray(
+    arr: npt.NDArray[np.object_], fill_value
+) -> npt.NDArray[np.bool_]: ...
+def get_blocks(
+    indices: npt.NDArray[np.int32],
+) -> tuple[npt.NDArray[np.int32], npt.NDArray[np.int32]]: ...
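For reference, the stub types the runtime classes in pandas._libs.sparse. Below is a minimal sketch (not part of this commit) of how the two index kinds relate, assuming the extension module is importable and behaves as annotated; the commented output is the expected result for this input:

from pandas._libs.sparse import IntIndex

# An IntIndex stores the positions of the non-fill values explicitly.
int_idx = IntIndex(10, [0, 1, 2, 7, 8])
print(int_idx.npoints, int_idx.ngaps)        # expected: 5 5

# The equivalent BlockIndex stores the same positions as (start, length) runs.
block_idx = int_idx.to_block_index()
print(block_idx.blocs, block_idx.blengths)   # expected: [0 7] [3 2]

# Round-tripping through the other representation preserves the index.
assert block_idx.to_int_index().equals(int_idx)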

pandas/core/arrays/sparse/array.py

Lines changed: 14 additions & 12 deletions
@@ -1063,20 +1063,21 @@ def _concat_same_type(
        else:
            sp_kind = "integer"

+        sp_index: SparseIndex
        if sp_kind == "integer":
            indices = []

            for arr in to_concat:
-                idx = arr.sp_index.to_int_index().indices.copy()
-                idx += length  # TODO: wraparound
+                int_idx = arr.sp_index.to_int_index().indices.copy()
+                int_idx += length  # TODO: wraparound
                length += arr.sp_index.length

                values.append(arr.sp_values)
-                indices.append(idx)
+                indices.append(int_idx)

            data = np.concatenate(values)
-            indices = np.concatenate(indices)
-            sp_index = IntIndex(length, indices)
+            indices_arr = np.concatenate(indices)
+            sp_index = IntIndex(length, indices_arr)

        else:
            # when concatenating block indices, we don't claim that you'll
@@ -1088,18 +1089,18 @@ def _concat_same_type(
            blocs = []

            for arr in to_concat:
-                idx = arr.sp_index.to_block_index()
+                block_idx = arr.sp_index.to_block_index()

                values.append(arr.sp_values)
-                blocs.append(idx.blocs.copy() + length)
-                blengths.append(idx.blengths)
+                blocs.append(block_idx.blocs.copy() + length)
+                blengths.append(block_idx.blengths)
                length += arr.sp_index.length

            data = np.concatenate(values)
-            blocs = np.concatenate(blocs)
-            blengths = np.concatenate(blengths)
+            blocs_arr = np.concatenate(blocs)
+            blengths_arr = np.concatenate(blengths)

-            sp_index = BlockIndex(length, blocs, blengths)
+            sp_index = BlockIndex(length, blocs_arr, blengths_arr)

        return cls(data, sparse_index=sp_index, fill_value=fill_value)

@@ -1666,8 +1667,9 @@ def make_sparse(
    return sparsified_values, index, fill_value


-def make_sparse_index(length, indices, kind):
+def make_sparse_index(length, indices, kind) -> SparseIndex:

+    index: SparseIndex
    if kind == "block" or isinstance(kind, BlockIndex):
        locs, lens = splib.get_blocks(indices)
        index = BlockIndex(length, locs, lens)
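The annotations on sp_index and index give those variables a declared type shared by the IntIndex and BlockIndex branches. As a hedged usage sketch (not part of this commit), calling the internal helper directly shows the two kinds it can return; the import path pandas.core.arrays.sparse.array is internal and may change between pandas versions:

import numpy as np
from pandas.core.arrays.sparse.array import make_sparse_index

indices = np.array([0, 1, 2, 7, 8], dtype=np.int32)

# "integer" yields an IntIndex of explicit positions; "block" compresses the
# same positions into (blocs, blengths) runs via splib.get_blocks.
int_index = make_sparse_index(10, indices, kind="integer")
block_index = make_sparse_index(10, indices, kind="block")
print(type(int_index).__name__, type(block_index).__name__)   # expected: IntIndex BlockIndex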
