Commit 4a4292b

TST: move some tests to slow
1 parent 84e2f31 commit 4a4292b
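
Each diff below marks a long-running test with numpy's @slow decorator (adding the import where it was missing). For context, the decorator only tags the test function so that attribute-based test selection can skip it; the following is a minimal, self-contained sketch of that pattern, not code from this commit, and the nosetests invocation is only an example:

# Roughly what numpy.testing.decorators.slow does: attach a "slow"
# attribute that nose's attrib plugin can filter on.
def slow(test_func):
    test_func.slow = True
    return test_func

@slow
def test_expensive_path():
    # stand-in for a long-running check
    assert sum(range(10**6)) == (10**6 - 1) * 10**6 // 2

# A quick test run can then exclude the tagged tests, e.g. with nose:
#   nosetests -A "not slow" pandas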

File tree

pandas/tests/test_graphics.py
pandas/tests/test_groupby.py
pandas/tests/test_indexing.py
pandas/tools/tests/test_merge.py

4 files changed: +62 −58 lines changed

pandas/tests/test_graphics.py

Lines changed: 1 addition & 0 deletions
@@ -3169,6 +3169,7 @@ def test_pie_df_nan(self):
                               ax.get_legend().get_texts()],
                              base_expected[:i] + base_expected[i+1:])
 
+    @slow
     def test_errorbar_plot(self):
         d = {'x': np.arange(12), 'y': np.arange(12, 0, -1)}
         df = DataFrame(d)

pandas/tests/test_groupby.py

Lines changed: 1 addition & 0 deletions
@@ -1655,6 +1655,7 @@ def check_nunique(df, keys):
         check_nunique(frame, ['jim'])
         check_nunique(frame, ['jim', 'joe'])
 
+    @slow
     def test_series_groupby_value_counts(self):
         from itertools import product
pandas/tests/test_indexing.py

Lines changed: 58 additions & 58 deletions
@@ -25,6 +25,7 @@
 
 import pandas.util.testing as tm
 from pandas import date_range
+from numpy.testing.decorators import slow
 
 _verbose = False

@@ -1689,74 +1690,71 @@ def test_multiindex_perf_warn(self):
         with tm.assert_produces_warning(PerformanceWarning):
             _ = df.loc[(0,)]
 
+    @slow
     def test_multiindex_get_loc(self): # GH7724, GH2646
 
-        # ignore the warning here
-        warnings.simplefilter('ignore', PerformanceWarning)
+        with warnings.catch_warnings(PerformanceWarning):
 
-        # test indexing into a multi-index before & past the lexsort depth
-        from numpy.random import randint, choice, randn
-        cols = ['jim', 'joe', 'jolie', 'joline', 'jolia']
+            # test indexing into a multi-index before & past the lexsort depth
+            from numpy.random import randint, choice, randn
+            cols = ['jim', 'joe', 'jolie', 'joline', 'jolia']
 
-        def validate(mi, df, key):
-            mask = np.ones(len(df)).astype('bool')
+            def validate(mi, df, key):
+                mask = np.ones(len(df)).astype('bool')
 
-            # test for all partials of this key
-            for i, k in enumerate(key):
-                mask &= df.iloc[:, i] == k
+                # test for all partials of this key
+                for i, k in enumerate(key):
+                    mask &= df.iloc[:, i] == k
 
-                if not mask.any():
-                    self.assertNotIn(key[:i+1], mi.index)
-                    continue
-
-                self.assertIn(key[:i+1], mi.index)
-                right = df[mask].copy()
+                    if not mask.any():
+                        self.assertNotIn(key[:i+1], mi.index)
+                        continue
 
-                if i + 1 != len(key): # partial key
-                    right.drop(cols[:i+1], axis=1, inplace=True)
-                    right.set_index(cols[i+1:-1], inplace=True)
-                    assert_frame_equal(mi.loc[key[:i+1]], right)
+                    self.assertIn(key[:i+1], mi.index)
+                    right = df[mask].copy()
 
-                else: # full key
-                    right.set_index(cols[:-1], inplace=True)
-                    if len(right) == 1: # single hit
-                        right = Series(right['jolia'].values,
-                                       name=right.index[0], index=['jolia'])
-                        assert_series_equal(mi.loc[key[:i+1]], right)
-                    else: # multi hit
+                    if i + 1 != len(key): # partial key
+                        right.drop(cols[:i+1], axis=1, inplace=True)
+                        right.set_index(cols[i+1:-1], inplace=True)
                         assert_frame_equal(mi.loc[key[:i+1]], right)
 
-        def loop(mi, df, keys):
-            for key in keys:
-                validate(mi, df, key)
-
-        n, m = 1000, 50
-
-        vals = [randint(0, 10, n), choice(list('abcdefghij'), n),
-                choice(pd.date_range('20141009', periods=10).tolist(), n),
-                choice(list('ZYXWVUTSRQ'), n), randn(n)]
-        vals = list(map(tuple, zip(*vals)))
-
-        # bunch of keys for testing
-        keys = [randint(0, 11, m), choice(list('abcdefghijk'), m),
-                choice(pd.date_range('20141009', periods=11).tolist(), m),
-                choice(list('ZYXWVUTSRQP'), m)]
-        keys = list(map(tuple, zip(*keys)))
-        keys += list(map(lambda t: t[:-1], vals[::n//m]))
-
-        # covers both unique index and non-unique index
-        df = pd.DataFrame(vals, columns=cols)
-        a, b = pd.concat([df, df]), df.drop_duplicates(subset=cols[:-1])
-
-        for frame in a, b:
-            for i in range(5): # lexsort depth
-                df = frame.copy() if i == 0 else frame.sort_values(by=cols[:i])
-                mi = df.set_index(cols[:-1])
-                assert not mi.index.lexsort_depth < i
-                loop(mi, df, keys)
-
-        # restore
-        warnings.simplefilter('always', PerformanceWarning)
+                    else: # full key
+                        right.set_index(cols[:-1], inplace=True)
+                        if len(right) == 1: # single hit
+                            right = Series(right['jolia'].values,
+                                           name=right.index[0], index=['jolia'])
+                            assert_series_equal(mi.loc[key[:i+1]], right)
+                        else: # multi hit
+                            assert_frame_equal(mi.loc[key[:i+1]], right)
+
+            def loop(mi, df, keys):
+                for key in keys:
+                    validate(mi, df, key)
+
+            n, m = 1000, 50
+
+            vals = [randint(0, 10, n), choice(list('abcdefghij'), n),
+                    choice(pd.date_range('20141009', periods=10).tolist(), n),
+                    choice(list('ZYXWVUTSRQ'), n), randn(n)]
+            vals = list(map(tuple, zip(*vals)))
+
+            # bunch of keys for testing
+            keys = [randint(0, 11, m), choice(list('abcdefghijk'), m),
+                    choice(pd.date_range('20141009', periods=11).tolist(), m),
+                    choice(list('ZYXWVUTSRQP'), m)]
+            keys = list(map(tuple, zip(*keys)))
+            keys += list(map(lambda t: t[:-1], vals[::n//m]))
+
+            # covers both unique index and non-unique index
+            df = pd.DataFrame(vals, columns=cols)
+            a, b = pd.concat([df, df]), df.drop_duplicates(subset=cols[:-1])
+
+            for frame in a, b:
+                for i in range(5): # lexsort depth
+                    df = frame.copy() if i == 0 else frame.sort_values(by=cols[:i])
+                    mi = df.set_index(cols[:-1])
+                    assert not mi.index.lexsort_depth < i
+                    loop(mi, df, keys)
 
     def test_series_getitem_multiindex(self):
@@ -4653,13 +4651,15 @@ def test_indexing_dtypes_on_empty(self):
         assert_series_equal(df2.loc[:,'a'], df2.iloc[:,0])
         assert_series_equal(df2.loc[:,'a'], df2.ix[:,0])
 
+    @slow
     def test_large_dataframe_indexing(self):
         #GH10692
         result = DataFrame({'x': range(10**6)},dtype='int64')
         result.loc[len(result)] = len(result) + 1
         expected = DataFrame({'x': range(10**6 + 1)},dtype='int64')
         assert_frame_equal(result, expected)
 
+    @slow
     def test_large_mi_dataframe_indexing(self):
         #GH10645
         result = MultiIndex.from_arrays([range(10**6), range(10**6)])
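
Besides the @slow markers, the large test_indexing.py hunk also changes how the PerformanceWarning is silenced: rather than flipping the global filter with warnings.simplefilter and restoring it at the end of the test, the suppression is scoped to a context manager. The usual standard-library idiom for scoping a filter looks roughly like the sketch below; it is illustrative only (the commit's code passes the warning class directly to warnings.catch_warnings, and the PerformanceWarning class here is a stand-in, not the pandas one):

import warnings

class PerformanceWarning(UserWarning):
    # stand-in for pandas' PerformanceWarning so the sketch is self-contained
    pass

with warnings.catch_warnings():
    # The filter change lives only inside this block; on exit,
    # catch_warnings() restores the previous warning state, so a manual
    # "restore" step like the one removed above becomes unnecessary.
    warnings.simplefilter('ignore', PerformanceWarning)
    warnings.warn("indexing past lexsort depth may impact performance",
                  PerformanceWarning)  # silenced here

warnings.warn("outside the block", PerformanceWarning)  # visible again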

pandas/tools/tests/test_merge.py

Lines changed: 2 additions & 0 deletions
@@ -20,6 +20,7 @@
 from pandas import isnull, DataFrame, Index, MultiIndex, Panel, Series, date_range, read_table, read_csv
 import pandas.algos as algos
 import pandas.util.testing as tm
+from numpy.testing.decorators import slow
 
 a_ = np.array
 
@@ -1410,6 +1411,7 @@ def test_merge_na_keys(self):
 
         tm.assert_frame_equal(result, expected)
 
+    @slow
     def test_int64_overflow_issues(self):
         from itertools import product
         from collections import defaultdict
