@@ -53,36 +53,18 @@ def time_read_json_lines(self, index):
53
53
def time_read_json_lines_concat(self, index):
    """Benchmark reading line-delimited JSON in 25000-row chunks and concatenating them.

    ``index`` is the asv parametrization value; the body does not use it.
    Reads ``self.fname`` (a newline-delimited JSON file written by setup code
    outside this view) via a chunked reader and concatenates all chunks.
    """
    concat(read_json(self.fname, orient="records", lines=True, chunksize=25000))
55
55
56
- def time_read_json_lines_concat_hundred (self , index ):
57
- concat (read_json (self .fname , orient = "records" , lines = True , chunksize = 100 ))
58
-
59
- def time_read_json_lines_concat_ten_thousand (self , index ):
60
- concat (read_json (self .fname , orient = "records" , lines = True , chunksize = 10000 ))
61
-
62
56
def time_read_json_lines_nrows(self, index):
    """Benchmark reading only the first 25000 rows of a line-delimited JSON file.

    ``index`` is the asv parametrization value; the body does not use it.
    Post-diff state of this method: the old 15000/45000-row variants were
    consolidated into a single ``nrows=25000`` benchmark.
    """
    read_json(self.fname, orient="records", lines=True, nrows=25000)
67
58
68
59
def peakmem_read_json_lines(self, index):
    """Peak-memory benchmark: read an entire line-delimited JSON file at once.

    ``index`` is the asv parametrization value; the body does not use it.
    """
    read_json(self.fname, orient="records", lines=True)
70
61
71
62
def peakmem_read_json_lines_concat(self, index):
    """Peak-memory benchmark: chunked (25000-row) line-delimited JSON read + concat.

    ``index`` is the asv parametrization value; the body does not use it.
    Mirrors ``time_read_json_lines_concat`` but measured for peak memory.
    """
    concat(read_json(self.fname, orient="records", lines=True, chunksize=25000))
73
64
74
- def peakmem_read_json_lines_concat_hundred (self , index ):
75
- concat (read_json (self .fname , orient = "records" , lines = True , chunksize = 100 ))
76
-
77
- def peakmem_read_json_lines_concat_ten_thousand (self , index ):
78
- concat (read_json (self .fname , orient = "records" , lines = True , chunksize = 10000 ))
79
-
80
65
def peakmem_read_json_lines_nrows(self, index):
    """Peak-memory benchmark: read only the first 15000 rows of line-delimited JSON.

    ``index`` is the asv parametrization value; the body does not use it.
    NOTE(review): the ``time_`` counterpart uses ``nrows=25000`` while this uses
    15000 — the diff left them inconsistent; confirm whether that is intended.
    """
    read_json(self.fname, orient="records", lines=True, nrows=15000)
82
67
83
- def peakmem_read_json_lines_nrows_larger (self , index ):
84
- read_json (self .fname , orient = "records" , lines = True , nrows = 45000 )
85
-
86
68
87
69
class ToJSON (BaseIO ):
88
70
0 commit comments