@@ -35,6 +35,7 @@ impl PaginationOptions {
         PaginationOptionsBuilder {
             limit_page_numbers: None,
             enable_seek: false,
+            enable_pages: true,
         }
     }

@@ -49,6 +50,7 @@ impl PaginationOptions {

 pub(crate) struct PaginationOptionsBuilder {
     limit_page_numbers: Option<Arc<App>>,
+    enable_pages: bool,
     enable_seek: bool,
 }

@@ -58,6 +60,11 @@ impl PaginationOptionsBuilder {
         self
     }

+    pub(crate) fn enable_pages(mut self, enable: bool) -> Self {
+        self.enable_pages = enable;
+        self
+    }
+
     pub(crate) fn enable_seek(mut self, enable: bool) -> Self {
         self.enable_seek = enable;
         self
@@ -75,34 +82,38 @@ impl PaginationOptionsBuilder {
         }

         let page = if let Some(s) = page_param {
-            let numeric_page = s.parse().map_err(|e| bad_request(&e))?;
-            if numeric_page < 1 {
-                return Err(bad_request(&format_args!(
-                    "page indexing starts from 1, page {} is invalid",
-                    numeric_page,
-                )));
-            }
+            if self.enable_pages {
+                let numeric_page = s.parse().map_err(|e| bad_request(&e))?;
+                if numeric_page < 1 {
+                    return Err(bad_request(&format_args!(
+                        "page indexing starts from 1, page {} is invalid",
+                        numeric_page,
+                    )));
+                }

-            if numeric_page > MAX_PAGE_BEFORE_SUSPECTED_BOT {
-                req.log_metadata("bot", "suspected");
-            }
+                if numeric_page > MAX_PAGE_BEFORE_SUSPECTED_BOT {
+                    req.log_metadata("bot", "suspected");
+                }

-            // Block large offsets for known violators of the crawler policy
-            if let Some(app) = self.limit_page_numbers {
-                let config = &app.config;
-                let user_agent = request_header(req, header::USER_AGENT);
-                if numeric_page > config.max_allowed_page_offset
-                    && config
-                        .page_offset_ua_blocklist
-                        .iter()
-                        .any(|blocked| user_agent.contains(blocked))
-                {
-                    add_custom_metadata(req, "cause", "large page offset");
-                    return Err(bad_request("requested page offset is too large"));
+                // Block large offsets for known violators of the crawler policy
+                if let Some(app) = self.limit_page_numbers {
+                    let config = &app.config;
+                    let user_agent = request_header(req, header::USER_AGENT);
+                    if numeric_page > config.max_allowed_page_offset
+                        && config
+                            .page_offset_ua_blocklist
+                            .iter()
+                            .any(|blocked| user_agent.contains(blocked))
+                    {
+                        add_custom_metadata(req, "cause", "large page offset");
+                        return Err(bad_request("requested page offset is too large"));
+                    }
                 }
-            }

-            Page::Numeric(numeric_page)
+                Page::Numeric(numeric_page)
+            } else {
+                return Err(bad_request("?page= is not supported for this request"));
+            }
         } else if let Some(s) = seek_param {
             if self.enable_seek {
                 Page::Seek(RawSeekPayload(s.clone()))
@@ -348,6 +359,15 @@ mod tests {
         );
     }

+    #[test]
+    fn disabled_pages() {
+        assert_pagination_error(
+            PaginationOptions::builder().enable_pages(false),
+            "page=1",
+            "?page= is not supported for this request",
+        );
+    }
+
     #[test]
     fn test_seek_encode_and_decode() {
         // Encoding produces the results we expect
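
A minimal usage sketch of the new builder flag from a controller's point of view: a seek-only endpoint disables numeric pages so that `?page=` requests are rejected up front while `?seek=` keeps working. The `gather` method name and the `req` handle here are assumptions for illustration, not taken from this diff.

// Sketch: seek-only pagination for a hypothetical endpoint (assumed `gather`/`req`).
let pagination = PaginationOptions::builder()
    .enable_pages(false) // reject ?page= with "?page= is not supported for this request"
    .enable_seek(true)   // continue to accept ?seek=
    .gather(req)?;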