// We only define stack probing for these architectures today.
#![cfg(any(target_arch = "x86_64", target_arch = "x86"))]

- <<<<<<< HEAD
- // SAFETY: defined in this module.
- // FIXME(extern_custom): the ABI is not correct.
- unsafe extern "C" {
-     pub fn __rust_probestack();
- }
-
- // A wrapper for our implementation of __rust_probestack, which allows us to
- // keep the assembly inline while controlling all CFI directives in the assembly
- // emitted for the function.
- //
- // This is the ELF version.
- #[cfg(not(any(target_vendor = "apple", target_os = "uefi")))]
- macro_rules! define_rust_probestack {
-     ($body:expr) => {
-         concat!(
-             "
-             .pushsection .text.__rust_probestack
-             .globl __rust_probestack
-             .type __rust_probestack, @function
-             .hidden __rust_probestack
-             __rust_probestack:
-             ",
-             $body,
-             "
-             .size __rust_probestack, . - __rust_probestack
-             .popsection
-             "
-         )
-     };
- }
-
- #[cfg(all(target_os = "uefi", target_arch = "x86_64"))]
- macro_rules! define_rust_probestack {
-     ($body:expr) => {
-         concat!(
-             "
-             .globl __rust_probestack
-             __rust_probestack:
-             ",
-             $body
-         )
-     };
- }
-
- // Same as above, but for Mach-O. Note that the triple underscore
- // is deliberate
- #[cfg(target_vendor = "apple")]
- macro_rules! define_rust_probestack {
-     ($body:expr) => {
-         concat!(
-             "
-             .globl ___rust_probestack
-             ___rust_probestack:
-             ",
-             $body
-         )
-     };
- }
-
- // In UEFI x86 arch, triple underscore is deliberate.
- #[cfg(all(target_os = "uefi", target_arch = "x86"))]
- macro_rules! define_rust_probestack {
-     ($body:expr) => {
-         concat!(
-             "
-             .globl ___rust_probestack
-             ___rust_probestack:
-             ",
-             $body
-         )
-     };
- }
-
- =======
- >>>>>>> d6dd2696443 (use `#[naked]` for `__rust_probestack`)
// Our goal here is to touch each page between %rsp+8 and %rsp+8-%rax,
// ensuring that if any pages are unmapped we'll make a page fault.
//
@@ -159,56 +83,56 @@ pub unsafe extern "C" fn __rust_probestack() {

    core::arch::naked_asm!(
        "
- .cfi_startproc
- pushq %rbp
- .cfi_adjust_cfa_offset 8
- .cfi_offset %rbp, -16
- movq %rsp, %rbp
- .cfi_def_cfa_register %rbp
-
- mov %rax,%r11 // duplicate %rax as we're clobbering %r11
-
- // Main loop, taken in one page increments. We're decrementing rsp by
- // a page each time until there's less than a page remaining. We're
- // guaranteed that this function isn't called unless there's more than a
- // page needed.
- //
- // Note that we're also testing against `8(%rsp)` to account for the 8
- // bytes pushed on the stack originally with our return address. Using
- // `8(%rsp)` simulates us testing the stack pointer in the caller's
- // context.
-
- // It's usually called when %rax >= 0x1000, but that's not always true.
- // Dynamic stack allocation, which is needed to implement unsized
- // rvalues, triggers stackprobe even if %rax < 0x1000.
- // Thus we have to check %r11 first to avoid segfault.
- cmp $0x1000,%r11
- jna 3f
- 2:
- sub $0x1000,%rsp
- test %rsp,8(%rsp)
- sub $0x1000,%r11
- cmp $0x1000,%r11
- ja 2b
-
- 3:
- // Finish up the last remaining stack space requested, getting the last
- // bits out of r11
- sub %r11,%rsp
- test %rsp,8(%rsp)
-
- // Restore the stack pointer to what it previously was when entering
- // this function. The caller will readjust the stack pointer after we
- // return.
- add %rax,%rsp
-
- leave
- .cfi_def_cfa_register %rsp
- .cfi_adjust_cfa_offset -8
+ .cfi_startproc
+ pushq %rbp
+ .cfi_adjust_cfa_offset 8
+ .cfi_offset %rbp, -16
+ movq %rsp, %rbp
+ .cfi_def_cfa_register %rbp
+
+ mov %rax,%r11 // duplicate %rax as we're clobbering %r11
+
+ // Main loop, taken in one page increments. We're decrementing rsp by
+ // a page each time until there's less than a page remaining. We're
+ // guaranteed that this function isn't called unless there's more than a
+ // page needed.
+ //
+ // Note that we're also testing against `8(%rsp)` to account for the 8
+ // bytes pushed on the stack originally with our return address. Using
+ // `8(%rsp)` simulates us testing the stack pointer in the caller's
+ // context.
+
+ // It's usually called when %rax >= 0x1000, but that's not always true.
+ // Dynamic stack allocation, which is needed to implement unsized
+ // rvalues, triggers stackprobe even if %rax < 0x1000.
+ // Thus we have to check %r11 first to avoid segfault.
+ cmp $0x1000,%r11
+ jna 3f
+ 2:
+ sub $0x1000,%rsp
+ test %rsp,8(%rsp)
+ sub $0x1000,%r11
+ cmp $0x1000,%r11
+ ja 2b
+
+ 3:
+ // Finish up the last remaining stack space requested, getting the last
+ // bits out of r11
+ sub %r11,%rsp
+ test %rsp,8(%rsp)
+
+ // Restore the stack pointer to what it previously was when entering
+ // this function. The caller will readjust the stack pointer after we
+ // return.
+ add %rax,%rsp
+
+ leave
+ .cfi_def_cfa_register %rsp
+ .cfi_adjust_cfa_offset -8
        ",
        ret!(),
        "
- .cfi_endproc
+ .cfi_endproc
        ",
        options(att_syntax)
    )
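
A rough illustration of what the probe loop above computes, as a standalone Rust sketch (illustrative only, not part of this change; `probe_offsets` is a hypothetical name): for a request of %rax bytes it lists the offsets below the caller's stack pointer that get touched, one per page, ignoring the extra 8-byte return-address adjustment that the `8(%rsp)` addressing handles.

// Illustration only: which offsets below the caller's stack pointer a
// probe for `requested` bytes ends up touching. Mirrors the `2:` loop
// (one full page per iteration) and the `3:` tail (the final, possibly
// partial, page).
fn probe_offsets(requested: usize) -> Vec<usize> {
    const PAGE: usize = 0x1000;
    let mut offsets = Vec::new();
    let mut remaining = requested;
    while remaining > PAGE {
        offsets.push(requested - remaining + PAGE); // `2:` step
        remaining -= PAGE;
    }
    offsets.push(requested); // `3:` tail
    offsets
}

fn main() {
    // A 0x2800-byte frame is probed at -0x1000, -0x2000 and -0x2800,
    // so every page the stack pointer will cross gets touched.
    assert_eq!(probe_offsets(0x2800), vec![0x1000, 0x2000, 0x2800]);
}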
@@ -227,35 +151,35 @@ pub unsafe extern "C" fn __rust_probestack() {
pub unsafe extern "C" fn __rust_probestack() {
    core::arch::naked_asm!(
        "
- .cfi_startproc
- push %ebp
- .cfi_adjust_cfa_offset 4
- .cfi_offset %ebp, -8
- mov %esp, %ebp
- .cfi_def_cfa_register %ebp
- push %ecx
- mov %eax,%ecx
-
- cmp $0x1000,%ecx
- jna 3f
- 2:
- sub $0x1000,%esp
- test %esp,8(%esp)
- sub $0x1000,%ecx
- cmp $0x1000,%ecx
- ja 2b
-
- 3:
- sub %ecx,%esp
- test %esp,8(%esp)
-
- add %eax,%esp
- pop %ecx
- leave
- .cfi_def_cfa_register %esp
- .cfi_adjust_cfa_offset -4
- ret
- .cfi_endproc
+ .cfi_startproc
+ push %ebp
+ .cfi_adjust_cfa_offset 4
+ .cfi_offset %ebp, -8
+ mov %esp, %ebp
+ .cfi_def_cfa_register %ebp
+ push %ecx
+ mov %eax,%ecx
+
+ cmp $0x1000,%ecx
+ jna 3f
+ 2:
+ sub $0x1000,%esp
+ test %esp,8(%esp)
+ sub $0x1000,%ecx
+ cmp $0x1000,%ecx
+ ja 2b
+
+ 3:
+ sub %ecx,%esp
+ test %esp,8(%esp)
+
+ add %eax,%esp
+ pop %ecx
+ leave
+ .cfi_def_cfa_register %esp
+ .cfi_adjust_cfa_offset -4
+ ret
+ .cfi_endproc
        ",
        options(att_syntax)
    )
@@ -279,40 +203,40 @@ pub unsafe extern "C" fn __rust_probestack() {
pub unsafe extern "C" fn __rust_probestack() {
    core::arch::naked_asm!(
        "
- .cfi_startproc
- push %ebp
- .cfi_adjust_cfa_offset 4
- .cfi_offset %ebp, -8
- mov %esp, %ebp
- .cfi_def_cfa_register %ebp
- push %ecx
- push %edx
- mov %eax,%ecx
-
- cmp $0x1000,%ecx
- jna 3f
- 2:
- sub $0x1000,%esp
- test %esp,8(%esp)
- sub $0x1000,%ecx
- cmp $0x1000,%ecx
- ja 2b
-
- 3:
- sub %ecx,%esp
- test %esp,8(%esp)
- mov 4(%ebp),%edx
- mov %edx, 12(%esp)
- add %eax,%esp
- pop %edx
- pop %ecx
- leave
-
- sub %eax, %esp
- .cfi_def_cfa_register %esp
- .cfi_adjust_cfa_offset -4
- ret
- .cfi_endproc
+ .cfi_startproc
+ push %ebp
+ .cfi_adjust_cfa_offset 4
+ .cfi_offset %ebp, -8
+ mov %esp, %ebp
+ .cfi_def_cfa_register %ebp
+ push %ecx
+ push %edx
+ mov %eax,%ecx
+
+ cmp $0x1000,%ecx
+ jna 3f
+ 2:
+ sub $0x1000,%esp
+ test %esp,8(%esp)
+ sub $0x1000,%ecx
+ cmp $0x1000,%ecx
+ ja 2b
+
+ 3:
+ sub %ecx,%esp
+ test %esp,8(%esp)
+ mov 4(%ebp),%edx
+ mov %edx, 12(%esp)
+ add %eax,%esp
+ pop %edx
+ pop %ecx
+ leave
+
+ sub %eax, %esp
+ .cfi_def_cfa_register %esp
+ .cfi_adjust_cfa_offset -4
+ ret
+ .cfi_endproc
        ",
        options(att_syntax)
    )
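
For context on when any of this runs: per the comments in the x86_64 body above, the compiler only emits a call to __rust_probestack when a frame needs more than a page (dynamic stack allocations can trigger it for smaller sizes too), it passes the requested size in %rax (%eax on x86), and it adjusts the stack pointer itself after the call returns. A minimal sketch of the kind of function that would receive such a probe, assuming stack probes are enabled for the target (illustrative only, not part of this change):

// Illustrative only: a frame well over one 0x1000-byte page of locals.
// With stack probes enabled, the compiler inserts a __rust_probestack
// call before moving the stack pointer, so running out of stack faults
// on the guard page instead of skipping past it.
#[inline(never)]
fn big_frame() -> u8 {
    let buf = [0u8; 64 * 1024]; // 16 pages of zeroed locals
    buf[buf.len() - 1]
}

fn main() {
    assert_eq!(big_frame(), 0);
}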