Skip to content

Commit 2df928b

Browse files
committed
Fix GH-9068: Conditional jump or move depends on uninitialised value(s)
See GH-9068 for analysis. This patch preserves the scratch registers of the SysV x86-64 ABI by storing them to local variables and restoring them later. We need to do this to prevent the registers of the caller from being corrupted. The reason these get corrupted is because the compiler is unaware of the Valgrind replacement function and thus makes assumptions about the original function regarding registers which are not true for the replacement function.
1 parent 4c9375e commit 2df928b

File tree

1 file changed

+52
-1
lines changed

1 file changed

+52
-1
lines changed

Zend/zend_string.c

Lines changed: 52 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -374,7 +374,58 @@ ZEND_API void zend_interned_strings_switch_storage(bool request)
374374

375375
/*
 * Valgrind replacement for zend_string_equal_val (see GH-9068).
 *
 * Valgrind swaps this C function in for the hand-written assembler
 * implementation of zend_string_equal_val.  Callers were compiled against
 * the original, which preserves the SysV x86-64 scratch registers, so this
 * replacement must not clobber rdx, rcx, r8-r11 (nor leave rdi/rsi in an
 * unexpected state) either.  We save the scratch registers to locals on
 * entry and restore them before returning.  Push/pop cannot be used safely
 * here because of the red zone.
 *
 * NOTE(review): the epilogue the compiler emits after the restoring asm
 * could in principle still touch a scratch register; a fully robust
 * preservation needs an assembly wrapper rather than inline asm — confirm
 * against the follow-up work on GH-9068.
 */
ZEND_API bool ZEND_FASTCALL I_REPLACE_SONAME_FNNAME_ZU(NONE,zend_string_equal_val)(zend_string *s1, zend_string *s2)
{
#if defined(__GNUC__) && defined(__x86_64__) && !defined(__ILP32__)
	size_t rdx, rcx, r8, r9, r10, r11;

	/*
	 * Capture the scratch registers at function entry.
	 *
	 * "=m" (not "=mr") forces each saved value into a stack slot: with
	 * "=mr" the compiler may satisfy an output with one of the very
	 * registers we are trying to save, corrupting the snapshot.  The
	 * "memory" clobber keeps the memcmp() below from being scheduled
	 * above this statement — `volatile` alone only prevents deletion,
	 * not surrounding code from moving across the asm.
	 */
	__asm__ __volatile__ (
		"movq %%rdx, %0\n\t"
		"movq %%rcx, %1\n\t"
		"movq %%r8, %2\n\t"
		"movq %%r9, %3\n\t"
		"movq %%r10, %4\n\t"
		"movq %%r11, %5\n\t"
		: "=m" (rdx),
		  "=m" (rcx),
		  "=m" (r8),
		  "=m" (r9),
		  "=m" (r10),
		  "=m" (r11)
		:
		: "memory"
	);
#endif

	bool return_value = !memcmp(ZSTR_VAL(s1), ZSTR_VAL(s2), ZSTR_LEN(s1));

#if defined(__GNUC__) && defined(__x86_64__) && !defined(__ILP32__)
	/*
	 * Restore the saved scratch registers and reload rdi/rsi with the
	 * arguments, as the original implementation leaves them.
	 *
	 * Every input uses "m": an "r"/"mr" constraint could place s1/s2 (or
	 * a saved value) in one of the registers this asm overwrites, so it
	 * would be clobbered before being read.  Memory operands are
	 * addressed through rsp/rbp, which this asm does not touch.  The
	 * overwritten registers are listed as clobbers so the compiler does
	 * not assume any prior value (in particular s1/s2 in rdi/rsi)
	 * survives this statement.
	 */
	__asm__ __volatile__ (
		"movq %0, %%rdx\n\t"
		"movq %1, %%rcx\n\t"
		"movq %2, %%r8\n\t"
		"movq %3, %%r9\n\t"
		"movq %4, %%r10\n\t"
		"movq %5, %%r11\n\t"
		"movq %6, %%rdi\n\t"
		"movq %7, %%rsi\n\t"
		:
		: "m" (rdx),
		  "m" (rcx),
		  "m" (r8),
		  "m" (r9),
		  "m" (r10),
		  "m" (r11),
		  "m" (s1),
		  "m" (s2)
		: "rdx", "rcx", "r8", "r9", "r10", "r11", "rdi", "rsi", "memory"
	);
#endif

	return return_value;
}
379430

380431
#if defined(__GNUC__) && defined(__i386__)

0 commit comments

Comments
 (0)