@@ -165,7 +165,7 @@ static void* dasm_labels[zend_lb_MAX];
 
 |.section code, cold_code, jmp_table
 
-#define IS_32BIT(addr) (((uintptr_t)(addr)) <= 0xffffffff)
+#define IS_32BIT(addr) (((uintptr_t)(addr)) <= 0x7fffffff)
 
 #define IS_SIGNED_32BIT(val) ((((intptr_t)(val)) <= 0x7fffffff) && (((intptr_t)(val)) >= (-2147483647 - 1)))
 
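Editor's aside (not part of the patch): the hunks below also replace the unsigned IS_32BIT check with IS_SIGNED_32BIT wherever an address is baked into generated x86-64 code. The apparent reason is that 32-bit immediates and displacements are sign-extended to 64 bits by the CPU, so an address in the 0x80000000-0xffffffff range passes the old unsigned test but would be materialized as 0xffffffff80000000-0xffffffffffffffff. A minimal, self-contained C sketch of the difference, reusing the two macros quoted above:

/* sketch only: shows why bit 31 matters when an address is encoded
 * as a sign-extended 32-bit immediate (e.g. mov r64, imm32) */
#include <inttypes.h>
#include <stdio.h>

#define IS_32BIT(addr)       (((uintptr_t)(addr)) <= 0xffffffff)  /* old, unsigned check */
#define IS_SIGNED_32BIT(val) ((((intptr_t)(val)) <= 0x7fffffff) && (((intptr_t)(val)) >= (-2147483647 - 1)))

int main(void)
{
	uintptr_t addr = 0x80000000u;      /* fits in 32 bits, but bit 31 is set */
	int64_t as_imm32 = (int32_t)addr;  /* what the CPU's sign extension produces */

	printf("IS_32BIT        -> %d\n", IS_32BIT(addr));        /* 1: old check accepts it */
	printf("IS_SIGNED_32BIT -> %d\n", IS_SIGNED_32BIT(addr)); /* 0: new check rejects it */
	printf("sign-extended   -> 0x%" PRIx64 "\n", (uint64_t)as_imm32); /* ffffffff80000000 */
	return 0;
}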
@@ -188,7 +188,7 @@ static void* dasm_labels[zend_lb_MAX];
 
 |.macro LOAD_ADDR, reg, addr
 | .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
 | mov reg, ((ptrdiff_t)addr) // 0x48 0xc7 0xc0 <imm-32-bit>
 || } else {
 | mov64 reg, ((ptrdiff_t)addr) // 0x48 0xb8 <imm-64-bit>
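Editor's aside (not part of the patch): LOAD_ADDR's two branches match the encodings named in the DynASM comments above: the 7-byte REX.W + C7 /0 form, whose imm32 is sign-extended by the CPU, and the 10-byte REX.W + B8 movabs form with a full imm64. The signed check decides when the short form is safe. A small self-contained sketch under that assumption; describe_load() and the sample addresses are illustrative, not JIT API:

/* sketch only: report which of LOAD_ADDR's two branches an address would take */
#include <stdint.h>
#include <stdio.h>

#define IS_SIGNED_32BIT(val) ((((intptr_t)(val)) <= 0x7fffffff) && (((intptr_t)(val)) >= (-2147483647 - 1)))

static void describe_load(const void *addr)
{
	if (IS_SIGNED_32BIT(addr)) {
		/* 7 bytes: REX.W + C7 /0, imm32 sign-extended to 64 bits */
		printf("%p -> mov reg, imm32   (0x48 0xc7 0xc0 ...)\n", addr);
	} else {
		/* 10 bytes: REX.W + B8+rd, full imm64 (movabs) */
		printf("%p -> mov64 reg, imm64 (0x48 0xb8 ...)\n", addr);
	}
}

int main(void)
{
	describe_load((void *)(uintptr_t)0x00601040);      /* low static address: short form */
	describe_load((void *)(uintptr_t)0x80000000);      /* bit 31 set: must use mov64 */
	describe_load((void *)(uintptr_t)0x7f3a12345678);  /* typical 64-bit mmap address: mov64 */
	return 0;
}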
@@ -249,7 +249,7 @@ static void* dasm_labels[zend_lb_MAX];
 
 |.macro ADDR_OP1, addr_ins, addr, tmp_reg
 | .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
 | addr_ins ((ptrdiff_t)addr)
 || } else {
 | mov64 tmp_reg, ((ptrdiff_t)addr)
@@ -262,7 +262,7 @@ static void* dasm_labels[zend_lb_MAX];
 
 |.macro ADDR_OP2_2, addr_ins, op1, addr, tmp_reg
 | .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
 | addr_ins op1, ((ptrdiff_t)addr)
 || } else {
 | mov64 tmp_reg, ((ptrdiff_t)addr)
@@ -289,7 +289,7 @@ static void* dasm_labels[zend_lb_MAX];
 
 |.macro MEM_OP1, mem_ins, prefix, addr, tmp_reg
 | .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
 | mem_ins prefix [addr]
 || } else {
 | mov64 tmp_reg, ((ptrdiff_t)addr)
@@ -302,7 +302,7 @@ static void* dasm_labels[zend_lb_MAX];
 
 |.macro MEM_OP2_1, mem_ins, prefix, addr, op2, tmp_reg
 | .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
 | mem_ins prefix [addr], op2
 || } else {
 | mov64 tmp_reg, ((ptrdiff_t)addr)
@@ -315,7 +315,7 @@ static void* dasm_labels[zend_lb_MAX];
 
 |.macro MEM_OP2_2, mem_ins, op1, prefix, addr, tmp_reg
 | .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
 | mem_ins op1, prefix [addr]
 || } else {
 | mov64 tmp_reg, ((ptrdiff_t)addr)
@@ -346,7 +346,7 @@ static void* dasm_labels[zend_lb_MAX];
 
 |.macro MEM_OP3_3, mem_ins, op1, op2, prefix, addr, tmp_reg
 | .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
 | mem_ins op1, op2, prefix [addr]
 || } else {
 | mov64 tmp_reg, ((ptrdiff_t)addr)
@@ -581,7 +581,7 @@ static void* dasm_labels[zend_lb_MAX];
 |.macro SSE_AVX_OP, sse_ins, avx_ins, reg, addr
 || if (Z_MODE(addr) == IS_CONST_ZVAL) {
 | .if X64
-|| if (IS_32BIT(Z_ZV(addr))) {
+|| if (IS_SIGNED_32BIT(Z_ZV(addr))) {
 | SSE_AVX_INS sse_ins, avx_ins, xmm(reg-ZREG_XMM0), qword [Z_ZV(addr)]
 || } else {
 | LOAD_ADDR r0, Z_ZV(addr)
@@ -654,7 +654,7 @@ static void* dasm_labels[zend_lb_MAX];
 || if (Z_MODE(addr) != IS_REG || reg != Z_REG(addr)) {
 || if (Z_MODE(addr) == IS_CONST_ZVAL) {
 | .if X64
-|| if (IS_32BIT(Z_ZV(addr))) {
+|| if (IS_SIGNED_32BIT(Z_ZV(addr))) {
 | SSE_AVX_INS movsd, vmovsd, xmm(reg-ZREG_XMM0), qword [Z_ZV(addr)]
 || } else {
 | LOAD_ADDR r0, Z_ZV(addr)
@@ -924,7 +924,7 @@ static void* dasm_labels[zend_lb_MAX];
 | xorps xmm(dst_reg-ZREG_XMM0), xmm(dst_reg-ZREG_XMM0)
 || }
 | .if X64
-|| } else if (!IS_32BIT(zv)) {
+|| } else if (!IS_SIGNED_32BIT(zv)) {
 | mov64 Ra(tmp_reg), ((uintptr_t)zv)
 | SSE_AVX_INS movsd, vmovsd, xmm(dst_reg-ZREG_XMM0), qword [Ra(tmp_reg)]
 | .endif
@@ -978,7 +978,7 @@ static void* dasm_labels[zend_lb_MAX];
 | xorps xmm(dst_reg-ZREG_XMM0), xmm(dst_reg-ZREG_XMM0)
 || }
 | .if X64
-|| } else if (!IS_32BIT(zv)) {
+|| } else if (!IS_SIGNED_32BIT(zv)) {
 | mov64 Ra(tmp_reg), ((uintptr_t)zv)
 | SSE_AVX_INS movsd, vmovsd, xmm(dst_reg-ZREG_XMM0), qword [Ra(tmp_reg)]
 | .endif
@@ -3130,7 +3130,7 @@ static int zend_jit_trace_begin(dasm_State **Dst, uint32_t trace_num, zend_jit_t
 #if ZTS
 	if (1) {
 #else
-	if ((sizeof(void*) == 8 && !IS_32BIT(&EG(jit_trace_num)))) {
+	if ((sizeof(void*) == 8 && !IS_SIGNED_32BIT(&EG(jit_trace_num)))) {
 #endif
 		/* assignment to EG(jit_trace_num) shouldn't clober CPU register used by deoptimizer */
 		if (parent) {
@@ -6053,7 +6053,7 @@ static int zend_jit_assign_to_variable(dasm_State **Dst,
 	zval *zv = Z_ZV(val_addr);
 
 	if (Z_TYPE_P(zv) == IS_DOUBLE) {
-		if (Z_DVAL_P(zv) == 0 || IS_32BIT(zv)) {
+		if (Z_DVAL_P(zv) == 0 || IS_SIGNED_32BIT(zv)) {
 			keep_gc = 1;
 		}
 	} else if (IS_SIGNED_32BIT(Z_LVAL_P(zv))) {
@@ -15331,7 +15331,7 @@ static zend_bool zend_needs_extra_reg_for_const(const zend_op *opline, zend_ucha
 |.if X64
 || if (op_type == IS_CONST) {
 || zval *zv = RT_CONSTANT(opline, op);
-|| if (Z_TYPE_P(zv) == IS_DOUBLE && Z_DVAL_P(zv) != 0 && !IS_32BIT(zv)) {
+|| if (Z_TYPE_P(zv) == IS_DOUBLE && Z_DVAL_P(zv) != 0 && !IS_SIGNED_32BIT(zv)) {
 || return 1;
 || } else if (Z_TYPE_P(zv) == IS_LONG && !IS_SIGNED_32BIT(Z_LVAL_P(zv))) {
 || return 1;
@@ -15676,7 +15676,7 @@ static zend_regset zend_jit_get_scratch_regset(const zend_op *opline, const zend
 #if ZTS
 	ZEND_REGSET_INCL(regset, ZREG_R0);
 #else
-	if ((sizeof(void*) == 8 && !IS_32BIT(&EG(vm_interrupt)))) {
+	if ((sizeof(void*) == 8 && !IS_SIGNED_32BIT(&EG(vm_interrupt)))) {
 		ZEND_REGSET_INCL(regset, ZREG_R0);
 	}
 #endif
@@ -15689,7 +15689,7 @@ static zend_regset zend_jit_get_scratch_regset(const zend_op *opline, const zend
 #if ZTS
 	ZEND_REGSET_INCL(regset, ZREG_R0);
 #else
-	if ((sizeof(void*) == 8 && !IS_32BIT(&EG(vm_interrupt)))) {
+	if ((sizeof(void*) == 8 && !IS_SIGNED_32BIT(&EG(vm_interrupt)))) {
 		ZEND_REGSET_INCL(regset, ZREG_R0);
 	}
 #endif