diff --git a/cranelift/filetests/filetests/isa/aarch64/reftypes.clif b/cranelift/filetests/filetests/isa/aarch64/reftypes.clif
index 7d54b4994409..5ba6a3f24319 100644
--- a/cranelift/filetests/filetests/isa/aarch64/reftypes.clif
+++ b/cranelift/filetests/filetests/isa/aarch64/reftypes.clif
@@ -90,31 +90,34 @@ block3(v7: r64, v8: r64):
 ; VCode:
 ; stp fp, lr, [sp, #-16]!
 ; mov fp, sp
+; str x24, [sp, #-16]!
 ; sub sp, sp, #32
 ; block0:
-; mov x3, x0
+; mov x5, x0
 ; str x1, [sp, #16]
-; mov x0, x3
-; str x3, [sp, #8]
+; mov x0, x5
+; mov x24, x5
 ; load_ext_name x12, TestCase(%f)+0
+; str x24, [sp, #8]
 ; blr x12
 ; mov x11, sp
-; ldr x3, [sp, #8]
-; str x3, [x11]
+; mov x5, x24
+; str x5, [x11]
 ; uxtb w12, w0
 ; cbnz x12, label2 ; b label1
 ; block1:
-; mov x1, x3
+; mov x1, x24
 ; ldr x0, [sp, #16]
 ; b label3
 ; block2:
-; mov x0, x3
+; mov x0, x24
 ; ldr x1, [sp, #16]
 ; b label3
 ; block3:
 ; mov x15, sp
 ; ldr x2, [x15]
 ; add sp, sp, #32
+; ldr x24, [sp], #16
 ; ldp fp, lr, [sp], #16
 ; ret
 ;
@@ -122,33 +125,36 @@ block3(v7: r64, v8: r64):
 ; block0: ; offset 0x0
 ; stp x29, x30, [sp, #-0x10]!
 ; mov x29, sp
+; str x24, [sp, #-0x10]!
 ; sub sp, sp, #0x20
-; block1: ; offset 0xc
-; mov x3, x0
+; block1: ; offset 0x10
+; mov x5, x0
 ; stur x1, [sp, #0x10]
-; mov x0, x3
-; stur x3, [sp, #8]
-; ldr x12, #0x24
-; b #0x2c
+; mov x0, x5
+; mov x24, x5
+; ldr x12, #0x28
+; b #0x30
 ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %f 0
 ; .byte 0x00, 0x00, 0x00, 0x00
+; stur x24, [sp, #8]
 ; blr x12
 ; mov x11, sp
-; ldur x3, [sp, #8]
-; str x3, [x11]
+; mov x5, x24
+; str x5, [x11]
 ; uxtb w12, w0
-; cbnz x12, #0x50
-; block2: ; offset 0x44
-; mov x1, x3
+; cbnz x12, #0x58
+; block2: ; offset 0x4c
+; mov x1, x24
 ; ldur x0, [sp, #0x10]
-; b #0x58
-; block3: ; offset 0x50
-; mov x0, x3
+; b #0x60
+; block3: ; offset 0x58
+; mov x0, x24
 ; ldur x1, [sp, #0x10]
-; block4: ; offset 0x58
+; block4: ; offset 0x60
 ; mov x15, sp
 ; ldr x2, [x15]
 ; add sp, sp, #0x20
+; ldr x24, [sp], #0x10
 ; ldp x29, x30, [sp], #0x10
 ; ret
 
diff --git a/cranelift/filetests/filetests/isa/riscv64/reftypes.clif b/cranelift/filetests/filetests/isa/riscv64/reftypes.clif
index a1ee0ab49a8c..b3c2ae6361ad 100644
--- a/cranelift/filetests/filetests/isa/riscv64/reftypes.clif
+++ b/cranelift/filetests/filetests/isa/riscv64/reftypes.clif
@@ -91,26 +91,28 @@ block3(v7: r64, v8: r64):
 ; sd fp,0(sp)
 ; mv fp,sp
 ; sd s3,-8(sp)
+; sd s7,-16(sp)
 ; add sp,-48
 ; block0:
-; mv a6,a0
+; mv t3,a0
 ; sd a1,16(nominal_sp)
 ; mv s3,a2
-; mv a6,a0
-; sd a6,8(nominal_sp)
+; mv t3,a0
+; mv s7,t3
 ; load_sym t0,%f+0
+; sd s7,8(nominal_sp)
 ; callind t0
 ; load_addr t4,0(nominal_sp)
-; ld a6,8(nominal_sp)
-; sd a6,0(t4)
+; mv t3,s7
+; sd t3,0(t4)
 ; andi t0,a0,255
 ; bne t0,zero,taken(label2),not_taken(label1)
 ; block1:
-; mv a1,a6
+; mv a1,s7
 ; ld a0,16(nominal_sp)
 ; j label3
 ; block2:
-; mv a0,a6
+; mv a0,s7
 ; ld a1,16(nominal_sp)
 ; j label3
 ; block3:
@@ -120,6 +122,7 @@ block3(v7: r64, v8: r64):
 ; sd a2,0(t3)
 ; add sp,+48
 ; ld s3,-8(sp)
+; ld s7,-16(sp)
 ; ld ra,8(sp)
 ; ld fp,0(sp)
 ; add sp,+16
@@ -132,38 +135,41 @@ block3(v7: r64, v8: r64):
 ; sd s0, 0(sp)
 ; ori s0, sp, 0
 ; sd s3, -8(sp)
+; sd s7, -0x10(sp)
 ; addi sp, sp, -0x30
-; block1: ; offset 0x18
-; ori a6, a0, 0
+; block1: ; offset 0x1c
+; ori t3, a0, 0
 ; sd a1, 0x10(sp)
 ; ori s3, a2, 0
-; ori a0, a6, 0
-; sd a6, 8(sp)
+; ori a0, t3, 0
+; ori s7, t3, 0
 ; auipc t0, 0
 ; ld t0, 0xc(t0)
 ; j 0xc
 ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %f 0
 ; .byte 0x00, 0x00, 0x00, 0x00
+; sd s7, 8(sp)
 ; jalr t0
 ; mv t4, sp
-; ld a6, 8(sp)
-; sd a6, 0(t4)
+; ori t3, s7, 0
+; sd t3, 0(t4)
 ; andi t0, a0, 0xff
 ; bnez t0, 0x10
-; block2: ; offset 0x58
-; ori a1, a6, 0
+; block2: ; offset 0x60
+; ori a1, s7, 0
 ; ld a0, 0x10(sp)
 ; j 0xc
-; block3: ; offset 0x64
-; ori a0, a6, 0
+; block3: ; offset 0x6c
+; ori a0, s7, 0
 ; ld a1, 0x10(sp)
-; block4: ; offset 0x6c
+; block4: ; offset 0x74
 ; mv a2, sp
 ; ld a2, 0(a2)
 ; ori t3, s3, 0
 ; sd a2, 0(t3)
 ; addi sp, sp, 0x30
 ; ld s3, -8(sp)
+; ld s7, -0x10(sp)
 ; ld ra, 8(sp)
 ; ld s0, 0(sp)
 ; addi sp, sp, 0x10
diff --git a/cranelift/filetests/filetests/isa/s390x/reftypes.clif b/cranelift/filetests/filetests/isa/s390x/reftypes.clif
index d7a986a5c3d9..43b5f409cd7d 100644
--- a/cranelift/filetests/filetests/isa/s390x/reftypes.clif
+++ b/cranelift/filetests/filetests/isa/s390x/reftypes.clif
@@ -93,68 +93,70 @@ block3(v7: r64, v8: r64):
 }
 
 ; VCode:
-; stmg %r14, %r15, 112(%r15)
+; stmg %r11, %r15, 88(%r15)
 ; aghi %r15, -184
 ; virtual_sp_offset_adjust 160
 ; block0:
-; lgr %r5, %r2
 ; stg %r3, 176(%r15)
-; lgr %r2, %r5
-; stg %r5, 168(%r15)
+; lgr %r3, %r2
+; lgr %r2, %r3
+; lgr %r11, %r3
 ; bras %r1, 12 ; data %f + 0
 ; lg %r3, 0(%r1)
+; stg %r11, 168(%r15)
 ; basr %r14, %r3
-; la %r3, 160(%r15)
-; lg %r5, 168(%r15)
-; stg %r5, 0(%r3)
+; la %r5, 160(%r15)
+; lgr %r3, %r11
+; stg %r3, 0(%r5)
 ; lbr %r2, %r2
 ; chi %r2, 0
 ; jglh label2 ; jg label1
 ; block1:
-; lgr %r3, %r5
+; lgr %r3, %r11
 ; lg %r2, 176(%r15)
 ; jg label3
 ; block2:
-; lgr %r2, %r5
+; lgr %r2, %r11
 ; lg %r3, 176(%r15)
 ; jg label3
 ; block3:
 ; la %r4, 160(%r15)
 ; lg %r4, 0(%r4)
-; lmg %r14, %r15, 296(%r15)
+; lmg %r11, %r15, 272(%r15)
 ; br %r14
 ;
 ; Disassembled:
 ; block0: ; offset 0x0
-; stmg %r14, %r15, 0x70(%r15)
+; stmg %r11, %r15, 0x58(%r15)
 ; aghi %r15, -0xb8
 ; block1: ; offset 0xa
-; lgr %r5, %r2
 ; stg %r3, 0xb0(%r15)
-; lgr %r2, %r5
-; stg %r5, 0xa8(%r15)
-; bras %r1, 0x2a
+; lgr %r3, %r2
+; lgr %r2, %r3
+; lgr %r11, %r3
+; bras %r1, 0x28
 ; .byte 0x00, 0x00 ; reloc_external Abs8 %f 0
 ; .byte 0x00, 0x00
 ; .byte 0x00, 0x00
 ; .byte 0x00, 0x00
 ; lg %r3, 0(%r1)
+; stg %r11, 0xa8(%r15)
 ; basr %r14, %r3
-; la %r3, 0xa0(%r15)
-; lg %r5, 0xa8(%r15)
-; stg %r5, 0(%r3)
+; la %r5, 0xa0(%r15)
+; lgr %r3, %r11
+; stg %r3, 0(%r5)
 ; lbr %r2, %r2
 ; chi %r2, 0
-; jglh 0x60
-; block2: ; offset 0x50
-; lgr %r3, %r5
+; jglh 0x62
+; block2: ; offset 0x52
+; lgr %r3, %r11
 ; lg %r2, 0xb0(%r15)
-; jg 0x6a
-; block3: ; offset 0x60
-; lgr %r2, %r5
+; jg 0x6c
+; block3: ; offset 0x62
+; lgr %r2, %r11
 ; lg %r3, 0xb0(%r15)
-; block4: ; offset 0x6a
+; block4: ; offset 0x6c
 ; la %r4, 0xa0(%r15)
 ; lg %r4, 0(%r4)
-; lmg %r14, %r15, 0x128(%r15)
+; lmg %r11, %r15, 0x110(%r15)
 ; br %r14
diff --git a/cranelift/filetests/filetests/isa/x64/fma-call.clif b/cranelift/filetests/filetests/isa/x64/fma-call.clif
index 153fc48c690e..dd77739be6c6 100644
--- a/cranelift/filetests/filetests/isa/x64/fma-call.clif
+++ b/cranelift/filetests/filetests/isa/x64/fma-call.clif
@@ -77,38 +77,38 @@ block0(v0: f32x4, v1: f32x4, v2: f32x4):
 ; movdqu %xmm0, rsp(48 + virtual offset)
 ; movdqu rsp(0 + virtual offset), %xmm4
 ; pshufd $1, %xmm4, %xmm0
-; movdqu rsp(16 + virtual offset), %xmm2
-; pshufd $1, %xmm2, %xmm1
-; movdqu rsp(32 + virtual offset), %xmm3
-; pshufd $1, %xmm3, %xmm2
+; movdqu rsp(16 + virtual offset), %xmm8
+; pshufd $1, %xmm8, %xmm1
+; movdqu rsp(32 + virtual offset), %xmm12
+; pshufd $1, %xmm12, %xmm2
 ; load_ext_name %FmaF32+0, %r9
 ; call *%r9
 ; movdqu %xmm0, rsp(64 + virtual offset)
-; movdqu rsp(0 + virtual offset), %xmm14
-; pshufd $2, %xmm14, %xmm0
-; movdqu rsp(16 + virtual offset), %xmm13
-; pshufd $2, %xmm13, %xmm1
-; movdqu rsp(32 + virtual offset), %xmm15
-; pshufd $2, %xmm15, %xmm2
+; movdqu rsp(0 + virtual offset), %xmm4
+; pshufd $2, %xmm4, %xmm0
+; movdqu rsp(16 + virtual offset), %xmm14
+; pshufd $2, %xmm14, %xmm1
+; movdqu rsp(32 + virtual offset), %xmm3
+; pshufd $2, %xmm3, %xmm2
 ; load_ext_name %FmaF32+0, %r10
 ; call *%r10
 ; movdqu %xmm0, rsp(80 + virtual offset)
-; movdqu rsp(0 + virtual offset), %xmm14
-; pshufd $3, %xmm14, %xmm0
+; movdqu rsp(0 + virtual offset), %xmm4
+; pshufd $3, %xmm4, %xmm0
 ; movdqu rsp(16 + virtual offset), %xmm1
 ; pshufd $3, %xmm1, %xmm1
 ; movdqu rsp(32 + virtual offset), %xmm2
 ; pshufd $3, %xmm2, %xmm2
 ; load_ext_name %FmaF32+0, %r11
 ; call *%r11
-; movdqa %xmm0, %xmm13
 ; movdqu rsp(64 + virtual offset), %xmm4
+; movdqa %xmm0, %xmm2
 ; movdqu rsp(48 + virtual offset), %xmm0
 ; insertps $16, %xmm0, %xmm4, %xmm0
-; movdqu rsp(80 + virtual offset), %xmm10
-; insertps $32, %xmm0, %xmm10, %xmm0
-; movdqa %xmm13, %xmm1
-; insertps $48, %xmm0, %xmm1, %xmm0
+; movdqu rsp(80 + virtual offset), %xmm1
+; insertps $32, %xmm0, %xmm1, %xmm0
+; movdqa %xmm2, %xmm3
+; insertps $48, %xmm0, %xmm3, %xmm0
 ; addq %rsp, $96, %rsp
 ; movq %rbp, %rsp
 ; popq %rbp
@@ -131,38 +131,38 @@ block0(v0: f32x4, v1: f32x4, v2: f32x4):
 ; movdqu %xmm0, 0x30(%rsp)
 ; movdqu (%rsp), %xmm4
 ; pshufd $1, %xmm4, %xmm0
-; movdqu 0x10(%rsp), %xmm2
-; pshufd $1, %xmm2, %xmm1
-; movdqu 0x20(%rsp), %xmm3
-; pshufd $1, %xmm3, %xmm2
+; movdqu 0x10(%rsp), %xmm8
+; pshufd $1, %xmm8, %xmm1
+; movdqu 0x20(%rsp), %xmm12
+; pshufd $1, %xmm12, %xmm2
 ; movabsq $0, %r9 ; reloc_external Abs8 %FmaF32 0
 ; callq *%r9
 ; movdqu %xmm0, 0x40(%rsp)
-; movdqu (%rsp), %xmm14
-; pshufd $2, %xmm14, %xmm0
-; movdqu 0x10(%rsp), %xmm13
-; pshufd $2, %xmm13, %xmm1
-; movdqu 0x20(%rsp), %xmm15
-; pshufd $2, %xmm15, %xmm2
+; movdqu (%rsp), %xmm4
+; pshufd $2, %xmm4, %xmm0
+; movdqu 0x10(%rsp), %xmm14
+; pshufd $2, %xmm14, %xmm1
+; movdqu 0x20(%rsp), %xmm3
+; pshufd $2, %xmm3, %xmm2
 ; movabsq $0, %r10 ; reloc_external Abs8 %FmaF32 0
 ; callq *%r10
 ; movdqu %xmm0, 0x50(%rsp)
-; movdqu (%rsp), %xmm14
-; pshufd $3, %xmm14, %xmm0
+; movdqu (%rsp), %xmm4
+; pshufd $3, %xmm4, %xmm0
 ; movdqu 0x10(%rsp), %xmm1
 ; pshufd $3, %xmm1, %xmm1
 ; movdqu 0x20(%rsp), %xmm2
 ; pshufd $3, %xmm2, %xmm2
 ; movabsq $0, %r11 ; reloc_external Abs8 %FmaF32 0
 ; callq *%r11
-; movdqa %xmm0, %xmm13
 ; movdqu 0x40(%rsp), %xmm4
+; movdqa %xmm0, %xmm2
 ; movdqu 0x30(%rsp), %xmm0
 ; insertps $0x10, %xmm4, %xmm0
-; movdqu 0x50(%rsp), %xmm10
-; insertps $0x20, %xmm10, %xmm0
-; movdqa %xmm13, %xmm1
-; insertps $0x30, %xmm1, %xmm0
+; movdqu 0x50(%rsp), %xmm1
+; insertps $0x20, %xmm1, %xmm0
+; movdqa %xmm2, %xmm3
+; insertps $0x30, %xmm3, %xmm0
 ; addq $0x60, %rsp
 ; movq %rbp, %rsp
 ; popq %rbp
diff --git a/cranelift/filetests/filetests/isa/x64/i128.clif b/cranelift/filetests/filetests/isa/x64/i128.clif
index 6d04f354118c..c638a89d7f27 100644
--- a/cranelift/filetests/filetests/isa/x64/i128.clif
+++ b/cranelift/filetests/filetests/isa/x64/i128.clif
@@ -1615,6 +1615,7 @@ block0(v0: i128, v1: i128):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %rdx
 ; shlq %cl, %rdx, %rdx
 ; movq %rsi, %r11
@@ -1642,6 +1643,7 @@ block0(v0: i128, v1: i128):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %rdx
 ; shlq %cl, %rdx
 ; movq %rsi, %r11
@@ -1674,12 +1676,14 @@ block0(v0: i128, v1: i128):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r11
 ; movq %rdi, %r8
 ; shrq %cl, %r8, %r8
 ; movq %rsi, %r10
 ; shrq %cl, %r10, %r10
+; movq %rcx, %r11
 ; movl $64, %ecx
-; movq %rdx, %rdi
+; movq %r11, %rdi
 ; subq %rcx, %rdi, %rcx
 ; movq %rsi, %r11
 ; shlq %cl, %r11, %r11
@@ -1701,12 +1705,14 @@ block0(v0: i128, v1: i128):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r11
 ; movq %rdi, %r8
 ; shrq %cl, %r8
 ; movq %rsi, %r10
 ; shrq %cl, %r10
+; movq %rcx, %r11
 ; movl $0x40, %ecx
-; movq %rdx, %rdi
+; movq %r11, %rdi
 ; subq %rdi, %rcx
 ; movq %rsi, %r11
 ; shlq %cl, %r11
@@ -1733,12 +1739,14 @@ block0(v0: i128, v1: i128):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r11
 ; movq %rdi, %r8
 ; shrq %cl, %r8, %r8
 ; movq %rsi, %r10
 ; sarq %cl, %r10, %r10
+; movq %rcx, %r11
 ; movl $64, %ecx
-; movq %rdx, %rax
+; movq %r11, %rax
 ; subq %rcx, %rax, %rcx
 ; movq %rsi, %r9
 ; shlq %cl, %r9, %r9
@@ -1762,12 +1770,14 @@ block0(v0: i128, v1: i128):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r11
 ; movq %rdi, %r8
 ; shrq %cl, %r8
 ; movq %rsi, %r10
 ; sarq %cl, %r10
+; movq %rcx, %r11
 ; movl $0x40, %ecx
-; movq %rdx, %rax
+; movq %r11, %rax
 ; subq %rax, %rcx
 ; movq %rsi, %r9
 ; shlq %cl, %r9
@@ -1796,20 +1806,23 @@ block0(v0: i128, v1: i128):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r8
 ; movq %rdi, %rdx
 ; shlq %cl, %rdx, %rdx
+; movq %rcx, %r8
 ; movq %rsi, %r11
 ; shlq %cl, %r11, %r11
-; movq %rcx, %r8
 ; movl $64, %ecx
-; subq %rcx, %r8, %rcx
+; movq %r8, %rax
+; subq %rcx, %rax, %rcx
 ; movq %rdi, %r10
 ; shrq %cl, %r10, %r10
 ; xorq %rax, %rax, %rax
-; testq $127, %r8
+; movq %r8, %rcx
+; testq $127, %rcx
 ; cmovzq %rax, %r10, %r10
 ; orq %r10, %r11, %r10
-; testq $64, %r8
+; testq $64, %rcx
 ; cmovzq %rdx, %rax, %rax
 ; cmovzq %r10, %rdx, %rdx
 ; movl $128, %ecx
@@ -1845,20 +1858,23 @@ block0(v0: i128, v1: i128):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r8
 ; movq %rdi, %rdx
 ; shlq %cl, %rdx
+; movq %rcx, %r8
 ; movq %rsi, %r11
 ; shlq %cl, %r11
-; movq %rcx, %r8
 ; movl $0x40, %ecx
-; subq %r8, %rcx
+; movq %r8, %rax
+; subq %rax, %rcx
 ; movq %rdi, %r10
 ; shrq %cl, %r10
 ; xorq %rax, %rax
-; testq $0x7f, %r8
+; movq %r8, %rcx
+; testq $0x7f, %rcx
 ; cmoveq %rax, %r10
 ; orq %r11, %r10
-; testq $0x40, %r8
+; testq $0x40, %rcx
 ; cmoveq %rdx, %rax
 ; cmoveq %r10, %rdx
 ; movl $0x80, %ecx
@@ -1899,21 +1915,23 @@ block0(v0: i128, v1: i128):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r9
 ; movq %rdi, %r8
 ; shrq %cl, %r8, %r8
+; movq %rcx, %r9
 ; movq %rsi, %r10
 ; shrq %cl, %r10, %r10
-; movq %rcx, %r9
 ; movl $64, %ecx
 ; movq %r9, %rax
 ; subq %rcx, %rax, %rcx
 ; movq %rsi, %r11
 ; shlq %cl, %r11, %r11
 ; xorq %rdx, %rdx, %rdx
-; testq $127, %rax
+; movq %r9, %rcx
+; testq $127, %rcx
 ; cmovzq %rdx, %r11, %r11
 ; orq %r11, %r8, %r11
-; testq $64, %rax
+; testq $64, %rcx
 ; movq %r10, %rax
 ; cmovzq %r11, %rax, %rax
 ; cmovzq %r10, %rdx, %rdx
@@ -1949,21 +1967,23 @@ block0(v0: i128, v1: i128):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r9
 ; movq %rdi, %r8
 ; shrq %cl, %r8
+; movq %rcx, %r9
 ; movq %rsi, %r10
 ; shrq %cl, %r10
-; movq %rcx, %r9
 ; movl $0x40, %ecx
 ; movq %r9, %rax
 ; subq %rax, %rcx
 ; movq %rsi, %r11
 ; shlq %cl, %r11
 ; xorq %rdx, %rdx
-; testq $0x7f, %rax
+; movq %r9, %rcx
+; testq $0x7f, %rcx
 ; cmoveq %rdx, %r11
 ; orq %r8, %r11
-; testq $0x40, %rax
+; testq $0x40, %rcx
 ; movq %r10, %rax
 ; cmoveq %r11, %rax
 ; cmoveq %r10, %rdx
diff --git a/cranelift/filetests/filetests/isa/x64/ishl.clif b/cranelift/filetests/filetests/isa/x64/ishl.clif
index 73cb469a9eff..09437dc63960 100644
--- a/cranelift/filetests/filetests/isa/x64/ishl.clif
+++ b/cranelift/filetests/filetests/isa/x64/ishl.clif
@@ -78,6 +78,7 @@ block0(v0: i128, v1: i64):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r9
 ; movq %rdi, %rdx
 ; shlq %cl, %rdx, %rdx
 ; movq %rsi, %r10
@@ -105,6 +106,7 @@ block0(v0: i128, v1: i64):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r9
 ; movq %rdi, %rdx
 ; shlq %cl, %rdx
 ; movq %rsi, %r10
@@ -137,6 +139,7 @@ block0(v0: i128, v1: i32):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r9
 ; movq %rdi, %rdx
 ; shlq %cl, %rdx, %rdx
 ; movq %rsi, %r10
@@ -164,6 +167,7 @@ block0(v0: i128, v1: i32):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r9
 ; movq %rdi, %rdx
 ; shlq %cl, %rdx
 ; movq %rsi, %r10
@@ -196,6 +200,7 @@ block0(v0: i128, v1: i16):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r9
 ; movq %rdi, %rdx
 ; shlq %cl, %rdx, %rdx
 ; movq %rsi, %r10
@@ -223,6 +228,7 @@ block0(v0: i128, v1: i16):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r9
 ; movq %rdi, %rdx
 ; shlq %cl, %rdx
 ; movq %rsi, %r10
@@ -255,6 +261,7 @@ block0(v0: i128, v1: i8):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r9
 ; movq %rdi, %rdx
 ; shlq %cl, %rdx, %rdx
 ; movq %rsi, %r10
@@ -282,6 +289,7 @@ block0(v0: i128, v1: i8):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r9
 ; movq %rdi, %rdx
 ; shlq %cl, %rdx
 ; movq %rsi, %r10
diff --git a/cranelift/filetests/filetests/isa/x64/sshr.clif b/cranelift/filetests/filetests/isa/x64/sshr.clif
index 79a4d46e501d..3bb715665beb 100644
--- a/cranelift/filetests/filetests/isa/x64/sshr.clif
+++ b/cranelift/filetests/filetests/isa/x64/sshr.clif
@@ -83,12 +83,14 @@ block0(v0: i128, v1: i64):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r11
 ; shrq %cl, %r11, %r11
 ; movq %rsi, %r9
 ; sarq %cl, %r9, %r9
+; movq %rcx, %r10
 ; movl $64, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rcx, %rdi, %rcx
 ; movq %rsi, %r8
 ; shlq %cl, %r8, %r8
@@ -112,12 +114,14 @@ block0(v0: i128, v1: i64):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r11
 ; shrq %cl, %r11
 ; movq %rsi, %r9
 ; sarq %cl, %r9
+; movq %rcx, %r10
 ; movl $0x40, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rdi, %rcx
 ; movq %rsi, %r8
 ; shlq %cl, %r8
@@ -146,12 +150,14 @@ block0(v0: i128, v1: i32):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r11
 ; shrq %cl, %r11, %r11
 ; movq %rsi, %r9
 ; sarq %cl, %r9, %r9
+; movq %rcx, %r10
 ; movl $64, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rcx, %rdi, %rcx
 ; movq %rsi, %r8
 ; shlq %cl, %r8, %r8
@@ -175,12 +181,14 @@ block0(v0: i128, v1: i32):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r11
 ; shrq %cl, %r11
 ; movq %rsi, %r9
 ; sarq %cl, %r9
+; movq %rcx, %r10
 ; movl $0x40, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rdi, %rcx
 ; movq %rsi, %r8
 ; shlq %cl, %r8
@@ -209,12 +217,14 @@ block0(v0: i128, v1: i16):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r11
 ; shrq %cl, %r11, %r11
 ; movq %rsi, %r9
 ; sarq %cl, %r9, %r9
+; movq %rcx, %r10
 ; movl $64, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rcx, %rdi, %rcx
 ; movq %rsi, %r8
 ; shlq %cl, %r8, %r8
@@ -238,12 +248,14 @@ block0(v0: i128, v1: i16):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r11
 ; shrq %cl, %r11
 ; movq %rsi, %r9
 ; sarq %cl, %r9
+; movq %rcx, %r10
 ; movl $0x40, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rdi, %rcx
 ; movq %rsi, %r8
 ; shlq %cl, %r8
@@ -272,12 +284,14 @@ block0(v0: i128, v1: i8):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r11
 ; shrq %cl, %r11, %r11
 ; movq %rsi, %r9
 ; sarq %cl, %r9, %r9
+; movq %rcx, %r10
 ; movl $64, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rcx, %rdi, %rcx
 ; movq %rsi, %r8
 ; shlq %cl, %r8, %r8
@@ -301,12 +315,14 @@ block0(v0: i128, v1: i8):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r11
 ; shrq %cl, %r11
 ; movq %rsi, %r9
 ; sarq %cl, %r9
+; movq %rcx, %r10
 ; movl $0x40, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rdi, %rcx
 ; movq %rsi, %r8
 ; shlq %cl, %r8
diff --git a/cranelift/filetests/filetests/isa/x64/ushr.clif b/cranelift/filetests/filetests/isa/x64/ushr.clif
index 6514cbd12dbc..82fa4ca21fd9 100644
--- a/cranelift/filetests/filetests/isa/x64/ushr.clif
+++ b/cranelift/filetests/filetests/isa/x64/ushr.clif
@@ -78,12 +78,14 @@ block0(v0: i128, v1: i64):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r8
 ; shrq %cl, %r8, %r8
 ; movq %rsi, %r9
 ; shrq %cl, %r9, %r9
+; movq %rcx, %r10
 ; movl $64, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rcx, %rdi, %rcx
 ; movq %rsi, %r10
 ; shlq %cl, %r10, %r10
@@ -105,12 +107,14 @@ block0(v0: i128, v1: i64):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r8
 ; shrq %cl, %r8
 ; movq %rsi, %r9
 ; shrq %cl, %r9
+; movq %rcx, %r10
 ; movl $0x40, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rdi, %rcx
 ; movq %rsi, %r10
 ; shlq %cl, %r10
@@ -137,12 +141,14 @@ block0(v0: i128, v1: i32):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r8
 ; shrq %cl, %r8, %r8
 ; movq %rsi, %r9
 ; shrq %cl, %r9, %r9
+; movq %rcx, %r10
 ; movl $64, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rcx, %rdi, %rcx
 ; movq %rsi, %r10
 ; shlq %cl, %r10, %r10
@@ -164,12 +170,14 @@ block0(v0: i128, v1: i32):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r8
 ; shrq %cl, %r8
 ; movq %rsi, %r9
 ; shrq %cl, %r9
+; movq %rcx, %r10
 ; movl $0x40, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rdi, %rcx
 ; movq %rsi, %r10
 ; shlq %cl, %r10
@@ -196,12 +204,14 @@ block0(v0: i128, v1: i16):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r8
 ; shrq %cl, %r8, %r8
 ; movq %rsi, %r9
 ; shrq %cl, %r9, %r9
+; movq %rcx, %r10
 ; movl $64, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rcx, %rdi, %rcx
 ; movq %rsi, %r10
 ; shlq %cl, %r10, %r10
@@ -223,12 +233,14 @@ block0(v0: i128, v1: i16):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r8
 ; shrq %cl, %r8
 ; movq %rsi, %r9
 ; shrq %cl, %r9
+; movq %rcx, %r10
 ; movl $0x40, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rdi, %rcx
 ; movq %rsi, %r10
 ; shlq %cl, %r10
@@ -255,12 +267,14 @@ block0(v0: i128, v1: i8):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r8
 ; shrq %cl, %r8, %r8
 ; movq %rsi, %r9
 ; shrq %cl, %r9, %r9
+; movq %rcx, %r10
 ; movl $64, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rcx, %rdi, %rcx
 ; movq %rsi, %r10
 ; shlq %cl, %r10, %r10
@@ -282,12 +296,14 @@ block0(v0: i128, v1: i8):
 ; movq %rsp, %rbp
 ; block1: ; offset 0x4
 ; movq %rdx, %rcx
+; movq %rdx, %r10
 ; movq %rdi, %r8
 ; shrq %cl, %r8
 ; movq %rsi, %r9
 ; shrq %cl, %r9
+; movq %rcx, %r10
 ; movl $0x40, %ecx
-; movq %rdx, %rdi
+; movq %r10, %rdi
 ; subq %rdi, %rcx
 ; movq %rsi, %r10
 ; shlq %cl, %r10