From e6cb64ae9e8d5dc6e793f2a51b6ab55c278cb8a2 Mon Sep 17 00:00:00 2001
From: Nikita Popov
Date: Tue, 3 May 2022 17:08:05 +0200
Subject: [PATCH] [X86] Add additional add of and not tests (NFC)

Add tests for cases involving a truncated not.
---
 llvm/test/CodeGen/X86/add-and-not.ll | 88 ++++++++++++++++++++++++++++
 1 file changed, 88 insertions(+)

diff --git a/llvm/test/CodeGen/X86/add-and-not.ll b/llvm/test/CodeGen/X86/add-and-not.ll
index 570aac6ea2e89de..bf8e507d2f82f88 100644
--- a/llvm/test/CodeGen/X86/add-and-not.ll
+++ b/llvm/test/CodeGen/X86/add-and-not.ll
@@ -115,3 +115,91 @@ define i8 @add_and_xor_extra_use(i8 %x, i8 %y) nounwind {
   ret i8 %add
 }
 
+define i64 @add_and_xor_const(i64 %x) {
+; CHECK-LABEL: add_and_xor_const:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    movl %edi, %eax
+; CHECK-NEXT:    notl %eax
+; CHECK-NEXT:    andl $1, %eax
+; CHECK-NEXT:    addq %rdi, %rax
+; CHECK-NEXT:    retq
+  %xor = xor i64 %x, -1
+  %and = and i64 %xor, 1
+  %add = add i64 %and, %x
+  ret i64 %add
+}
+
+define i64 @add_and_xor_const_wrong_op(i64 %x, i64 %y) {
+; CHECK-LABEL: add_and_xor_const_wrong_op:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    notl %esi
+; CHECK-NEXT:    andl $1, %esi
+; CHECK-NEXT:    leaq (%rsi,%rdi), %rax
+; CHECK-NEXT:    retq
+  %xor = xor i64 %y, -1
+  %and = and i64 %xor, 1
+  %add = add i64 %and, %x
+  ret i64 %add
+}
+
+define i64 @add_and_xor_const_explicit_trunc(i64 %x) {
+; CHECK-LABEL: add_and_xor_const_explicit_trunc:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    movl %edi, %eax
+; CHECK-NEXT:    notl %eax
+; CHECK-NEXT:    andl $1, %eax
+; CHECK-NEXT:    addq %rdi, %rax
+; CHECK-NEXT:    retq
+  %trunc = trunc i64 %x to i32
+  %xor = xor i32 %trunc, -1
+  %ext = sext i32 %xor to i64
+  %and = and i64 %ext, 1
+  %add = add i64 %and, %x
+  ret i64 %add
+}
+
+define i64 @add_and_xor_const_explicit_trunc_wrong_mask(i64 %x) {
+; CHECK-LABEL: add_and_xor_const_explicit_trunc_wrong_mask:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    movl %edi, %eax
+; CHECK-NEXT:    notl %eax
+; CHECK-NEXT:    movslq %eax, %rcx
+; CHECK-NEXT:    movabsq $4294967297, %rax # imm = 0x100000001
+; CHECK-NEXT:    andq %rcx, %rax
+; CHECK-NEXT:    addq %rdi, %rax
+; CHECK-NEXT:    retq
+  %trunc = trunc i64 %x to i32
+  %xor = xor i32 %trunc, -1
+  %ext = sext i32 %xor to i64
+  %and = and i64 %ext, 4294967297
+  %add = add i64 %and, %x
+  ret i64 %add
+}
+
+define i8* @gep_and_xor(i8* %a, i64 %m) {
+; CHECK-LABEL: gep_and_xor:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    movq %rdi, %rax
+; CHECK-NEXT:    orq %rsi, %rax
+; CHECK-NEXT:    retq
+  %old = ptrtoint i8* %a to i64
+  %old.not = and i64 %old, %m
+  %offset = xor i64 %old.not, %m
+  %p = getelementptr i8, i8* %a, i64 %offset
+  ret i8* %p
+}
+
+define i8* @gep_and_xor_const(i8* %a) {
+; CHECK-LABEL: gep_and_xor_const:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    movl %edi, %eax
+; CHECK-NEXT:    notl %eax
+; CHECK-NEXT:    andl $1, %eax
+; CHECK-NEXT:    addq %rdi, %rax
+; CHECK-NEXT:    retq
+  %old = ptrtoint i8* %a to i64
+  %old.not = and i64 %old, 1
+  %offset = xor i64 %old.not, 1
+  %p = getelementptr i8, i8* %a, i64 %offset
+  ret i8* %p
+}
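
Note (an illustrative sketch, not part of the diff above): the identity these
tests are built around is that ~x & m can only set bits that are already clear
in x, so x + (~x & m) never carries and computes the same value as x | m; this
is why gep_and_xor above lowers to a single orq. A minimal IR form of the
pattern, with a made-up function name and assuming the same llc x86-64 RUN
line as the rest of this test file:

; Sketch only: x + (~x & 1) computes the same value as x | 1, because the
; masked not has no set bits in common with x, so the add cannot carry.
define i64 @add_of_masked_not_sketch(i64 %x) {
  %not = xor i64 %x, -1       ; ~x
  %masked = and i64 %not, 1   ; ~x & 1, disjoint from x
  %add = add i64 %masked, %x  ; same value as: or i64 %x, 1
  ret i64 %add
}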