diff --git a/llvm/test/CodeGen/RISCV/rvv/vsetvli-insert-crossbb.ll b/llvm/test/CodeGen/RISCV/rvv/vsetvli-insert-crossbb.ll
index 5a6364967eba25..d984e266d76580 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vsetvli-insert-crossbb.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vsetvli-insert-crossbb.ll
@@ -1036,3 +1036,22 @@ declare <vscale x 4 x i32> @llvm.riscv.vadd.mask.nxv4i32.nxv4i32(
   <vscale x 4 x i1>,
   i64,
   i64);
+
+; Normally a pseudo's AVL is already live in its block, so it will already be
+; live where we're inserting the vsetvli, before the pseudo. In some cases the
+; AVL can be from a predecessor block, so make sure we extend its live range
+; across blocks.
+define <vscale x 2 x i32> @cross_block_avl_extend(i64 %avl, <vscale x 2 x i32> %a, <vscale x 2 x i32> %b) {
+entry:
+  ; Get the output vl from a vsetvli
+  %vl = call i64 @llvm.riscv.vsetvli.i64(i64 %avl, i64 2, i64 0)
+  ; Force a vsetvli toggle so we need to insert a new vsetvli in exit
+  %d = call <vscale x 2 x i32> @llvm.riscv.vadd.nxv2i32(<vscale x 2 x i32> undef, <vscale x 2 x i32> %a, <vscale x 2 x i32> %b, i64 1)
+  br label %exit
+exit:
+  ; The use of the vl from the vsetvli will be replaced with its %avl because
+  ; VLMAX is the same. So %avl, which was previously only live in %entry, will
+  ; need to be extended down to %exit.
+  %c = call <vscale x 2 x i32> @llvm.riscv.vadd.nxv2i32(<vscale x 2 x i32> undef, <vscale x 2 x i32> %a, <vscale x 2 x i32> %d, i64 %vl)
+  ret <vscale x 2 x i32> %c
+}