From 44343c777ca8c02262d1d381a2cc24866b3c5414 Mon Sep 17 00:00:00 2001
From: Brian Kessler
Date: Tue, 23 Apr 2019 22:04:38 -0600
Subject: [PATCH] cmd/compile: add signed divisibility by power of 2 rules
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

For powers of two (c = 1<<k), the signed divisibility check x%c == 0
can be done with a simple mask of the k low-order bits, x&(c-1) == 0,
avoiding the fix-up code that signed division by a power of two
otherwise requires. By the time these rules run, the generic opt pass
has already rewritten n%c == 0 to n - c*(n/c) == 0 and then to
n == c*(n/c), where n/c carries the signed division fix-up, so the
new rules match that expanded form and reduce it to the mask
comparison.

TryBot-Result: Gobot Gobot
Reviewed-by: Keith Randall
---
 .../internal/gc/testdata/arith_test.go        |  291 ++
 .../compile/internal/ssa/gen/generic.rules    |   35 +
 .../compile/internal/ssa/rewritegeneric.go    | 1546 ++++++++++++++++-
 .../compile/internal/test/divconst_test.go    |   26 +
 test/codegen/arithmetic.go                    |   21 +-
 5 files changed, 1910 insertions(+), 9 deletions(-)
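The identity the patch relies on can be sanity-checked outside the compiler. Below is a minimal standalone sketch (an editorial illustration, not part of the patch; the helper name divisibleMask is made up): for signed x and c = 1<<k, x%c == 0 holds exactly when the k low-order bits of x are zero, even for negative x, where x%c itself keeps the sign of x.

```go
package main

import "fmt"

// divisibleMask reports whether x is divisible by 1<<k using the
// mask comparison the new rules generate: x&(1<<k - 1) == 0.
func divisibleMask(x int64, k uint) bool {
	return x&(1<<k-1) == 0
}

func main() {
	// Spot-check x%(1<<k) == 0  <=>  x&(1<<k-1) == 0 for k = 5 (c = 32),
	// including negative values.
	for _, x := range []int64{-96, -64, -33, -1, 0, 1, 31, 32, 96} {
		const k = 5
		want := x%(1<<k) == 0
		got := divisibleMask(x, k)
		fmt.Printf("x=%4d  x%%32==0: %-5v  mask: %-5v\n", x, want, got)
		if want != got {
			panic("identity violated")
		}
	}
}
```

The unsigned case already reduces to this mask; the point of the patch is that the signed case only reaches it after the fix-up expansion shown in the rules below is pattern-matched away.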
diff --git a/src/cmd/compile/internal/gc/testdata/arith_test.go b/src/cmd/compile/internal/gc/testdata/arith_test.go
index 728ca56892f2a7..1ec9ae02c95fba 100644
--- a/src/cmd/compile/internal/gc/testdata/arith_test.go
+++ b/src/cmd/compile/internal/gc/testdata/arith_test.go
@@ -7,6 +7,7 @@ package main
 
 import (
+	"math"
 	"runtime"
 	"testing"
 )
@@ -924,6 +925,7 @@ func TestArithmetic(t *testing.T) {
 	testShiftRemoval(t)
 	testShiftedOps(t)
 	testDivFixUp(t)
+	testDivisibleSignedPow2(t)
 }
 
 // testDivFixUp ensures that signed division fix-ups are being generated.
@@ -952,3 +954,292 @@ func testDivFixUp(t *testing.T) {
 		g64 = z % int64(i)
 	}
 }
+
+//go:noinline
+func divisible_int8_2to1(x int8) bool {
+	return x%(1<<1) == 0
+}
+
+//go:noinline
+func divisible_int8_2to2(x int8) bool {
+	return x%(1<<2) == 0
+}
+
+//go:noinline
+func divisible_int8_2to3(x int8) bool {
+	return x%(1<<3) == 0
+}
+
+//go:noinline
+func divisible_int8_2to4(x int8) bool {
+	return x%(1<<4) == 0
+}
+
+//go:noinline
+func divisible_int8_2to5(x int8) bool {
+	return x%(1<<5) == 0
+}
+
+//go:noinline
+func divisible_int8_2to6(x int8) bool {
+	return x%(1<<6) == 0
+}
+
+//go:noinline
+func divisible_int16_2to1(x int16) bool {
+	return x%(1<<1) == 0
+}
+
+//go:noinline
+func divisible_int16_2to2(x int16) bool {
+	return x%(1<<2) == 0
+}
+
+//go:noinline
+func divisible_int16_2to3(x int16) bool {
+	return x%(1<<3) == 0
+}
+
+//go:noinline
+func divisible_int16_2to4(x int16) bool {
+	return x%(1<<4) == 0
+}
+
+//go:noinline
+func divisible_int16_2to5(x int16) bool {
+	return x%(1<<5) == 0
+}
+
+//go:noinline
+func divisible_int16_2to6(x int16) bool {
+	return x%(1<<6) == 0
+}
+
+//go:noinline
+func divisible_int16_2to7(x int16) bool {
+	return x%(1<<7) == 0
+}
+
+//go:noinline
+func divisible_int16_2to8(x int16) bool {
+	return x%(1<<8) == 0
+}
+
+//go:noinline
+func divisible_int16_2to9(x int16) bool {
+	return x%(1<<9) == 0
+}
+
+//go:noinline
+func divisible_int16_2to10(x int16) bool {
+	return x%(1<<10) == 0
+}
+
+//go:noinline
+func divisible_int16_2to11(x int16) bool {
+	return x%(1<<11) == 0
+}
+
+//go:noinline
+func divisible_int16_2to12(x int16) bool {
+	return x%(1<<12) == 0
+}
+
+//go:noinline
+func divisible_int16_2to13(x int16) bool {
+	return x%(1<<13) == 0
+}
+
+//go:noinline
+func divisible_int16_2to14(x int16) bool {
+	return x%(1<<14) == 0
+}
+
+//go:noinline
+func divisible_int32_2to4(x int32) bool {
+	return x%(1<<4) == 0
+}
+
+//go:noinline
+func divisible_int32_2to15(x int32) bool {
+	return x%(1<<15) == 0
+}
+
+//go:noinline
+func divisible_int32_2to26(x int32) bool {
+	return x%(1<<26) == 0
+}
+
+//go:noinline
+func divisible_int64_2to4(x int64) bool {
+	return x%(1<<4) == 0
+}
+
+//go:noinline
+func divisible_int64_2to15(x int64) bool {
+	return x%(1<<15) == 0
+}
+
+//go:noinline
+func divisible_int64_2to26(x int64) bool {
+	return x%(1<<26) == 0
+}
+
+//go:noinline
+func divisible_int64_2to34(x int64) bool {
+	return x%(1<<34) == 0
+}
+
+//go:noinline
+func divisible_int64_2to48(x int64) bool {
+	return x%(1<<48) == 0
+}
+
+//go:noinline
+func divisible_int64_2to57(x int64) bool {
+	return x%(1<<57) == 0
+}
+
+// testDivisibleSignedPow2 confirms that x%(1<<k) == 0 is computed
+// correctly for signed x across the tested widths and exponents.
+func testDivisibleSignedPow2(t *testing.T) {
+	tests8 := []struct {
+		k uint
+		f func(int8) bool
+	}{
+		{1, divisible_int8_2to1}, {2, divisible_int8_2to2}, {3, divisible_int8_2to3},
+		{4, divisible_int8_2to4}, {5, divisible_int8_2to5}, {6, divisible_int8_2to6},
+	}
+	for x := math.MinInt8; x <= math.MaxInt8; x++ {
+		for _, tc := range tests8 {
+			if want, got := int8(x)%(1<<tc.k) == 0, tc.f(int8(x)); got != want {
+				t.Errorf("divisible_int8_2to%d(%d) = %v want %v", tc.k, x, got, want)
+			}
+		}
+	}
+	tests16 := []struct {
+		k uint
+		f func(int16) bool
+	}{
+		{1, divisible_int16_2to1}, {2, divisible_int16_2to2}, {3, divisible_int16_2to3},
+		{4, divisible_int16_2to4}, {5, divisible_int16_2to5}, {6, divisible_int16_2to6},
+		{7, divisible_int16_2to7}, {8, divisible_int16_2to8}, {9, divisible_int16_2to9},
+		{10, divisible_int16_2to10}, {11, divisible_int16_2to11}, {12, divisible_int16_2to12},
+		{13, divisible_int16_2to13}, {14, divisible_int16_2to14},
+	}
+	for x := math.MinInt16; x <= math.MaxInt16; x++ {
+		for _, tc := range tests16 {
+			if want, got := int16(x)%(1<<tc.k) == 0, tc.f(int16(x)); got != want {
+				t.Errorf("divisible_int16_2to%d(%d) = %v want %v", tc.k, x, got, want)
+			}
+		}
+	}
+	tests32 := []struct {
+		k uint
+		f func(int32) bool
+	}{
+		{4, divisible_int32_2to4}, {15, divisible_int32_2to15}, {26, divisible_int32_2to26},
+	}
+	for _, x := range []int32{math.MinInt32, math.MinInt32 + 1, -1 << 26, -5, -1, 0, 1, 5, 1 << 26, math.MaxInt32} {
+		for _, tc := range tests32 {
+			if want, got := x%(1<<tc.k) == 0, tc.f(x); got != want {
+				t.Errorf("divisible_int32_2to%d(%d) = %v want %v", tc.k, x, got, want)
+			}
+		}
+	}
+	tests64 := []struct {
+		k uint
+		f func(int64) bool
+	}{
+		{4, divisible_int64_2to4}, {15, divisible_int64_2to15}, {26, divisible_int64_2to26},
+		{34, divisible_int64_2to34}, {48, divisible_int64_2to48}, {57, divisible_int64_2to57},
+	}
+	for _, x := range []int64{math.MinInt64, math.MinInt64 + 1, -1 << 57, -5, -1, 0, 1, 5, 1 << 57, math.MaxInt64} {
+		for _, tc := range tests64 {
+			if want, got := x%(1<<tc.k) == 0, tc.f(x); got != want {
+				t.Errorf("divisible_int64_2to%d(%d) = %v want %v", tc.k, x, got, want)
+			}
+		}
+	}
+}
diff --git a/src/cmd/compile/internal/ssa/gen/generic.rules b/src/cmd/compile/internal/ssa/gen/generic.rules
--- a/src/cmd/compile/internal/ssa/gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/gen/generic.rules
@@ ... @@
 (Mod64u <t> x (Const64 [c])) && x.Op != OpConst64 && c > 0 && umagicOK(64,c)
   -> (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c])))
 
+// Divisibility checks for signed integers against a power of two constant are a simple mask.
+// However, we must match against the rewritten n%c == 0 -> n - c*(n/c) == 0 -> n == c*(n/c)
+// where n/c contains fixup code to handle signed n.
+(Eq8 n (Lsh8x64
+  (Rsh8x64
+    (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar])))
+    (Const64 <typ.UInt64> [k]))
+  (Const64 <typ.UInt64> [k]))
+) && k > 0 && k < 7 && kbar == 8 - k
+  -> (Eq8 (And8 <t> n (Const8 <t> [int64(1<<uint(k)-1)])) (Const8 <t> [0]))
+
+(Eq16 n (Lsh16x64
+  (Rsh16x64
+    (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])))
+    (Const64 <typ.UInt64> [k]))
+  (Const64 <typ.UInt64> [k]))
+) && k > 0 && k < 15 && kbar == 16 - k
+  -> (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+
+(Eq32 n (Lsh32x64
+  (Rsh32x64
+    (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])))
+    (Const64 <typ.UInt64> [k]))
+  (Const64 <typ.UInt64> [k]))
+) && k > 0 && k < 31 && kbar == 32 - k
+  -> (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+
+(Eq64 n (Lsh64x64
+  (Rsh64x64
+    (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])))
+    (Const64 <typ.UInt64> [k]))
+  (Const64 <typ.UInt64> [k]))
+) && k > 0 && k < 63 && kbar == 64 - k
+  -> (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+
 (Eq(8|16|32|64) s:(Sub(8|16|32|64) x y) (Const(8|16|32|64) [0])) && s.Uses == 1 -> (Eq(8|16|32|64) x y)
 (Neq(8|16|32|64) s:(Sub(8|16|32|64) x y) (Const(8|16|32|64) [0])) && s.Uses == 1 -> (Neq(8|16|32|64) x y)
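To see why the rules must match such a large tree, it helps to write out the fix-up expansion of a signed division by 1<<k that the earlier opt rules produce. The following is a standalone sketch (function names are mine, not the compiler's), assuming a 16-bit operand and 0 < k < 15; it is exactly the Add/Rsh/Lsh shape the Eq16 rule above matches, with kbar = 16 - k:

```go
package main

import "fmt"

// signedDivPow2 computes n / (1<<k) for signed n the way the compiler
// expands it: bias n by 1<<k - 1 only when it is negative, using shifts
// instead of a branch, then shift right arithmetically.
func signedDivPow2(n int16, k uint) int16 {
	kbar := 16 - k
	bias := int16(uint16(n>>15) >> kbar) // 0 if n >= 0, 1<<k - 1 if n < 0
	return (n + bias) >> k
}

func main() {
	const k = 3 // divide by 8
	for _, n := range []int16{-17, -16, -9, -1, 0, 1, 15, 16, 17} {
		q := signedDivPow2(n, k)
		// n == (n/c)*c is the divisibility test the rules recognize.
		fmt.Printf("n=%4d  n/8=%3d  expanded=%3d  divisible=%v\n",
			n, n/8, q, n == q<<k)
		if q != n/8 {
			panic("fixup expansion disagrees with n/8")
		}
	}
}
```

The bias turns the arithmetic shift's floor division into Go's truncated division, and the `n == q<<k` comparison is the `(Eq n (Lsh (Rsh (Add ...))))` form being matched. Each rule appears in four generated variants because Eq and Add are commutative, so either argument order can reach the rewriter.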
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index fe2fbb82c09af6..d30e65b7f18d95 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -98,17 +98,17 @@ func rewriteValuegeneric(v *Value) bool {
 	case OpDiv8u:
 		return rewriteValuegeneric_OpDiv8u_0(v)
 	case OpEq16:
-		return rewriteValuegeneric_OpEq16_0(v)
+		return rewriteValuegeneric_OpEq16_0(v) || rewriteValuegeneric_OpEq16_10(v)
 	case OpEq32:
-		return rewriteValuegeneric_OpEq32_0(v)
+		return rewriteValuegeneric_OpEq32_0(v) || rewriteValuegeneric_OpEq32_10(v)
 	case OpEq32F:
 		return rewriteValuegeneric_OpEq32F_0(v)
 	case OpEq64:
-		return rewriteValuegeneric_OpEq64_0(v)
+		return rewriteValuegeneric_OpEq64_0(v) || rewriteValuegeneric_OpEq64_10(v)
 	case OpEq64F:
 		return rewriteValuegeneric_OpEq64F_0(v)
 	case OpEq8:
-		return rewriteValuegeneric_OpEq8_0(v)
+		return rewriteValuegeneric_OpEq8_0(v) || rewriteValuegeneric_OpEq8_10(v)
 	case OpEqB:
 		return rewriteValuegeneric_OpEqB_0(v)
 	case OpEqInter:
@@ -8775,6 +8775,7 @@ func rewriteValuegeneric_OpDiv8u_0(v *Value) bool {
 }
 func rewriteValuegeneric_OpEq16_0(v *Value) bool {
 	b := v.Block
+	typ := &b.Func.Config.Types
 	// match: (Eq16 x x)
 	// cond:
 	// result: (ConstBool [1])
@@ -8951,6 +8952,389 @@ func rewriteValuegeneric_OpEq16_0(v *Value) bool {
 		v.AuxInt = b2i(c == d)
 		return true
 	}
+	// match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+	// cond: k > 0 && k < 15 && kbar == 16 - k
+	// result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+	for {
+		_ = v.Args[1]
+		n := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpLsh16x64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpRsh16x64 {
+			break
+		}
+		_ = v_1_0.Args[1]
+		v_1_0_0 := v_1_0.Args[0]
+		if v_1_0_0.Op != OpAdd16 {
+			break
+		}
+		t := v_1_0_0.Type
+		_ = v_1_0_0.Args[1]
+		if n != v_1_0_0.Args[0] {
+			break
+		}
+		v_1_0_0_1 := v_1_0_0.Args[1]
+		if v_1_0_0_1.Op != OpRsh16Ux64 {
+			break
+		}
+		if v_1_0_0_1.Type != t {
+			break
+		}
+		_ = v_1_0_0_1.Args[1]
+		v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+		if v_1_0_0_1_0.Op != OpRsh16x64 {
+			break
+		}
+		if v_1_0_0_1_0.Type != t {
+			break
+		}
+		_ = v_1_0_0_1_0.Args[1]
+		if n != v_1_0_0_1_0.Args[0] {
+			break
+		}
+		v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+		if v_1_0_0_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_0_0_1_0_1.AuxInt != 15 {
+			break
+		}
+		v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+		if v_1_0_0_1_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_1_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_1_0_0_1_1.AuxInt
+		v_1_0_1 := v_1_0.Args[1]
+		if v_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_1_0_1.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst64 {
+			break
+		}
+		if v_1_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 15 && kbar == 16-k) {
+			break
+		}
+		v.reset(OpEq16)
+		v0 := b.NewValue0(v.Pos, OpAnd16, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst16, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst16, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
+	// match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+	// cond: k > 0 && k < 15 && kbar == 16 - k
+	// result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+	for {
+		_ = v.Args[1]
+		n := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpLsh16x64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpRsh16x64 {
+			break
+		}
+		_ = v_1_0.Args[1]
+		v_1_0_0 := v_1_0.Args[0]
+		if v_1_0_0.Op != OpAdd16 {
+			break
+		}
+		t := v_1_0_0.Type
+		_ = v_1_0_0.Args[1]
+		v_1_0_0_0 := v_1_0_0.Args[0]
+		if v_1_0_0_0.Op != OpRsh16Ux64 {
+			break
+		}
+		if v_1_0_0_0.Type != t {
+			break
+		}
+		_ = v_1_0_0_0.Args[1]
+		v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+		if v_1_0_0_0_0.Op != OpRsh16x64 {
+			break
+		}
+		if v_1_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_1_0_0_0_0.Args[1]
+		if n != v_1_0_0_0_0.Args[0] {
+			break
+		}
+		v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+		if v_1_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_0_0_0_0_1.AuxInt != 15 {
+			break
+		}
+		v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+		if v_1_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_1_0_0_0_1.AuxInt
+		if n != v_1_0_0.Args[1] {
+			break
+		}
+		v_1_0_1 := v_1_0.Args[1]
+		if v_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_1_0_1.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst64 {
+			break
+		}
+		if v_1_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 15 && kbar == 16-k) {
+			break
+		}
+		v.reset(OpEq16)
+		v0 := b.NewValue0(v.Pos, OpAnd16, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst16, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst16, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
+	// match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+	// cond: k > 0 && k < 15 && kbar == 16 - k
+	// result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+	for {
+		n := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpLsh16x64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpRsh16x64 {
+			break
+		}
+		_ = v_0_0.Args[1]
+		v_0_0_0 := v_0_0.Args[0]
+		if v_0_0_0.Op != OpAdd16 {
+			break
+		}
+		t := v_0_0_0.Type
+		_ = v_0_0_0.Args[1]
+		if n != v_0_0_0.Args[0] {
+			break
+		}
+		v_0_0_0_1 := v_0_0_0.Args[1]
+		if v_0_0_0_1.Op != OpRsh16Ux64 {
+			break
+		}
+		if v_0_0_0_1.Type != t {
+			break
+		}
+		_ = v_0_0_0_1.Args[1]
+		v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+		if v_0_0_0_1_0.Op != OpRsh16x64 {
+			break
+		}
+		if v_0_0_0_1_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_1_0.Args[1]
+		if n != v_0_0_0_1_0.Args[0] {
+			break
+		}
+		v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+		if v_0_0_0_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_0_0_1_0_1.AuxInt != 15 {
+			break
+		}
+		v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+		if v_0_0_0_1_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_1_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_0_0_0_1_1.AuxInt
+		v_0_0_1 := v_0_0.Args[1]
+		if v_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_0_0_1.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 15 && kbar == 16-k) {
+			break
+		}
+		v.reset(OpEq16)
+		v0 := b.NewValue0(v.Pos, OpAnd16, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst16, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst16, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
+	// match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+	// cond: k > 0 && k < 15 && kbar == 16 - k
+	// result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+	for {
+		n := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpLsh16x64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpRsh16x64 {
+			break
+		}
+		_ = v_0_0.Args[1]
+		v_0_0_0 := v_0_0.Args[0]
+		if v_0_0_0.Op != OpAdd16 {
+			break
+		}
+		t := v_0_0_0.Type
+		_ = v_0_0_0.Args[1]
+		v_0_0_0_0 := v_0_0_0.Args[0]
+		if v_0_0_0_0.Op != OpRsh16Ux64 {
+			break
+		}
+		if v_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_0.Args[1]
+		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+		if v_0_0_0_0_0.Op != OpRsh16x64 {
+			break
+		}
+		if v_0_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_0_0.Args[1]
+		if n != v_0_0_0_0_0.Args[0] {
+			break
+		}
+		v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+		if v_0_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_0_0_0_0_1.AuxInt != 15 {
+			break
+		}
+		v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+		if v_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_0_0_0_0_1.AuxInt
+		if n != v_0_0_0.Args[1] {
+			break
+		}
+		v_0_0_1 := v_0_0.Args[1]
+		if v_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_0_0_1.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 15 && kbar == 16-k) {
+			break
+		}
+		v.reset(OpEq16)
+		v0 := b.NewValue0(v.Pos, OpAnd16, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst16, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst16, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
@@ ... @@ func rewriteValuegeneric_OpEq32_0(v *Value) bool {
+	// match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+	// cond: k > 0 && k < 31 && kbar == 32 - k
+	// result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+	for {
+		_ = v.Args[1]
+		n := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpLsh32x64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpRsh32x64 {
+			break
+		}
+		_ = v_1_0.Args[1]
+		v_1_0_0 := v_1_0.Args[0]
+		if v_1_0_0.Op != OpAdd32 {
+			break
+		}
+		t := v_1_0_0.Type
+		_ = v_1_0_0.Args[1]
+		if n != v_1_0_0.Args[0] {
+			break
+		}
+		v_1_0_0_1 := v_1_0_0.Args[1]
+		if v_1_0_0_1.Op != OpRsh32Ux64 {
+			break
+		}
+		if v_1_0_0_1.Type != t {
+			break
+		}
+		_ = v_1_0_0_1.Args[1]
+		v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+		if v_1_0_0_1_0.Op != OpRsh32x64 {
+			break
+		}
+		if v_1_0_0_1_0.Type != t {
+			break
+		}
+		_ = v_1_0_0_1_0.Args[1]
+		if n != v_1_0_0_1_0.Args[0] {
+			break
+		}
+		v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+		if v_1_0_0_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_0_0_1_0_1.AuxInt != 31 {
+			break
+		}
+		v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+		if v_1_0_0_1_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_1_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_1_0_0_1_1.AuxInt
+		v_1_0_1 := v_1_0.Args[1]
+		if v_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_1_0_1.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst64 {
+			break
+		}
+		if v_1_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 31 && kbar == 32-k) {
+			break
+		}
+		v.reset(OpEq32)
+		v0 := b.NewValue0(v.Pos, OpAnd32, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst32, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst32, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
+	// match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+	// cond: k > 0 && k < 31 && kbar == 32 - k
+	// result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+	for {
+		_ = v.Args[1]
+		n := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpLsh32x64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpRsh32x64 {
+			break
+		}
+		_ = v_1_0.Args[1]
+		v_1_0_0 := v_1_0.Args[0]
+		if v_1_0_0.Op != OpAdd32 {
+			break
+		}
+		t := v_1_0_0.Type
+		_ = v_1_0_0.Args[1]
+		v_1_0_0_0 := v_1_0_0.Args[0]
+		if v_1_0_0_0.Op != OpRsh32Ux64 {
+			break
+		}
+		if v_1_0_0_0.Type != t {
+			break
+		}
+		_ = v_1_0_0_0.Args[1]
+		v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+		if v_1_0_0_0_0.Op != OpRsh32x64 {
+			break
+		}
+		if v_1_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_1_0_0_0_0.Args[1]
+		if n != v_1_0_0_0_0.Args[0] {
+			break
+		}
+		v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+		if v_1_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_0_0_0_0_1.AuxInt != 31 {
+			break
+		}
+		v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+		if v_1_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_1_0_0_0_1.AuxInt
+		if n != v_1_0_0.Args[1] {
+			break
+		}
+		v_1_0_1 := v_1_0.Args[1]
+		if v_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_1_0_1.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst64 {
+			break
+		}
+		if v_1_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 31 && kbar == 32-k) {
+			break
+		}
+		v.reset(OpEq32)
+		v0 := b.NewValue0(v.Pos, OpAnd32, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst32, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst32, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
+	// match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+	// cond: k > 0 && k < 31 && kbar == 32 - k
+	// result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+	for {
+		n := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpLsh32x64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpRsh32x64 {
+			break
+		}
+		_ = v_0_0.Args[1]
+		v_0_0_0 := v_0_0.Args[0]
+		if v_0_0_0.Op != OpAdd32 {
+			break
+		}
+		t := v_0_0_0.Type
+		_ = v_0_0_0.Args[1]
+		if n != v_0_0_0.Args[0] {
+			break
+		}
+		v_0_0_0_1 := v_0_0_0.Args[1]
+		if v_0_0_0_1.Op != OpRsh32Ux64 {
+			break
+		}
+		if v_0_0_0_1.Type != t {
+			break
+		}
+		_ = v_0_0_0_1.Args[1]
+		v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+		if v_0_0_0_1_0.Op != OpRsh32x64 {
+			break
+		}
+		if v_0_0_0_1_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_1_0.Args[1]
+		if n != v_0_0_0_1_0.Args[0] {
+			break
+		}
+		v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+		if v_0_0_0_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_0_0_1_0_1.AuxInt != 31 {
+			break
+		}
+		v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+		if v_0_0_0_1_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_1_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_0_0_0_1_1.AuxInt
+		v_0_0_1 := v_0_0.Args[1]
+		if v_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_0_0_1.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 31 && kbar == 32-k) {
+			break
+		}
+		v.reset(OpEq32)
+		v0 := b.NewValue0(v.Pos, OpAnd32, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst32, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst32, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
+	// match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+	// cond: k > 0 && k < 31 && kbar == 32 - k
+	// result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+	for {
+		n := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpLsh32x64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpRsh32x64 {
+			break
+		}
+		_ = v_0_0.Args[1]
+		v_0_0_0 := v_0_0.Args[0]
+		if v_0_0_0.Op != OpAdd32 {
+			break
+		}
+		t := v_0_0_0.Type
+		_ = v_0_0_0.Args[1]
+		v_0_0_0_0 := v_0_0_0.Args[0]
+		if v_0_0_0_0.Op != OpRsh32Ux64 {
+			break
+		}
+		if v_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_0.Args[1]
+		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+		if v_0_0_0_0_0.Op != OpRsh32x64 {
+			break
+		}
+		if v_0_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_0_0.Args[1]
+		if n != v_0_0_0_0_0.Args[0] {
+			break
+		}
+		v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+		if v_0_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_0_0_0_0_1.AuxInt != 31 {
+			break
+		}
+		v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+		if v_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_0_0_0_0_1.AuxInt
+		if n != v_0_0_0.Args[1] {
+			break
+		}
+		v_0_0_1 := v_0_0.Args[1]
+		if v_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_0_0_1.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 31 && kbar == 32-k) {
+			break
+		}
+		v.reset(OpEq32)
+		v0 := b.NewValue0(v.Pos, OpAnd32, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst32, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst32, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
@@ ... @@ func rewriteValuegeneric_OpEq64_0(v *Value) bool {
+	// match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+	// cond: k > 0 && k < 63 && kbar == 64 - k
+	// result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+	for {
+		_ = v.Args[1]
+		n := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpLsh64x64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpRsh64x64 {
+			break
+		}
+		_ = v_1_0.Args[1]
+		v_1_0_0 := v_1_0.Args[0]
+		if v_1_0_0.Op != OpAdd64 {
+			break
+		}
+		t := v_1_0_0.Type
+		_ = v_1_0_0.Args[1]
+		if n != v_1_0_0.Args[0] {
+			break
+		}
+		v_1_0_0_1 := v_1_0_0.Args[1]
+		if v_1_0_0_1.Op != OpRsh64Ux64 {
+			break
+		}
+		if v_1_0_0_1.Type != t {
+			break
+		}
+		_ = v_1_0_0_1.Args[1]
+		v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+		if v_1_0_0_1_0.Op != OpRsh64x64 {
+			break
+		}
+		if v_1_0_0_1_0.Type != t {
+			break
+		}
+		_ = v_1_0_0_1_0.Args[1]
+		if n != v_1_0_0_1_0.Args[0] {
+			break
+		}
+		v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+		if v_1_0_0_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_0_0_1_0_1.AuxInt != 63 {
+			break
+		}
+		v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+		if v_1_0_0_1_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_1_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_1_0_0_1_1.AuxInt
+		v_1_0_1 := v_1_0.Args[1]
+		if v_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_1_0_1.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst64 {
+			break
+		}
+		if v_1_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 63 && kbar == 64-k) {
+			break
+		}
+		v.reset(OpEq64)
+		v0 := b.NewValue0(v.Pos, OpAnd64, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst64, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst64, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
+	// match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+	// cond: k > 0 && k < 63 && kbar == 64 - k
+	// result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+	for {
+		_ = v.Args[1]
+		n := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpLsh64x64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpRsh64x64 {
+			break
+		}
+		_ = v_1_0.Args[1]
+		v_1_0_0 := v_1_0.Args[0]
+		if v_1_0_0.Op != OpAdd64 {
+			break
+		}
+		t := v_1_0_0.Type
+		_ = v_1_0_0.Args[1]
+		v_1_0_0_0 := v_1_0_0.Args[0]
+		if v_1_0_0_0.Op != OpRsh64Ux64 {
+			break
+		}
+		if v_1_0_0_0.Type != t {
+			break
+		}
+		_ = v_1_0_0_0.Args[1]
+		v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+		if v_1_0_0_0_0.Op != OpRsh64x64 {
+			break
+		}
+		if v_1_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_1_0_0_0_0.Args[1]
+		if n != v_1_0_0_0_0.Args[0] {
+			break
+		}
+		v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+		if v_1_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_0_0_0_0_1.AuxInt != 63 {
+			break
+		}
+		v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+		if v_1_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_1_0_0_0_1.AuxInt
+		if n != v_1_0_0.Args[1] {
+			break
+		}
+		v_1_0_1 := v_1_0.Args[1]
+		if v_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_1_0_1.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst64 {
+			break
+		}
+		if v_1_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 63 && kbar == 64-k) {
+			break
+		}
+		v.reset(OpEq64)
+		v0 := b.NewValue0(v.Pos, OpAnd64, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst64, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst64, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
+	// match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+	// cond: k > 0 && k < 63 && kbar == 64 - k
+	// result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+	for {
+		n := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpLsh64x64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpRsh64x64 {
+			break
+		}
+		_ = v_0_0.Args[1]
+		v_0_0_0 := v_0_0.Args[0]
+		if v_0_0_0.Op != OpAdd64 {
+			break
+		}
+		t := v_0_0_0.Type
+		_ = v_0_0_0.Args[1]
+		if n != v_0_0_0.Args[0] {
+			break
+		}
+		v_0_0_0_1 := v_0_0_0.Args[1]
+		if v_0_0_0_1.Op != OpRsh64Ux64 {
+			break
+		}
+		if v_0_0_0_1.Type != t {
+			break
+		}
+		_ = v_0_0_0_1.Args[1]
+		v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+		if v_0_0_0_1_0.Op != OpRsh64x64 {
+			break
+		}
+		if v_0_0_0_1_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_1_0.Args[1]
+		if n != v_0_0_0_1_0.Args[0] {
+			break
+		}
+		v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+		if v_0_0_0_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_0_0_1_0_1.AuxInt != 63 {
+			break
+		}
+		v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+		if v_0_0_0_1_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_1_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_0_0_0_1_1.AuxInt
+		v_0_0_1 := v_0_0.Args[1]
+		if v_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_0_0_1.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 63 && kbar == 64-k) {
+			break
+		}
+		v.reset(OpEq64)
+		v0 := b.NewValue0(v.Pos, OpAnd64, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst64, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst64, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
+	// match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+	// cond: k > 0 && k < 63 && kbar == 64 - k
+	// result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+	for {
+		n := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpLsh64x64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpRsh64x64 {
+			break
+		}
+		_ = v_0_0.Args[1]
+		v_0_0_0 := v_0_0.Args[0]
+		if v_0_0_0.Op != OpAdd64 {
+			break
+		}
+		t := v_0_0_0.Type
+		_ = v_0_0_0.Args[1]
+		v_0_0_0_0 := v_0_0_0.Args[0]
+		if v_0_0_0_0.Op != OpRsh64Ux64 {
+			break
+		}
+		if v_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_0.Args[1]
+		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+		if v_0_0_0_0_0.Op != OpRsh64x64 {
+			break
+		}
+		if v_0_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_0_0.Args[1]
+		if n != v_0_0_0_0_0.Args[0] {
+			break
+		}
+		v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+		if v_0_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_0_0_0_0_1.AuxInt != 63 {
+			break
+		}
+		v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+		if v_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_0_0_0_0_1.AuxInt
+		if n != v_0_0_0.Args[1] {
+			break
+		}
+		v_0_0_1 := v_0_0.Args[1]
+		if v_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_0_0_1.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 63 && kbar == 64-k) {
+			break
+		}
+		v.reset(OpEq64)
+		v0 := b.NewValue0(v.Pos, OpAnd64, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst64, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst64, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
@@ ... @@ func rewriteValuegeneric_OpEq8_0(v *Value) bool {
+	// match: (Eq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+	// cond: k > 0 && k < 7 && kbar == 8 - k
+	// result: (Eq8 (And8 <t> n (Const8 <t> [int64(1<<uint(k)-1)])) (Const8 <t> [0]))
+	for {
+		_ = v.Args[1]
+		n := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpLsh8x64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpRsh8x64 {
+			break
+		}
+		_ = v_1_0.Args[1]
+		v_1_0_0 := v_1_0.Args[0]
+		if v_1_0_0.Op != OpAdd8 {
+			break
+		}
+		t := v_1_0_0.Type
+		_ = v_1_0_0.Args[1]
+		if n != v_1_0_0.Args[0] {
+			break
+		}
+		v_1_0_0_1 := v_1_0_0.Args[1]
+		if v_1_0_0_1.Op != OpRsh8Ux64 {
+			break
+		}
+		if v_1_0_0_1.Type != t {
+			break
+		}
+		_ = v_1_0_0_1.Args[1]
+		v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+		if v_1_0_0_1_0.Op != OpRsh8x64 {
+			break
+		}
+		if v_1_0_0_1_0.Type != t {
+			break
+		}
+		_ = v_1_0_0_1_0.Args[1]
+		if n != v_1_0_0_1_0.Args[0] {
+			break
+		}
+		v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+		if v_1_0_0_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_0_0_1_0_1.AuxInt != 7 {
+			break
+		}
+		v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+		if v_1_0_0_1_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_1_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_1_0_0_1_1.AuxInt
+		v_1_0_1 := v_1_0.Args[1]
+		if v_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_1_0_1.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst64 {
+			break
+		}
+		if v_1_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 7 && kbar == 8-k) {
+			break
+		}
+		v.reset(OpEq8)
+		v0 := b.NewValue0(v.Pos, OpAnd8, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst8, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst8, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
+	// match: (Eq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+	// cond: k > 0 && k < 7 && kbar == 8 - k
+	// result: (Eq8 (And8 <t> n (Const8 <t> [int64(1<<uint(k)-1)])) (Const8 <t> [0]))
+	for {
+		_ = v.Args[1]
+		n := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpLsh8x64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpRsh8x64 {
+			break
+		}
+		_ = v_1_0.Args[1]
+		v_1_0_0 := v_1_0.Args[0]
+		if v_1_0_0.Op != OpAdd8 {
+			break
+		}
+		t := v_1_0_0.Type
+		_ = v_1_0_0.Args[1]
+		v_1_0_0_0 := v_1_0_0.Args[0]
+		if v_1_0_0_0.Op != OpRsh8Ux64 {
+			break
+		}
+		if v_1_0_0_0.Type != t {
+			break
+		}
+		_ = v_1_0_0_0.Args[1]
+		v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+		if v_1_0_0_0_0.Op != OpRsh8x64 {
+			break
+		}
+		if v_1_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_1_0_0_0_0.Args[1]
+		if n != v_1_0_0_0_0.Args[0] {
+			break
+		}
+		v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+		if v_1_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_0_0_0_0_1.AuxInt != 7 {
+			break
+		}
+		v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+		if v_1_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_1_0_0_0_1.AuxInt
+		if n != v_1_0_0.Args[1] {
+			break
+		}
+		v_1_0_1 := v_1_0.Args[1]
+		if v_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_1_0_1.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst64 {
+			break
+		}
+		if v_1_1.Type != typ.UInt64 {
+			break
+		}
+		if v_1_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 7 && kbar == 8-k) {
+			break
+		}
+		v.reset(OpEq8)
+		v0 := b.NewValue0(v.Pos, OpAnd8, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst8, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst8, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
+	// match: (Eq8 (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+	// cond: k > 0 && k < 7 && kbar == 8 - k
+	// result: (Eq8 (And8 <t> n (Const8 <t> [int64(1<<uint(k)-1)])) (Const8 <t> [0]))
+	for {
+		n := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpLsh8x64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpRsh8x64 {
+			break
+		}
+		_ = v_0_0.Args[1]
+		v_0_0_0 := v_0_0.Args[0]
+		if v_0_0_0.Op != OpAdd8 {
+			break
+		}
+		t := v_0_0_0.Type
+		_ = v_0_0_0.Args[1]
+		if n != v_0_0_0.Args[0] {
+			break
+		}
+		v_0_0_0_1 := v_0_0_0.Args[1]
+		if v_0_0_0_1.Op != OpRsh8Ux64 {
+			break
+		}
+		if v_0_0_0_1.Type != t {
+			break
+		}
+		_ = v_0_0_0_1.Args[1]
+		v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+		if v_0_0_0_1_0.Op != OpRsh8x64 {
+			break
+		}
+		if v_0_0_0_1_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_1_0.Args[1]
+		if n != v_0_0_0_1_0.Args[0] {
+			break
+		}
+		v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+		if v_0_0_0_1_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_1_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_0_0_1_0_1.AuxInt != 7 {
+			break
+		}
+		v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+		if v_0_0_0_1_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_1_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_0_0_0_1_1.AuxInt
+		v_0_0_1 := v_0_0.Args[1]
+		if v_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_0_0_1.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 7 && kbar == 8-k) {
+			break
+		}
+		v.reset(OpEq8)
+		v0 := b.NewValue0(v.Pos, OpAnd8, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst8, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst8, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
+	// match: (Eq8 (Lsh8x64 (Rsh8x64 (Add8 <t> (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+	// cond: k > 0 && k < 7 && kbar == 8 - k
+	// result: (Eq8 (And8 <t> n (Const8 <t> [int64(1<<uint(k)-1)])) (Const8 <t> [0]))
+	for {
+		n := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpLsh8x64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpRsh8x64 {
+			break
+		}
+		_ = v_0_0.Args[1]
+		v_0_0_0 := v_0_0.Args[0]
+		if v_0_0_0.Op != OpAdd8 {
+			break
+		}
+		t := v_0_0_0.Type
+		_ = v_0_0_0.Args[1]
+		v_0_0_0_0 := v_0_0_0.Args[0]
+		if v_0_0_0_0.Op != OpRsh8Ux64 {
+			break
+		}
+		if v_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_0.Args[1]
+		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+		if v_0_0_0_0_0.Op != OpRsh8x64 {
+			break
+		}
+		if v_0_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_0_0.Args[1]
+		if n != v_0_0_0_0_0.Args[0] {
+			break
+		}
+		v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+		if v_0_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_0_0_0_0_1.AuxInt != 7 {
+			break
+		}
+		v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+		if v_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_0_0_0_0_1.AuxInt
+		if n != v_0_0_0.Args[1] {
+			break
+		}
+		v_0_0_1 := v_0_0.Args[1]
+		if v_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_0_0_1.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 7 && kbar == 8-k) {
+			break
+		}
+		v.reset(OpEq8)
+		v0 := b.NewValue0(v.Pos, OpAnd8, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst8, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst8, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
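The remaining two files in the diffstat, src/cmd/compile/internal/test/divconst_test.go and test/codegen/arithmetic.go, are not shown above. For orientation, a benchmark in the spirit of divconst_test.go might look like the following sketch (my illustration only; the function name, package name, and constants are assumptions, not the patch's contents):

```go
package test

import "testing"

var (
	divisiblePow2Input int64 = 123456789123456789 // global so the check isn't constant-folded
	divisiblePow2Sink  bool
)

// BenchmarkDivisiblePow2constI64 measures the signed divisibility-by-2^k
// check that the new rules reduce to a mask test.
func BenchmarkDivisiblePow2constI64(b *testing.B) {
	for i := 0; i < b.N; i++ {
		divisiblePow2Sink = divisiblePow2Input%(1<<12) == 0
	}
}
```

The codegen test in test/codegen/arithmetic.go presumably asserts that the compiled divisibility check uses a mask-and-test sequence rather than division and fix-up instructions, which is where the speedup in the benchmark comes from.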