package main
import (
+ "math"
"runtime"
"testing"
)
testShiftRemoval(t)
testShiftedOps(t)
testDivFixUp(t)
+ testDivisibleSignedPow2(t)
}
// testDivFixUp ensures that signed division fix-ups are being generated.
g64 = z % int64(i)
}
}
+
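+// The divisible_* helpers are marked //go:noinline so that each check below
+// calls a separately compiled function whose body contains the x%(1<<k)==0
+// pattern that the divisibility rewrite rules target.
+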
+//go:noinline
+func divisible_int8_2to1(x int8) bool {
+ return x%(1<<1) == 0
+}
+
+//go:noinline
+func divisible_int8_2to2(x int8) bool {
+ return x%(1<<2) == 0
+}
+
+//go:noinline
+func divisible_int8_2to3(x int8) bool {
+ return x%(1<<3) == 0
+}
+
+//go:noinline
+func divisible_int8_2to4(x int8) bool {
+ return x%(1<<4) == 0
+}
+
+//go:noinline
+func divisible_int8_2to5(x int8) bool {
+ return x%(1<<5) == 0
+}
+
+//go:noinline
+func divisible_int8_2to6(x int8) bool {
+ return x%(1<<6) == 0
+}
+
+//go:noinline
+func divisible_int16_2to1(x int16) bool {
+ return x%(1<<1) == 0
+}
+
+//go:noinline
+func divisible_int16_2to2(x int16) bool {
+ return x%(1<<2) == 0
+}
+
+//go:noinline
+func divisible_int16_2to3(x int16) bool {
+ return x%(1<<3) == 0
+}
+
+//go:noinline
+func divisible_int16_2to4(x int16) bool {
+ return x%(1<<4) == 0
+}
+
+//go:noinline
+func divisible_int16_2to5(x int16) bool {
+ return x%(1<<5) == 0
+}
+
+//go:noinline
+func divisible_int16_2to6(x int16) bool {
+ return x%(1<<6) == 0
+}
+
+//go:noinline
+func divisible_int16_2to7(x int16) bool {
+ return x%(1<<7) == 0
+}
+
+//go:noinline
+func divisible_int16_2to8(x int16) bool {
+ return x%(1<<8) == 0
+}
+
+//go:noinline
+func divisible_int16_2to9(x int16) bool {
+ return x%(1<<9) == 0
+}
+
+//go:noinline
+func divisible_int16_2to10(x int16) bool {
+ return x%(1<<10) == 0
+}
+
+//go:noinline
+func divisible_int16_2to11(x int16) bool {
+ return x%(1<<11) == 0
+}
+
+//go:noinline
+func divisible_int16_2to12(x int16) bool {
+ return x%(1<<12) == 0
+}
+
+//go:noinline
+func divisible_int16_2to13(x int16) bool {
+ return x%(1<<13) == 0
+}
+
+//go:noinline
+func divisible_int16_2to14(x int16) bool {
+ return x%(1<<14) == 0
+}
+
+//go:noinline
+func divisible_int32_2to4(x int32) bool {
+ return x%(1<<4) == 0
+}
+
+//go:noinline
+func divisible_int32_2to15(x int32) bool {
+ return x%(1<<15) == 0
+}
+
+//go:noinline
+func divisible_int32_2to26(x int32) bool {
+ return x%(1<<26) == 0
+}
+
+//go:noinline
+func divisible_int64_2to4(x int64) bool {
+ return x%(1<<4) == 0
+}
+
+//go:noinline
+func divisible_int64_2to15(x int64) bool {
+ return x%(1<<15) == 0
+}
+
+//go:noinline
+func divisible_int64_2to26(x int64) bool {
+ return x%(1<<26) == 0
+}
+
+//go:noinline
+func divisible_int64_2to34(x int64) bool {
+ return x%(1<<34) == 0
+}
+
+//go:noinline
+func divisible_int64_2to48(x int64) bool {
+ return x%(1<<48) == 0
+}
+
+//go:noinline
+func divisible_int64_2to57(x int64) bool {
+ return x%(1<<57) == 0
+}
+
+// testDivisibleSignedPow2 confirms that the rewrite of x%(1<<k)==0 into a
+// mask test computes correct results for signed x.
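+// The rewrite is valid because for signed x, x%(1<<k)==0 holds exactly when
+// the low k bits of x are zero, i.e. when x&(1<<k-1) == 0.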
+func testDivisibleSignedPow2(t *testing.T) {
+ var i int64
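+	// pow2[k] == 1<<k; entry k pairs with the divisible_*_2tok helper for the same k.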
+ var pow2 = []int64{
+ 1,
+ 1 << 1,
+ 1 << 2,
+ 1 << 3,
+ 1 << 4,
+ 1 << 5,
+ 1 << 6,
+ 1 << 7,
+ 1 << 8,
+ 1 << 9,
+ 1 << 10,
+ 1 << 11,
+ 1 << 12,
+ 1 << 13,
+ 1 << 14,
+ }
+ // exhaustive test for int8
+ for i = math.MinInt8; i <= math.MaxInt8; i++ {
+ if want, got := int8(i)%int8(pow2[1]) == 0, divisible_int8_2to1(int8(i)); got != want {
+ t.Errorf("divisible_int8_2to1(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int8(i)%int8(pow2[2]) == 0, divisible_int8_2to2(int8(i)); got != want {
+ t.Errorf("divisible_int8_2to2(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int8(i)%int8(pow2[3]) == 0, divisible_int8_2to3(int8(i)); got != want {
+ t.Errorf("divisible_int8_2to3(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int8(i)%int8(pow2[4]) == 0, divisible_int8_2to4(int8(i)); got != want {
+ t.Errorf("divisible_int8_2to4(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int8(i)%int8(pow2[5]) == 0, divisible_int8_2to5(int8(i)); got != want {
+ t.Errorf("divisible_int8_2to5(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int8(i)%int8(pow2[6]) == 0, divisible_int8_2to6(int8(i)); got != want {
+ t.Errorf("divisible_int8_2to6(%d) = %v want %v", i, got, want)
+ }
+ }
+ // exhaustive test for int16
+ for i = math.MinInt16; i <= math.MaxInt16; i++ {
+ if want, got := int16(i)%int16(pow2[1]) == 0, divisible_int16_2to1(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to1(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int16(i)%int16(pow2[2]) == 0, divisible_int16_2to2(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to2(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int16(i)%int16(pow2[3]) == 0, divisible_int16_2to3(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to3(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int16(i)%int16(pow2[4]) == 0, divisible_int16_2to4(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to4(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int16(i)%int16(pow2[5]) == 0, divisible_int16_2to5(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to5(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int16(i)%int16(pow2[6]) == 0, divisible_int16_2to6(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to6(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int16(i)%int16(pow2[7]) == 0, divisible_int16_2to7(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to7(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int16(i)%int16(pow2[8]) == 0, divisible_int16_2to8(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to8(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int16(i)%int16(pow2[9]) == 0, divisible_int16_2to9(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to9(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int16(i)%int16(pow2[10]) == 0, divisible_int16_2to10(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to10(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int16(i)%int16(pow2[11]) == 0, divisible_int16_2to11(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to11(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int16(i)%int16(pow2[12]) == 0, divisible_int16_2to12(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to12(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int16(i)%int16(pow2[13]) == 0, divisible_int16_2to13(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to13(%d) = %v want %v", i, got, want)
+ }
+ if want, got := int16(i)%int16(pow2[14]) == 0, divisible_int16_2to14(int16(i)); got != want {
+ t.Errorf("divisible_int16_2to14(%d) = %v want %v", i, got, want)
+ }
+ }
+ // spot check for int32 and int64
+ var (
+ two4 int64 = 1 << 4
+ two15 int64 = 1 << 15
+ two26 int64 = 1 << 26
+ two34 int64 = 1 << 34
+ two48 int64 = 1 << 48
+ two57 int64 = 1 << 57
+ )
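+	// For each power of two, xs holds a divisible positive value, a
+	// non-divisible positive value, a divisible negative value, and a
+	// non-divisible negative value.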
+ var xs = []int64{two4, two4 + 3, -3 * two4, -3*two4 + 1,
+ two15, two15 + 3, -3 * two15, -3*two15 + 1,
+ two26, two26 + 37, -5 * two26, -5*two26 + 2,
+ two34, two34 + 356, -7 * two34, -7*two34 + 13,
+ two48, two48 + 3000, -12 * two48, -12*two48 + 1111,
+ two57, two57 + 397654, -15 * two57, -15*two57 + 11234,
+ }
+ for _, x := range xs {
+ if int64(int32(x)) == x {
+ if want, got := int32(x)%int32(two4) == 0, divisible_int32_2to4(int32(x)); got != want {
+ t.Errorf("divisible_int32_2to4(%d) = %v want %v", x, got, want)
+ }
+
+ if want, got := int32(x)%int32(two15) == 0, divisible_int32_2to15(int32(x)); got != want {
+ t.Errorf("divisible_int32_2to15(%d) = %v want %v", x, got, want)
+ }
+
+ if want, got := int32(x)%int32(two26) == 0, divisible_int32_2to26(int32(x)); got != want {
+ t.Errorf("divisible_int32_2to26(%d) = %v want %v", x, got, want)
+ }
+ }
+ // spot check for int64
+ if want, got := x%two4 == 0, divisible_int64_2to4(x); got != want {
+ t.Errorf("divisible_int64_2to4(%d) = %v want %v", x, got, want)
+ }
+
+ if want, got := x%two15 == 0, divisible_int64_2to15(x); got != want {
+ t.Errorf("divisible_int64_2to15(%d) = %v want %v", x, got, want)
+ }
+
+ if want, got := x%two26 == 0, divisible_int64_2to26(x); got != want {
+ t.Errorf("divisible_int64_2to26(%d) = %v want %v", x, got, want)
+ }
+
+ if want, got := x%two34 == 0, divisible_int64_2to34(x); got != want {
+ t.Errorf("divisible_int64_2to34(%d) = %v want %v", x, got, want)
+ }
+
+ if want, got := x%two48 == 0, divisible_int64_2to48(x); got != want {
+ t.Errorf("divisible_int64_2to48(%d) = %v want %v", x, got, want)
+ }
+
+ if want, got := x%two57 == 0, divisible_int64_2to57(x); got != want {
+ t.Errorf("divisible_int64_2to57(%d) = %v want %v", x, got, want)
+ }
+ }
+}
case OpDiv8u:
return rewriteValuegeneric_OpDiv8u_0(v)
case OpEq16:
- return rewriteValuegeneric_OpEq16_0(v)
+ return rewriteValuegeneric_OpEq16_0(v) || rewriteValuegeneric_OpEq16_10(v)
case OpEq32:
- return rewriteValuegeneric_OpEq32_0(v)
+ return rewriteValuegeneric_OpEq32_0(v) || rewriteValuegeneric_OpEq32_10(v)
case OpEq32F:
return rewriteValuegeneric_OpEq32F_0(v)
case OpEq64:
- return rewriteValuegeneric_OpEq64_0(v)
+ return rewriteValuegeneric_OpEq64_0(v) || rewriteValuegeneric_OpEq64_10(v)
case OpEq64F:
return rewriteValuegeneric_OpEq64F_0(v)
case OpEq8:
- return rewriteValuegeneric_OpEq8_0(v)
+ return rewriteValuegeneric_OpEq8_0(v) || rewriteValuegeneric_OpEq8_10(v)
case OpEqB:
return rewriteValuegeneric_OpEqB_0(v)
case OpEqInter:
}
func rewriteValuegeneric_OpEq16_0(v *Value) bool {
b := v.Block
+ typ := &b.Func.Config.Types
// match: (Eq16 x x)
// cond:
// result: (ConstBool [1])
v.AuxInt = b2i(c == d)
return true
}
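+	// The rules below (and their operand-order variants) match the form that
+	// n%(1<<k)==0 reaches after earlier generic rules, namely
+	// n == (n/(1<<k))*(1<<k) with the signed division expanded into shifts,
+	// and fold the whole comparison into a mask test:
+	//   n % (1<<k) == 0  <=>  n & (1<<k - 1) == 0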
+ // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 15 && kbar == 16 - k
+ // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh16x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh16x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd16 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ if n != v_1_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpRsh16Ux64 {
+ break
+ }
+ if v_1_0_0_1.Type != t {
+ break
+ }
+ _ = v_1_0_0_1.Args[1]
+ v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+ if v_1_0_0_1_0.Op != OpRsh16x64 {
+ break
+ }
+ if v_1_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_1_0.Args[1]
+ if n != v_1_0_0_1_0.Args[0] {
+ break
+ }
+ v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+ if v_1_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_1_0_1.AuxInt != 15 {
+ break
+ }
+ v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+ if v_1_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_1_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 15 && kbar == 16-k) {
+ break
+ }
+ v.reset(OpEq16)
+ v0 := b.NewValue0(v.Pos, OpAnd16, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst16, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 15 && kbar == 16 - k
+ // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh16x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh16x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd16 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpRsh16Ux64 {
+ break
+ }
+ if v_1_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpRsh16x64 {
+ break
+ }
+ if v_1_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0_0.Args[1]
+ if n != v_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_0_0_1.AuxInt != 15 {
+ break
+ }
+ v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+ if v_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_0_1.AuxInt
+ if n != v_1_0_0.Args[1] {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 15 && kbar == 16-k) {
+ break
+ }
+ v.reset(OpEq16)
+ v0 := b.NewValue0(v.Pos, OpAnd16, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst16, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 15 && kbar == 16 - k
+ // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh16x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh16x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd16 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ if n != v_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpRsh16Ux64 {
+ break
+ }
+ if v_0_0_0_1.Type != t {
+ break
+ }
+ _ = v_0_0_0_1.Args[1]
+ v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+ if v_0_0_0_1_0.Op != OpRsh16x64 {
+ break
+ }
+ if v_0_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_1_0.Args[1]
+ if n != v_0_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+ if v_0_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_1_0_1.AuxInt != 15 {
+ break
+ }
+ v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+ if v_0_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_1_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 15 && kbar == 16-k) {
+ break
+ }
+ v.reset(OpEq16)
+ v0 := b.NewValue0(v.Pos, OpAnd16, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst16, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq16_10(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 15 && kbar == 16 - k
+ // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh16x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh16x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd16 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpRsh16Ux64 {
+ break
+ }
+ if v_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpRsh16x64 {
+ break
+ }
+ if v_0_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ if n != v_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 15 {
+ break
+ }
+ v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+ if v_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_0_1.AuxInt
+ if n != v_0_0_0.Args[1] {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 15 && kbar == 16-k) {
+ break
+ }
+ v.reset(OpEq16)
+ v0 := b.NewValue0(v.Pos, OpAnd16, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst16, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
// match: (Eq16 s:(Sub16 x y) (Const16 [0]))
// cond: s.Uses == 1
// result: (Eq16 x y)
}
func rewriteValuegeneric_OpEq32_0(v *Value) bool {
b := v.Block
+ typ := &b.Func.Config.Types
// match: (Eq32 x x)
// cond:
// result: (ConstBool [1])
v.AuxInt = b2i(c == d)
return true
}
- // match: (Eq32 s:(Sub32 x y) (Const32 [0]))
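+	// Divisibility rules: 32-bit analogues of the Eq16 rules above, folding
+	// n == (n/(1<<k))*(1<<k) into n&(1<<k-1) == 0.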
+ // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 31 && kbar == 32 - k
+ // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd32 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ if n != v_1_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpRsh32Ux64 {
+ break
+ }
+ if v_1_0_0_1.Type != t {
+ break
+ }
+ _ = v_1_0_0_1.Args[1]
+ v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+ if v_1_0_0_1_0.Op != OpRsh32x64 {
+ break
+ }
+ if v_1_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_1_0.Args[1]
+ if n != v_1_0_0_1_0.Args[0] {
+ break
+ }
+ v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+ if v_1_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_1_0_1.AuxInt != 31 {
+ break
+ }
+ v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+ if v_1_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_1_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 31 && kbar == 32-k) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpAnd32, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst32, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst32, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 31 && kbar == 32 - k
+ // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd32 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ if v_1_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpRsh32x64 {
+ break
+ }
+ if v_1_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0_0.Args[1]
+ if n != v_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_0_0_1.AuxInt != 31 {
+ break
+ }
+ v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+ if v_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_0_1.AuxInt
+ if n != v_1_0_0.Args[1] {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 31 && kbar == 32-k) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpAnd32, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst32, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst32, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 31 && kbar == 32 - k
+ // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd32 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ if n != v_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpRsh32Ux64 {
+ break
+ }
+ if v_0_0_0_1.Type != t {
+ break
+ }
+ _ = v_0_0_0_1.Args[1]
+ v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+ if v_0_0_0_1_0.Op != OpRsh32x64 {
+ break
+ }
+ if v_0_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_1_0.Args[1]
+ if n != v_0_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+ if v_0_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_1_0_1.AuxInt != 31 {
+ break
+ }
+ v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+ if v_0_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_1_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 31 && kbar == 32-k) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpAnd32, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst32, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst32, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq32_10(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 31 && kbar == 32 - k
+ // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd32 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ if v_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpRsh32x64 {
+ break
+ }
+ if v_0_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ if n != v_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 31 {
+ break
+ }
+ v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+ if v_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_0_1.AuxInt
+ if n != v_0_0_0.Args[1] {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 31 && kbar == 32-k) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpAnd32, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst32, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst32, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq32 s:(Sub32 x y) (Const32 [0]))
// cond: s.Uses == 1
// result: (Eq32 x y)
for {
}
func rewriteValuegeneric_OpEq64_0(v *Value) bool {
b := v.Block
+ typ := &b.Func.Config.Types
// match: (Eq64 x x)
// cond:
// result: (ConstBool [1])
v.AuxInt = b2i(c == d)
return true
}
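+	// Divisibility rules: 64-bit analogues of the Eq16 rules above, folding
+	// n == (n/(1<<k))*(1<<k) into n&(1<<k-1) == 0.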
+ // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 63 && kbar == 64 - k
+ // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd64 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ if n != v_1_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ if v_1_0_0_1.Type != t {
+ break
+ }
+ _ = v_1_0_0_1.Args[1]
+ v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+ if v_1_0_0_1_0.Op != OpRsh64x64 {
+ break
+ }
+ if v_1_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_1_0.Args[1]
+ if n != v_1_0_0_1_0.Args[0] {
+ break
+ }
+ v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+ if v_1_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_1_0_1.AuxInt != 63 {
+ break
+ }
+ v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+ if v_1_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_1_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 63 && kbar == 64-k) {
+ break
+ }
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpAnd64, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 63 && kbar == 64 - k
+ // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd64 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ if v_1_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ if v_1_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0_0.Args[1]
+ if n != v_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_0_0_1.AuxInt != 63 {
+ break
+ }
+ v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+ if v_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_0_1.AuxInt
+ if n != v_1_0_0.Args[1] {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 63 && kbar == 64-k) {
+ break
+ }
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpAnd64, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 63 && kbar == 64 - k
+ // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd64 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ if n != v_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ if v_0_0_0_1.Type != t {
+ break
+ }
+ _ = v_0_0_0_1.Args[1]
+ v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+ if v_0_0_0_1_0.Op != OpRsh64x64 {
+ break
+ }
+ if v_0_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_1_0.Args[1]
+ if n != v_0_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+ if v_0_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_1_0_1.AuxInt != 63 {
+ break
+ }
+ v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+ if v_0_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_1_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 63 && kbar == 64-k) {
+ break
+ }
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpAnd64, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq64_10(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 63 && kbar == 64 - k
+ // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd64 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ if v_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ if v_0_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ if n != v_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 63 {
+ break
+ }
+ v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+ if v_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_0_1.AuxInt
+ if n != v_0_0_0.Args[1] {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 63 && kbar == 64-k) {
+ break
+ }
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpAnd64, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
// match: (Eq64 s:(Sub64 x y) (Const64 [0]))
// cond: s.Uses == 1
// result: (Eq64 x y)
}
func rewriteValuegeneric_OpEq8_0(v *Value) bool {
b := v.Block
+ typ := &b.Func.Config.Types
// match: (Eq8 x x)
// cond:
// result: (ConstBool [1])
v.AuxInt = b2i(c == d)
return true
}
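+	// Divisibility rules: 8-bit analogues of the Eq16 rules above, folding
+	// n == (n/(1<<k))*(1<<k) into n&(1<<k-1) == 0.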
+ // match: (Eq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 7 && kbar == 8 - k
+ // result: (Eq8 (And8 <t> n (Const8 <t> [int64(1<<uint(k)-1)])) (Const8 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh8x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh8x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd8 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ if n != v_1_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpRsh8Ux64 {
+ break
+ }
+ if v_1_0_0_1.Type != t {
+ break
+ }
+ _ = v_1_0_0_1.Args[1]
+ v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+ if v_1_0_0_1_0.Op != OpRsh8x64 {
+ break
+ }
+ if v_1_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_1_0.Args[1]
+ if n != v_1_0_0_1_0.Args[0] {
+ break
+ }
+ v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+ if v_1_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_1_0_1.AuxInt != 7 {
+ break
+ }
+ v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+ if v_1_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_1_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 7 && kbar == 8-k) {
+ break
+ }
+ v.reset(OpEq8)
+ v0 := b.NewValue0(v.Pos, OpAnd8, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst8, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst8, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 7 && kbar == 8 - k
+ // result: (Eq8 (And8 <t> n (Const8 <t> [int64(1<<uint(k)-1)])) (Const8 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh8x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh8x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd8 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpRsh8Ux64 {
+ break
+ }
+ if v_1_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpRsh8x64 {
+ break
+ }
+ if v_1_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0_0.Args[1]
+ if n != v_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_0_0_1.AuxInt != 7 {
+ break
+ }
+ v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+ if v_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_0_1.AuxInt
+ if n != v_1_0_0.Args[1] {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 7 && kbar == 8-k) {
+ break
+ }
+ v.reset(OpEq8)
+ v0 := b.NewValue0(v.Pos, OpAnd8, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst8, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst8, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq8 (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 7 && kbar == 8 - k
+ // result: (Eq8 (And8 <t> n (Const8 <t> [int64(1<<uint(k)-1)])) (Const8 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh8x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh8x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd8 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ if n != v_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpRsh8Ux64 {
+ break
+ }
+ if v_0_0_0_1.Type != t {
+ break
+ }
+ _ = v_0_0_0_1.Args[1]
+ v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+ if v_0_0_0_1_0.Op != OpRsh8x64 {
+ break
+ }
+ if v_0_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_1_0.Args[1]
+ if n != v_0_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+ if v_0_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_1_0_1.AuxInt != 7 {
+ break
+ }
+ v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+ if v_0_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_1_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 7 && kbar == 8-k) {
+ break
+ }
+ v.reset(OpEq8)
+ v0 := b.NewValue0(v.Pos, OpAnd8, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst8, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst8, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq8_10(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq8 (Lsh8x64 (Rsh8x64 (Add8 <t> (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 7 && kbar == 8 - k
+ // result: (Eq8 (And8 <t> n (Const8 <t> [int64(1<<uint(k)-1)])) (Const8 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh8x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh8x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd8 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpRsh8Ux64 {
+ break
+ }
+ if v_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpRsh8x64 {
+ break
+ }
+ if v_0_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ if n != v_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 7 {
+ break
+ }
+ v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+ if v_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_0_1.AuxInt
+ if n != v_0_0_0.Args[1] {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 7 && kbar == 8-k) {
+ break
+ }
+ v.reset(OpEq8)
+ v0 := b.NewValue0(v.Pos, OpAnd8, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst8, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst8, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
// match: (Eq8 s:(Sub8 x y) (Const8 [0]))
// cond: s.Uses == 1
// result: (Eq8 x y)