v.AddArg2(x, v0)
return true
}
+ // match: (Lsh16x64 i:(Rsh16x64 x (Const64 [c])) (Const64 [c]))
+ // cond: c >= 0 && c < 16 && i.Uses == 1
+ // result: (And16 x (Const16 <v.Type> [int16(-1) << c]))
+ for {
+ i := v_0
+ if i.Op != OpRsh16x64 {
+ break
+ }
+ _ = i.Args[1]
+ x := i.Args[0]
+ i_1 := i.Args[1]
+ if i_1.Op != OpConst64 {
+ break
+ }
+ c := auxIntToInt64(i_1.AuxInt)
+ if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
+ break
+ }
+ v.reset(OpAnd16)
+ v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
+ v0.AuxInt = int16ToAuxInt(int16(-1) << c)
+ v.AddArg2(x, v0)
+ return true
+ }
+ // match: (Lsh16x64 i:(Rsh16Ux64 x (Const64 [c])) (Const64 [c]))
+ // cond: c >= 0 && c < 16 && i.Uses == 1
+ // result: (And16 x (Const16 <v.Type> [int16(-1) << c]))
+ for {
+ i := v_0
+ if i.Op != OpRsh16Ux64 {
+ break
+ }
+ _ = i.Args[1]
+ x := i.Args[0]
+ i_1 := i.Args[1]
+ if i_1.Op != OpConst64 {
+ break
+ }
+ c := auxIntToInt64(i_1.AuxInt)
+ if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
+ break
+ }
+ v.reset(OpAnd16)
+ v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
+ v0.AuxInt = int16ToAuxInt(int16(-1) << c)
+ v.AddArg2(x, v0)
+ return true
+ }
// match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Lsh16x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
v.AddArg2(x, v0)
return true
}
+ // match: (Lsh32x64 i:(Rsh32x64 x (Const64 [c])) (Const64 [c]))
+ // cond: c >= 0 && c < 32 && i.Uses == 1
+ // result: (And32 x (Const32 <v.Type> [int32(-1) << c]))
+ for {
+ i := v_0
+ if i.Op != OpRsh32x64 {
+ break
+ }
+ _ = i.Args[1]
+ x := i.Args[0]
+ i_1 := i.Args[1]
+ if i_1.Op != OpConst64 {
+ break
+ }
+ c := auxIntToInt64(i_1.AuxInt)
+ if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
+ break
+ }
+ v.reset(OpAnd32)
+ v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
+ v0.AuxInt = int32ToAuxInt(int32(-1) << c)
+ v.AddArg2(x, v0)
+ return true
+ }
+ // match: (Lsh32x64 i:(Rsh32Ux64 x (Const64 [c])) (Const64 [c]))
+ // cond: c >= 0 && c < 32 && i.Uses == 1
+ // result: (And32 x (Const32 <v.Type> [int32(-1) << c]))
+ for {
+ i := v_0
+ if i.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = i.Args[1]
+ x := i.Args[0]
+ i_1 := i.Args[1]
+ if i_1.Op != OpConst64 {
+ break
+ }
+ c := auxIntToInt64(i_1.AuxInt)
+ if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
+ break
+ }
+ v.reset(OpAnd32)
+ v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
+ v0.AuxInt = int32ToAuxInt(int32(-1) << c)
+ v.AddArg2(x, v0)
+ return true
+ }
// match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Lsh32x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
v.AddArg2(x, v0)
return true
}
+ // match: (Lsh64x64 i:(Rsh64x64 x (Const64 [c])) (Const64 [c]))
+ // cond: c >= 0 && c < 64 && i.Uses == 1
+ // result: (And64 x (Const64 <v.Type> [int64(-1) << c]))
+ for {
+ i := v_0
+ if i.Op != OpRsh64x64 {
+ break
+ }
+ _ = i.Args[1]
+ x := i.Args[0]
+ i_1 := i.Args[1]
+ if i_1.Op != OpConst64 {
+ break
+ }
+ c := auxIntToInt64(i_1.AuxInt)
+ if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
+ break
+ }
+ v.reset(OpAnd64)
+ v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
+ v0.AuxInt = int64ToAuxInt(int64(-1) << c)
+ v.AddArg2(x, v0)
+ return true
+ }
+ // match: (Lsh64x64 i:(Rsh64Ux64 x (Const64 [c])) (Const64 [c]))
+ // cond: c >= 0 && c < 64 && i.Uses == 1
+ // result: (And64 x (Const64 <v.Type> [int64(-1) << c]))
+ for {
+ i := v_0
+ if i.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = i.Args[1]
+ x := i.Args[0]
+ i_1 := i.Args[1]
+ if i_1.Op != OpConst64 {
+ break
+ }
+ c := auxIntToInt64(i_1.AuxInt)
+ if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
+ break
+ }
+ v.reset(OpAnd64)
+ v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
+ v0.AuxInt = int64ToAuxInt(int64(-1) << c)
+ v.AddArg2(x, v0)
+ return true
+ }
// match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Lsh64x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
v.AddArg2(x, v0)
return true
}
+ // match: (Lsh8x64 i:(Rsh8x64 x (Const64 [c])) (Const64 [c]))
+ // cond: c >= 0 && c < 8 && i.Uses == 1
+ // result: (And8 x (Const8 <v.Type> [int8(-1) << c]))
+ for {
+ i := v_0
+ if i.Op != OpRsh8x64 {
+ break
+ }
+ _ = i.Args[1]
+ x := i.Args[0]
+ i_1 := i.Args[1]
+ if i_1.Op != OpConst64 {
+ break
+ }
+ c := auxIntToInt64(i_1.AuxInt)
+ if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
+ break
+ }
+ v.reset(OpAnd8)
+ v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
+ v0.AuxInt = int8ToAuxInt(int8(-1) << c)
+ v.AddArg2(x, v0)
+ return true
+ }
+ // match: (Lsh8x64 i:(Rsh8Ux64 x (Const64 [c])) (Const64 [c]))
+ // cond: c >= 0 && c < 8 && i.Uses == 1
+ // result: (And8 x (Const8 <v.Type> [int8(-1) << c]))
+ for {
+ i := v_0
+ if i.Op != OpRsh8Ux64 {
+ break
+ }
+ _ = i.Args[1]
+ x := i.Args[0]
+ i_1 := i.Args[1]
+ if i_1.Op != OpConst64 {
+ break
+ }
+ c := auxIntToInt64(i_1.AuxInt)
+ if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
+ break
+ }
+ v.reset(OpAnd8)
+ v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
+ v0.AuxInt = int8ToAuxInt(int8(-1) << c)
+ v.AddArg2(x, v0)
+ return true
+ }
// match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Lsh8x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
v.AddArg2(x, v0)
return true
}
+ // match: (Rsh16Ux64 i:(Lsh16x64 x (Const64 [c])) (Const64 [c]))
+ // cond: c >= 0 && c < 16 && i.Uses == 1
+ // result: (And16 x (Const16 <v.Type> [int16(^uint16(0)>>c)]))
+ for {
+ i := v_0
+ if i.Op != OpLsh16x64 {
+ break
+ }
+ _ = i.Args[1]
+ x := i.Args[0]
+ i_1 := i.Args[1]
+ if i_1.Op != OpConst64 {
+ break
+ }
+ c := auxIntToInt64(i_1.AuxInt)
+ if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
+ break
+ }
+ v.reset(OpAnd16)
+ v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
+ v0.AuxInt = int16ToAuxInt(int16(^uint16(0) >> c))
+ v.AddArg2(x, v0)
+ return true
+ }
// match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh16Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
v.AddArg2(x, v0)
return true
}
+ // match: (Rsh32Ux64 i:(Lsh32x64 x (Const64 [c])) (Const64 [c]))
+ // cond: c >= 0 && c < 32 && i.Uses == 1
+ // result: (And32 x (Const32 <v.Type> [int32(^uint32(0)>>c)]))
+ for {
+ i := v_0
+ if i.Op != OpLsh32x64 {
+ break
+ }
+ _ = i.Args[1]
+ x := i.Args[0]
+ i_1 := i.Args[1]
+ if i_1.Op != OpConst64 {
+ break
+ }
+ c := auxIntToInt64(i_1.AuxInt)
+ if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
+ break
+ }
+ v.reset(OpAnd32)
+ v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
+ v0.AuxInt = int32ToAuxInt(int32(^uint32(0) >> c))
+ v.AddArg2(x, v0)
+ return true
+ }
// match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh32Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
v.AddArg2(x, v0)
return true
}
+ // match: (Rsh64Ux64 i:(Lsh64x64 x (Const64 [c])) (Const64 [c]))
+ // cond: c >= 0 && c < 64 && i.Uses == 1
+ // result: (And64 x (Const64 <v.Type> [int64(^uint64(0)>>c)]))
+ for {
+ i := v_0
+ if i.Op != OpLsh64x64 {
+ break
+ }
+ _ = i.Args[1]
+ x := i.Args[0]
+ i_1 := i.Args[1]
+ if i_1.Op != OpConst64 {
+ break
+ }
+ c := auxIntToInt64(i_1.AuxInt)
+ if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
+ break
+ }
+ v.reset(OpAnd64)
+ v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
+ v0.AuxInt = int64ToAuxInt(int64(^uint64(0) >> c))
+ v.AddArg2(x, v0)
+ return true
+ }
// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh64Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
v.AddArg2(x, v0)
return true
}
+ // match: (Rsh8Ux64 i:(Lsh8x64 x (Const64 [c])) (Const64 [c]))
+ // cond: c >= 0 && c < 8 && i.Uses == 1
+ // result: (And8 x (Const8 <v.Type> [int8(^uint8(0)>>c)]))
+ for {
+ i := v_0
+ if i.Op != OpLsh8x64 {
+ break
+ }
+ _ = i.Args[1]
+ x := i.Args[0]
+ i_1 := i.Args[1]
+ if i_1.Op != OpConst64 {
+ break
+ }
+ c := auxIntToInt64(i_1.AuxInt)
+ if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
+ break
+ }
+ v.reset(OpAnd8)
+ v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
+ v0.AuxInt = int8ToAuxInt(int8(^uint8(0) >> c))
+ v.AddArg2(x, v0)
+ return true
+ }
// match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh8Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))