case OpPPC64NotEqual:
return rewriteValuePPC64_OpPPC64NotEqual_0(v)
case OpPPC64OR:
- return rewriteValuePPC64_OpPPC64OR_0(v)
+ return rewriteValuePPC64_OpPPC64OR_0(v) || rewriteValuePPC64_OpPPC64OR_10(v)
case OpPPC64ORN:
return rewriteValuePPC64_OpPPC64ORN_0(v)
case OpPPC64ORconst:
case OpPPC64SUB:
return rewriteValuePPC64_OpPPC64SUB_0(v)
case OpPPC64XOR:
- return rewriteValuePPC64_OpPPC64XOR_0(v)
+ return rewriteValuePPC64_OpPPC64XOR_0(v) || rewriteValuePPC64_OpPPC64XOR_10(v)
case OpPPC64XORconst:
return rewriteValuePPC64_OpPPC64XORconst_0(v)
case OpPopCount16:
v.AddArg(x)
return true
}
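+ // The three rules below fire when the shift count has already been
+ // masked to 0-31 (y&31 in the source). Such a count can never exceed
+ // the SLW range, so the generic ORN/MaskIfNotCarry clamp is unnecessary.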
+ // match: (Lsh32x64 x (AND y (MOVDconst [31])))
+ // cond:
+ // result: (SLW x (ANDconst <typ.Int32> [31] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64AND {
+ break
+ }
+ _ = v_1.Args[1]
+ y := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1.AuxInt != 31 {
+ break
+ }
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int32)
+ v0.AuxInt = 31
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Lsh32x64 x (AND (MOVDconst [31]) y))
+ // cond:
+ // result: (SLW x (ANDconst <typ.Int32> [31] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64AND {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 31 {
+ break
+ }
+ y := v_1.Args[1]
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int32)
+ v0.AuxInt = 31
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Lsh32x64 x (ANDconst <typ.Int32> [31] y))
+ // cond:
+ // result: (SLW x (ANDconst <typ.Int32> [31] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1.Type != typ.Int32 {
+ break
+ }
+ if v_1.AuxInt != 31 {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int32)
+ v0.AuxInt = 31
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
// match: (Lsh32x64 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
v.AddArg(x)
return true
}
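+ // Same idea for 64-bit shifts: a count masked with y&63 is always a
+ // legal SLD count, so no clamping sequence is needed.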
+ // match: (Lsh64x64 x (AND y (MOVDconst [63])))
+ // cond:
+ // result: (SLD x (ANDconst <typ.Int64> [63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64AND {
+ break
+ }
+ _ = v_1.Args[1]
+ y := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1.AuxInt != 63 {
+ break
+ }
+ v.reset(OpPPC64SLD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int64)
+ v0.AuxInt = 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Lsh64x64 x (AND (MOVDconst [63]) y))
+ // cond:
+ // result: (SLD x (ANDconst <typ.Int64> [63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64AND {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 63 {
+ break
+ }
+ y := v_1.Args[1]
+ v.reset(OpPPC64SLD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int64)
+ v0.AuxInt = 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Lsh64x64 x (ANDconst <typ.Int64> [63] y))
+ // cond:
+ // result: (SLD x (ANDconst <typ.Int64> [63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1.Type != typ.Int64 {
+ break
+ }
+ if v_1.AuxInt != 63 {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpPPC64SLD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int64)
+ v0.AuxInt = 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
// match: (Lsh64x64 x y)
// cond:
// result: (SLD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
}
}
func rewriteValuePPC64_OpPPC64ADD_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
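+ // rulegen declares b and typ in every function whose rules build new
+ // values or compare types; the blank assignments keep the compiler
+ // from rejecting functions where one of them goes unused.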
// match: (ADD (SLDconst x [c]) (SRDconst x [d]))
// cond: d == 64-c
// result: (ROTLconst [c] x)
v.AddArg(x)
return true
}
- // match: (ADD x (MOVDconst [c]))
- // cond: is32Bit(c)
- // result: (ADDconst [c] x)
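+ // The next four rules recognize the rotate idiom x<<(y&63) | x>>(64-(y&63))
+ // (and its 32-bit analogue with masks of 31) when it is spelled with ADD.
+ // The two shifted halves occupy disjoint bits, so ADD, OR and XOR all
+ // compute the same value and each form can become a single ROTL/ROTLW.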
+ // match: (ADD (SLD x (ANDconst <typ.Int64> [63] y)) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y))))
+ // cond:
+ // result: (ROTL x y)
for {
_ = v.Args[1]
- x := v.Args[0]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64SLD {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_0_1.Type != typ.Int64 {
+ break
+ }
+ if v_0_1.AuxInt != 63 {
+ break
+ }
+ y := v_0_1.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpPPC64MOVDconst {
+ if v_1.Op != OpPPC64SRD {
break
}
- c := v_1.AuxInt
- if !(is32Bit(c)) {
+ _ = v_1.Args[1]
+ if x != v_1.Args[0] {
break
}
- v.reset(OpPPC64ADDconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (ADD (MOVDconst [c]) x)
- // cond: is32Bit(c)
- // result: (ADDconst [c] x)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpPPC64MOVDconst {
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64SUB {
break
}
- c := v_0.AuxInt
- x := v.Args[1]
- if !(is32Bit(c)) {
+ if v_1_1.Type != typ.UInt {
break
}
- v.reset(OpPPC64ADDconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- return false
-}
-func rewriteValuePPC64_OpPPC64ADDconst_0(v *Value) bool {
- // match: (ADDconst [c] (ADDconst [d] x))
- // cond: is32Bit(c+d)
- // result: (ADDconst [c+d] x)
- for {
- c := v.AuxInt
- v_0 := v.Args[0]
- if v_0.Op != OpPPC64ADDconst {
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64MOVDconst {
break
}
- d := v_0.AuxInt
- x := v_0.Args[0]
- if !(is32Bit(c + d)) {
+ if v_1_1_0.AuxInt != 64 {
break
}
- v.reset(OpPPC64ADDconst)
- v.AuxInt = c + d
- v.AddArg(x)
- return true
- }
- // match: (ADDconst [0] x)
- // cond:
- // result: x
- for {
- if v.AuxInt != 0 {
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpPPC64ANDconst {
break
}
- x := v.Args[0]
- v.reset(OpCopy)
- v.Type = x.Type
- v.AddArg(x)
- return true
- }
- // match: (ADDconst [c] (MOVDaddr [d] {sym} x))
- // cond:
- // result: (MOVDaddr [c+d] {sym} x)
- for {
- c := v.AuxInt
- v_0 := v.Args[0]
- if v_0.Op != OpPPC64MOVDaddr {
+ if v_1_1_1.Type != typ.UInt {
break
}
- d := v_0.AuxInt
- sym := v_0.Aux
- x := v_0.Args[0]
- v.reset(OpPPC64MOVDaddr)
- v.AuxInt = c + d
- v.Aux = sym
- v.AddArg(x)
- return true
- }
- return false
-}
-func rewriteValuePPC64_OpPPC64AND_0(v *Value) bool {
- // match: (AND x (NOR y y))
- // cond:
- // result: (ANDN x y)
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpPPC64NOR {
+ if v_1_1_1.AuxInt != 63 {
break
}
- _ = v_1.Args[1]
- y := v_1.Args[0]
- if y != v_1.Args[1] {
+ if y != v_1_1_1.Args[0] {
break
}
- v.reset(OpPPC64ANDN)
+ v.reset(OpPPC64ROTL)
v.AddArg(x)
v.AddArg(y)
return true
}
- // match: (AND (NOR y y) x)
+ // match: (ADD (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y))) (SLD x (ANDconst <typ.Int64> [63] y)))
// cond:
- // result: (ANDN x y)
+ // result: (ROTL x y)
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpPPC64NOR {
+ if v_0.Op != OpPPC64SRD {
break
}
_ = v_0.Args[1]
- y := v_0.Args[0]
- if y != v_0.Args[1] {
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpPPC64SUB {
break
}
- x := v.Args[1]
- v.reset(OpPPC64ANDN)
+ if v_0_1.Type != typ.UInt {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_0_1_0.AuxInt != 64 {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_0_1_1.Type != typ.UInt {
+ break
+ }
+ if v_0_1_1.AuxInt != 63 {
+ break
+ }
+ y := v_0_1_1.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SLD {
+ break
+ }
+ _ = v_1.Args[1]
+ if x != v_1.Args[0] {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1.Type != typ.Int64 {
+ break
+ }
+ if v_1_1.AuxInt != 63 {
+ break
+ }
+ if y != v_1_1.Args[0] {
+ break
+ }
+ v.reset(OpPPC64ROTL)
v.AddArg(x)
v.AddArg(y)
return true
}
- // match: (AND (MOVDconst [c]) (MOVDconst [d]))
+ // match: (ADD (SLW x (ANDconst <typ.Int32> [31] y)) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y))))
// cond:
- // result: (MOVDconst [c&d])
+ // result: (ROTLW x y)
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpPPC64MOVDconst {
+ if v_0.Op != OpPPC64SLW {
break
}
- c := v_0.AuxInt
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_0_1.Type != typ.Int32 {
+ break
+ }
+ if v_0_1.AuxInt != 31 {
+ break
+ }
+ y := v_0_1.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SRW {
+ break
+ }
+ _ = v_1.Args[1]
+ if x != v_1.Args[0] {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1_0.AuxInt != 32 {
+ break
+ }
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1_1.Type != typ.UInt {
+ break
+ }
+ if v_1_1_1.AuxInt != 31 {
+ break
+ }
+ if y != v_1_1_1.Args[0] {
+ break
+ }
+ v.reset(OpPPC64ROTLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (ADD (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y))) (SLW x (ANDconst <typ.Int32> [31] y)))
+ // cond:
+ // result: (ROTLW x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64SRW {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_0_1.Type != typ.UInt {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_0_1_0.AuxInt != 32 {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_0_1_1.Type != typ.UInt {
+ break
+ }
+ if v_0_1_1.AuxInt != 31 {
+ break
+ }
+ y := v_0_1_1.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SLW {
+ break
+ }
+ _ = v_1.Args[1]
+ if x != v_1.Args[0] {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1.Type != typ.Int32 {
+ break
+ }
+ if v_1_1.AuxInt != 31 {
+ break
+ }
+ if y != v_1_1.Args[0] {
+ break
+ }
+ v.reset(OpPPC64ROTLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (ADD x (MOVDconst [c]))
+ // cond: is32Bit(c)
+ // result: (ADDconst [c] x)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(is32Bit(c)) {
+ break
+ }
+ v.reset(OpPPC64ADDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (ADD (MOVDconst [c]) x)
+ // cond: is32Bit(c)
+ // result: (ADDconst [c] x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ x := v.Args[1]
+ if !(is32Bit(c)) {
+ break
+ }
+ v.reset(OpPPC64ADDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
+func rewriteValuePPC64_OpPPC64ADDconst_0(v *Value) bool {
+ // match: (ADDconst [c] (ADDconst [d] x))
+ // cond: is32Bit(c+d)
+ // result: (ADDconst [c+d] x)
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64ADDconst {
+ break
+ }
+ d := v_0.AuxInt
+ x := v_0.Args[0]
+ if !(is32Bit(c + d)) {
+ break
+ }
+ v.reset(OpPPC64ADDconst)
+ v.AuxInt = c + d
+ v.AddArg(x)
+ return true
+ }
+ // match: (ADDconst [0] x)
+ // cond:
+ // result: x
+ for {
+ if v.AuxInt != 0 {
+ break
+ }
+ x := v.Args[0]
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ // match: (ADDconst [c] (MOVDaddr [d] {sym} x))
+ // cond:
+ // result: (MOVDaddr [c+d] {sym} x)
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDaddr {
+ break
+ }
+ d := v_0.AuxInt
+ sym := v_0.Aux
+ x := v_0.Args[0]
+ v.reset(OpPPC64MOVDaddr)
+ v.AuxInt = c + d
+ v.Aux = sym
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
+func rewriteValuePPC64_OpPPC64AND_0(v *Value) bool {
+ // match: (AND x (NOR y y))
+ // cond:
+ // result: (ANDN x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64NOR {
+ break
+ }
+ _ = v_1.Args[1]
+ y := v_1.Args[0]
+ if y != v_1.Args[1] {
+ break
+ }
+ v.reset(OpPPC64ANDN)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (AND (NOR y y) x)
+ // cond:
+ // result: (ANDN x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64NOR {
+ break
+ }
+ _ = v_0.Args[1]
+ y := v_0.Args[0]
+ if y != v_0.Args[1] {
+ break
+ }
+ x := v.Args[1]
+ v.reset(OpPPC64ANDN)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (AND (MOVDconst [c]) (MOVDconst [d]))
+ // cond:
+ // result: (MOVDconst [c&d])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_0.AuxInt
v_1 := v.Args[1]
if v_1.Op != OpPPC64MOVDconst {
break
return false
}
func rewriteValuePPC64_OpPPC64OR_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (OR (SLDconst x [c]) (SRDconst x [d]))
// cond: d == 64-c
// result: (ROTLconst [c] x)
v.AddArg(x)
return true
}
- // match: (OR (MOVDconst [c]) (MOVDconst [d]))
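+ // The same four rotate patterns matched in rewriteValuePPC64_OpPPC64ADD_0,
+ // here for the OR spelling of the idiom.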
+ // match: (OR (SLD x (ANDconst <typ.Int64> [63] y)) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y))))
// cond:
- // result: (MOVDconst [c|d])
+ // result: (ROTL x y)
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpPPC64MOVDconst {
+ if v_0.Op != OpPPC64SLD {
break
}
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpPPC64MOVDconst {
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpPPC64ANDconst {
break
}
- d := v_1.AuxInt
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = c | d
- return true
- }
- // match: (OR (MOVDconst [d]) (MOVDconst [c]))
- // cond:
- // result: (MOVDconst [c|d])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpPPC64MOVDconst {
+ if v_0_1.Type != typ.Int64 {
break
}
- d := v_0.AuxInt
+ if v_0_1.AuxInt != 63 {
+ break
+ }
+ y := v_0_1.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpPPC64MOVDconst {
+ if v_1.Op != OpPPC64SRD {
break
}
- c := v_1.AuxInt
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = c | d
- return true
- }
- // match: (OR x (MOVDconst [c]))
- // cond: isU32Bit(c)
- // result: (ORconst [c] x)
+ _ = v_1.Args[1]
+ if x != v_1.Args[0] {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1_0.AuxInt != 64 {
+ break
+ }
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1_1.Type != typ.UInt {
+ break
+ }
+ if v_1_1_1.AuxInt != 63 {
+ break
+ }
+ if y != v_1_1_1.Args[0] {
+ break
+ }
+ v.reset(OpPPC64ROTL)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (OR (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y))) (SLD x (ANDconst <typ.Int64> [63] y)))
+ // cond:
+ // result: (ROTL x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64SRD {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_0_1.Type != typ.UInt {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_0_1_0.AuxInt != 64 {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_0_1_1.Type != typ.UInt {
+ break
+ }
+ if v_0_1_1.AuxInt != 63 {
+ break
+ }
+ y := v_0_1_1.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SLD {
+ break
+ }
+ _ = v_1.Args[1]
+ if x != v_1.Args[0] {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1.Type != typ.Int64 {
+ break
+ }
+ if v_1_1.AuxInt != 63 {
+ break
+ }
+ if y != v_1_1.Args[0] {
+ break
+ }
+ v.reset(OpPPC64ROTL)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (OR (SLW x (ANDconst <typ.Int32> [31] y)) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y))))
+ // cond:
+ // result: (ROTLW x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64SLW {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_0_1.Type != typ.Int32 {
+ break
+ }
+ if v_0_1.AuxInt != 31 {
+ break
+ }
+ y := v_0_1.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SRW {
+ break
+ }
+ _ = v_1.Args[1]
+ if x != v_1.Args[0] {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1_0.AuxInt != 32 {
+ break
+ }
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1_1.Type != typ.UInt {
+ break
+ }
+ if v_1_1_1.AuxInt != 31 {
+ break
+ }
+ if y != v_1_1_1.Args[0] {
+ break
+ }
+ v.reset(OpPPC64ROTLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (OR (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y))) (SLW x (ANDconst <typ.Int32> [31] y)))
+ // cond:
+ // result: (ROTLW x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64SRW {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_0_1.Type != typ.UInt {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_0_1_0.AuxInt != 32 {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_0_1_1.Type != typ.UInt {
+ break
+ }
+ if v_0_1_1.AuxInt != 31 {
+ break
+ }
+ y := v_0_1_1.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SLW {
+ break
+ }
+ _ = v_1.Args[1]
+ if x != v_1.Args[0] {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1.Type != typ.Int32 {
+ break
+ }
+ if v_1_1.AuxInt != 31 {
+ break
+ }
+ if y != v_1_1.Args[0] {
+ break
+ }
+ v.reset(OpPPC64ROTLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (OR (MOVDconst [c]) (MOVDconst [d]))
+ // cond:
+ // result: (MOVDconst [c|d])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ d := v_1.AuxInt
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = c | d
+ return true
+ }
+ // match: (OR (MOVDconst [d]) (MOVDconst [c]))
+ // cond:
+ // result: (MOVDconst [c|d])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ d := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = c | d
+ return true
+ }
+ return false
+}
+func rewriteValuePPC64_OpPPC64OR_10(v *Value) bool {
+ // match: (OR x (MOVDconst [c]))
+ // cond: isU32Bit(c)
+ // result: (ORconst [c] x)
for {
_ = v.Args[1]
x := v.Args[0]
return false
}
func rewriteValuePPC64_OpPPC64XOR_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (XOR (SLDconst x [c]) (SRDconst x [d]))
// cond: d == 64-c
// result: (ROTLconst [c] x)
v.AddArg(x)
return true
}
- // match: (XOR (MOVDconst [c]) (MOVDconst [d]))
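+ // And the XOR spelling of the rotate idiom; see the ADD rules above.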
+ // match: (XOR (SLD x (ANDconst <typ.Int64> [63] y)) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y))))
// cond:
- // result: (MOVDconst [c^d])
+ // result: (ROTL x y)
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpPPC64MOVDconst {
+ if v_0.Op != OpPPC64SLD {
break
}
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpPPC64MOVDconst {
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpPPC64ANDconst {
break
}
- d := v_1.AuxInt
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = c ^ d
- return true
- }
- // match: (XOR (MOVDconst [d]) (MOVDconst [c]))
- // cond:
- // result: (MOVDconst [c^d])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpPPC64MOVDconst {
+ if v_0_1.Type != typ.Int64 {
break
}
- d := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpPPC64MOVDconst {
+ if v_0_1.AuxInt != 63 {
break
}
- c := v_1.AuxInt
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = c ^ d
- return true
- }
- // match: (XOR x (MOVDconst [c]))
- // cond: isU32Bit(c)
- // result: (XORconst [c] x)
- for {
- _ = v.Args[1]
- x := v.Args[0]
+ y := v_0_1.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpPPC64MOVDconst {
+ if v_1.Op != OpPPC64SRD {
break
}
- c := v_1.AuxInt
- if !(isU32Bit(c)) {
+ _ = v_1.Args[1]
+ if x != v_1.Args[0] {
break
}
- v.reset(OpPPC64XORconst)
- v.AuxInt = c
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1_0.AuxInt != 64 {
+ break
+ }
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1_1.Type != typ.UInt {
+ break
+ }
+ if v_1_1_1.AuxInt != 63 {
+ break
+ }
+ if y != v_1_1_1.Args[0] {
+ break
+ }
+ v.reset(OpPPC64ROTL)
v.AddArg(x)
+ v.AddArg(y)
return true
}
- // match: (XOR (MOVDconst [c]) x)
- // cond: isU32Bit(c)
- // result: (XORconst [c] x)
+ // match: (XOR (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y))) (SLD x (ANDconst <typ.Int64> [63] y)))
+ // cond:
+ // result: (ROTL x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64SRD {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_0_1.Type != typ.UInt {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_0_1_0.AuxInt != 64 {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_0_1_1.Type != typ.UInt {
+ break
+ }
+ if v_0_1_1.AuxInt != 63 {
+ break
+ }
+ y := v_0_1_1.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SLD {
+ break
+ }
+ _ = v_1.Args[1]
+ if x != v_1.Args[0] {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1.Type != typ.Int64 {
+ break
+ }
+ if v_1_1.AuxInt != 63 {
+ break
+ }
+ if y != v_1_1.Args[0] {
+ break
+ }
+ v.reset(OpPPC64ROTL)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (XOR (SLW x (ANDconst <typ.Int32> [31] y)) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y))))
+ // cond:
+ // result: (ROTLW x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64SLW {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_0_1.Type != typ.Int32 {
+ break
+ }
+ if v_0_1.AuxInt != 31 {
+ break
+ }
+ y := v_0_1.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SRW {
+ break
+ }
+ _ = v_1.Args[1]
+ if x != v_1.Args[0] {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1_0.AuxInt != 32 {
+ break
+ }
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1_1.Type != typ.UInt {
+ break
+ }
+ if v_1_1_1.AuxInt != 31 {
+ break
+ }
+ if y != v_1_1_1.Args[0] {
+ break
+ }
+ v.reset(OpPPC64ROTLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (XOR (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y))) (SLW x (ANDconst <typ.Int32> [31] y)))
+ // cond:
+ // result: (ROTLW x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64SRW {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_0_1.Type != typ.UInt {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_0_1_0.AuxInt != 32 {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_0_1_1.Type != typ.UInt {
+ break
+ }
+ if v_0_1_1.AuxInt != 31 {
+ break
+ }
+ y := v_0_1_1.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SLW {
+ break
+ }
+ _ = v_1.Args[1]
+ if x != v_1.Args[0] {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1.Type != typ.Int32 {
+ break
+ }
+ if v_1_1.AuxInt != 31 {
+ break
+ }
+ if y != v_1_1.Args[0] {
+ break
+ }
+ v.reset(OpPPC64ROTLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (XOR (MOVDconst [c]) (MOVDconst [d]))
+ // cond:
+ // result: (MOVDconst [c^d])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ d := v_1.AuxInt
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = c ^ d
+ return true
+ }
+ // match: (XOR (MOVDconst [d]) (MOVDconst [c]))
+ // cond:
+ // result: (MOVDconst [c^d])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ d := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = c ^ d
+ return true
+ }
+ return false
+}
+func rewriteValuePPC64_OpPPC64XOR_10(v *Value) bool {
+ // match: (XOR x (MOVDconst [c]))
+ // cond: isU32Bit(c)
+ // result: (XORconst [c] x)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(isU32Bit(c)) {
+ break
+ }
+ v.reset(OpPPC64XORconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (XOR (MOVDconst [c]) x)
+ // cond: isU32Bit(c)
+ // result: (XORconst [c] x)
for {
_ = v.Args[1]
v_0 := v.Args[0]
v.AddArg(x)
return true
}
- // match: (Rsh32Ux64 x y)
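+ // Unsigned right shifts get the same masking treatment: y&31 is always
+ // a legal SRW count. The SUB <typ.UInt> (MOVDconst [32]) forms typically
+ // come from the right half of a rotate idiom; 32-(y&31) lies in [1,32],
+ // and SRW counts of 32 or more simply produce 0, so no clamp is needed.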
+ // match: (Rsh32Ux64 x (AND y (MOVDconst [31])))
// cond:
- // result: (SRW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
+ // result: (SRW x (ANDconst <typ.Int32> [31] y))
for {
_ = v.Args[1]
x := v.Args[0]
- y := v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64AND {
+ break
+ }
+ _ = v_1.Args[1]
+ y := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1.AuxInt != 31 {
+ break
+ }
v.reset(OpPPC64SRW)
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpPPC64ORN, typ.Int64)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int32)
+ v0.AuxInt = 31
v0.AddArg(y)
- v1 := b.NewValue0(v.Pos, OpPPC64MaskIfNotCarry, typ.Int64)
- v2 := b.NewValue0(v.Pos, OpPPC64ADDconstForCarry, types.TypeFlags)
- v2.AuxInt = -32
- v2.AddArg(y)
- v1.AddArg(v2)
- v0.AddArg(v1)
v.AddArg(v0)
return true
}
-}
-func rewriteValuePPC64_OpRsh32Ux8_0(v *Value) bool {
- b := v.Block
- _ = b
- typ := &b.Func.Config.Types
- _ = typ
- // match: (Rsh32Ux8 x y)
+ // match: (Rsh32Ux64 x (AND (MOVDconst [31]) y))
// cond:
- // result: (SRW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt8to64 y)))))
+ // result: (SRW x (ANDconst <typ.Int32> [31] y))
for {
_ = v.Args[1]
x := v.Args[0]
- y := v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64AND {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 31 {
+ break
+ }
+ y := v_1.Args[1]
v.reset(OpPPC64SRW)
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpPPC64ORN, typ.Int64)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int32)
+ v0.AuxInt = 31
v0.AddArg(y)
- v1 := b.NewValue0(v.Pos, OpPPC64MaskIfNotCarry, typ.Int64)
- v2 := b.NewValue0(v.Pos, OpPPC64ADDconstForCarry, types.TypeFlags)
- v2.AuxInt = -32
- v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
- v3.AddArg(y)
- v2.AddArg(v3)
- v1.AddArg(v2)
- v0.AddArg(v1)
v.AddArg(v0)
return true
}
-}
-func rewriteValuePPC64_OpRsh32x16_0(v *Value) bool {
- b := v.Block
- _ = b
- typ := &b.Func.Config.Types
- _ = typ
- // match: (Rsh32x16 x y)
+ // match: (Rsh32Ux64 x (ANDconst <typ.UInt> [31] y))
// cond:
- // result: (SRAW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt16to64 y)))))
+ // result: (SRW x (ANDconst <typ.UInt> [31] y))
for {
_ = v.Args[1]
x := v.Args[0]
- y := v.Args[1]
- v.reset(OpPPC64SRAW)
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1.Type != typ.UInt {
+ break
+ }
+ if v_1.AuxInt != 31 {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpPPC64SRW)
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpPPC64ORN, typ.Int64)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v0.AuxInt = 31
v0.AddArg(y)
- v1 := b.NewValue0(v.Pos, OpPPC64MaskIfNotCarry, typ.Int64)
- v2 := b.NewValue0(v.Pos, OpPPC64ADDconstForCarry, types.TypeFlags)
- v2.AuxInt = -32
- v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
- v3.AddArg(y)
- v2.AddArg(v3)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh32Ux64 x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
+ // cond:
+ // result: (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 32 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ if v_1_1.AuxInt != 31 {
+ break
+ }
+ y := v_1_1.Args[0]
+ v.reset(OpPPC64SRW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v1.AuxInt = 32
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v2.AuxInt = 31
+ v2.AddArg(y)
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh32Ux64 x (SUB <typ.UInt> (MOVDconst [32]) (AND <typ.UInt> y (MOVDconst [31]))))
+ // cond:
+ // result: (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 32 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64AND {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1_1.Args[1]
+ y := v_1_1.Args[0]
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1_1.AuxInt != 31 {
+ break
+ }
+ v.reset(OpPPC64SRW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v1.AuxInt = 32
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v2.AuxInt = 31
+ v2.AddArg(y)
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh32Ux64 x (SUB <typ.UInt> (MOVDconst [32]) (AND <typ.UInt> (MOVDconst [31]) y)))
+ // cond:
+ // result: (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 32 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64AND {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1_0.AuxInt != 31 {
+ break
+ }
+ y := v_1_1.Args[1]
+ v.reset(OpPPC64SRW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v1.AuxInt = 32
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v2.AuxInt = 31
+ v2.AddArg(y)
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh32Ux64 x y)
+ // cond:
+ // result: (SRW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ v.reset(OpPPC64SRW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ORN, typ.Int64)
+ v0.AddArg(y)
+ v1 := b.NewValue0(v.Pos, OpPPC64MaskIfNotCarry, typ.Int64)
+ v2 := b.NewValue0(v.Pos, OpPPC64ADDconstForCarry, types.TypeFlags)
+ v2.AuxInt = -32
+ v2.AddArg(y)
+ v1.AddArg(v2)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ return true
+ }
+}
+func rewriteValuePPC64_OpRsh32Ux8_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Rsh32Ux8 x y)
+ // cond:
+ // result: (SRW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt8to64 y)))))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ v.reset(OpPPC64SRW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ORN, typ.Int64)
+ v0.AddArg(y)
+ v1 := b.NewValue0(v.Pos, OpPPC64MaskIfNotCarry, typ.Int64)
+ v2 := b.NewValue0(v.Pos, OpPPC64ADDconstForCarry, types.TypeFlags)
+ v2.AuxInt = -32
+ v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+ v3.AddArg(y)
+ v2.AddArg(v3)
+ v1.AddArg(v2)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ return true
+ }
+}
+func rewriteValuePPC64_OpRsh32x16_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Rsh32x16 x y)
+ // cond:
+ // result: (SRAW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt16to64 y)))))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ v.reset(OpPPC64SRAW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ORN, typ.Int64)
+ v0.AddArg(y)
+ v1 := b.NewValue0(v.Pos, OpPPC64MaskIfNotCarry, typ.Int64)
+ v2 := b.NewValue0(v.Pos, OpPPC64ADDconstForCarry, types.TypeFlags)
+ v2.AuxInt = -32
+ v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+ v3.AddArg(y)
+ v2.AddArg(v3)
v1.AddArg(v2)
v0.AddArg(v1)
v.AddArg(v0)
v.AddArg(x)
return true
}
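+ // Arithmetic right shifts use SRAW the same way: counts of 32 or more
+ // produce the sign fill, which is exactly Go's semantics for
+ // over-large signed shifts, so masked counts need no clamping.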
+ // match: (Rsh32x64 x (AND y (MOVDconst [31])))
+ // cond:
+ // result: (SRAW x (ANDconst <typ.Int32> [31] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64AND {
+ break
+ }
+ _ = v_1.Args[1]
+ y := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1.AuxInt != 31 {
+ break
+ }
+ v.reset(OpPPC64SRAW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int32)
+ v0.AuxInt = 31
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh32x64 x (AND (MOVDconst [31]) y))
+ // cond:
+ // result: (SRAW x (ANDconst <typ.Int32> [31] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64AND {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 31 {
+ break
+ }
+ y := v_1.Args[1]
+ v.reset(OpPPC64SRAW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int32)
+ v0.AuxInt = 31
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh32x64 x (ANDconst <typ.UInt> [31] y))
+ // cond:
+ // result: (SRAW x (ANDconst <typ.UInt> [31] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1.Type != typ.UInt {
+ break
+ }
+ if v_1.AuxInt != 31 {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpPPC64SRAW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v0.AuxInt = 31
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh32x64 x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
+ // cond:
+ // result: (SRAW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 32 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ if v_1_1.AuxInt != 31 {
+ break
+ }
+ y := v_1_1.Args[0]
+ v.reset(OpPPC64SRAW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v1.AuxInt = 32
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v2.AuxInt = 31
+ v2.AddArg(y)
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh32x64 x (SUB <typ.UInt> (MOVDconst [32]) (AND <typ.UInt> y (MOVDconst [31]))))
+ // cond:
+ // result: (SRAW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 32 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64AND {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1_1.Args[1]
+ y := v_1_1.Args[0]
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1_1.AuxInt != 31 {
+ break
+ }
+ v.reset(OpPPC64SRAW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v1.AuxInt = 32
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v2.AuxInt = 31
+ v2.AddArg(y)
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh32x64 x (SUB <typ.UInt> (MOVDconst [32]) (AND <typ.UInt> (MOVDconst [31]) y)))
+ // cond:
+ // result: (SRAW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 32 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64AND {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1_0.AuxInt != 31 {
+ break
+ }
+ y := v_1_1.Args[1]
+ v.reset(OpPPC64SRAW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v1.AuxInt = 32
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v2.AuxInt = 31
+ v2.AddArg(y)
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh32x64 x y)
// cond:
// result: (SRAW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SRDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Rsh64Ux32 x y)
+ // cond:
+ // result: (SRD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ v.reset(OpPPC64SRD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ORN, typ.Int64)
+ v0.AddArg(y)
+ v1 := b.NewValue0(v.Pos, OpPPC64MaskIfNotCarry, typ.Int64)
+ v2 := b.NewValue0(v.Pos, OpPPC64ADDconstForCarry, types.TypeFlags)
+ v2.AuxInt = -64
+ v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+ v3.AddArg(y)
+ v2.AddArg(v3)
+ v1.AddArg(v2)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ return true
+ }
+}
+func rewriteValuePPC64_OpRsh64Ux64_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Rsh64Ux64 x (Const64 [c]))
+ // cond: uint64(c) < 64
+ // result: (SRDconst x [c])
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SRDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Rsh64Ux64 _ (Const64 [c]))
+ // cond: uint64(c) >= 64
+ // result: (MOVDconst [0])
+ for {
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 64) {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (Rsh64Ux64 x (MOVDconst [c]))
+ // cond: uint64(c) < 64
+ // result: (SRDconst x [c])
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SRDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
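+ // The 64-bit unsigned analogues: y&63 and 64-(y&63) are both safe SRD
+ // counts (SRD counts of 64 or more produce 0), mirroring the SRW rules.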
+ // match: (Rsh64Ux64 x (AND y (MOVDconst [63])))
+ // cond:
+ // result: (SRD x (ANDconst <typ.Int64> [63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64AND {
+ break
+ }
+ _ = v_1.Args[1]
+ y := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1.AuxInt != 63 {
+ break
+ }
+ v.reset(OpPPC64SRD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int64)
+ v0.AuxInt = 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh64Ux64 x (AND (MOVDconst [63]) y))
+ // cond:
+ // result: (SRD x (ANDconst <typ.Int64> [63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64AND {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 63 {
+ break
+ }
+ y := v_1.Args[1]
+ v.reset(OpPPC64SRD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int64)
+ v0.AuxInt = 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh64Ux64 x (ANDconst <typ.UInt> [63] y))
+ // cond:
+ // result: (SRD x (ANDconst <typ.UInt> [63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64ANDconst {
break
}
- c := v_1.AuxInt
- if !(uint32(c) < 64) {
+ if v_1.Type != typ.UInt {
break
}
- v.reset(OpPPC64SRDconst)
- v.AuxInt = c
+ if v_1.AuxInt != 63 {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpPPC64SRD)
v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v0.AuxInt = 63
+ v0.AddArg(y)
+ v.AddArg(v0)
return true
}
- // match: (Rsh64Ux32 x y)
+ // match: (Rsh64Ux64 x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
// cond:
- // result: (SRD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
+ // result: (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
for {
_ = v.Args[1]
x := v.Args[0]
- y := v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 64 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ if v_1_1.AuxInt != 63 {
+ break
+ }
+ y := v_1_1.Args[0]
v.reset(OpPPC64SRD)
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpPPC64ORN, typ.Int64)
- v0.AddArg(y)
- v1 := b.NewValue0(v.Pos, OpPPC64MaskIfNotCarry, typ.Int64)
- v2 := b.NewValue0(v.Pos, OpPPC64ADDconstForCarry, types.TypeFlags)
- v2.AuxInt = -64
- v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
- v3.AddArg(y)
- v2.AddArg(v3)
- v1.AddArg(v2)
+ v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v1.AuxInt = 64
v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v2.AuxInt = 63
+ v2.AddArg(y)
+ v0.AddArg(v2)
v.AddArg(v0)
return true
}
-}
-func rewriteValuePPC64_OpRsh64Ux64_0(v *Value) bool {
- b := v.Block
- _ = b
- typ := &b.Func.Config.Types
- _ = typ
- // match: (Rsh64Ux64 x (Const64 [c]))
- // cond: uint64(c) < 64
- // result: (SRDconst x [c])
+ // match: (Rsh64Ux64 x (SUB <typ.UInt> (MOVDconst [64]) (AND <typ.UInt> y (MOVDconst [63]))))
+ // cond:
+ // result: (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpPPC64SUB {
break
}
- c := v_1.AuxInt
- if !(uint64(c) < 64) {
+ if v_1.Type != typ.UInt {
break
}
- v.reset(OpPPC64SRDconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Rsh64Ux64 _ (Const64 [c]))
- // cond: uint64(c) >= 64
- // result: (MOVDconst [0])
- for {
- _ = v.Args[1]
- v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
break
}
- c := v_1.AuxInt
- if !(uint64(c) >= 64) {
+ if v_1_0.AuxInt != 64 {
break
}
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = 0
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64AND {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1_1.Args[1]
+ y := v_1_1.Args[0]
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1_1.AuxInt != 63 {
+ break
+ }
+ v.reset(OpPPC64SRD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v1.AuxInt = 64
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v2.AuxInt = 63
+ v2.AddArg(y)
+ v0.AddArg(v2)
+ v.AddArg(v0)
return true
}
- // match: (Rsh64Ux64 x (MOVDconst [c]))
- // cond: uint64(c) < 64
- // result: (SRDconst x [c])
+ // match: (Rsh64Ux64 x (SUB <typ.UInt> (MOVDconst [64]) (AND <typ.UInt> (MOVDconst [63]) y)))
+ // cond:
+ // result: (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpPPC64MOVDconst {
+ if v_1.Op != OpPPC64SUB {
break
}
- c := v_1.AuxInt
- if !(uint64(c) < 64) {
+ if v_1.Type != typ.UInt {
break
}
- v.reset(OpPPC64SRDconst)
- v.AuxInt = c
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 64 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64AND {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1_0.AuxInt != 63 {
+ break
+ }
+ y := v_1_1.Args[1]
+ v.reset(OpPPC64SRD)
v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v1.AuxInt = 64
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v2.AuxInt = 63
+ v2.AddArg(y)
+ v0.AddArg(v2)
+ v.AddArg(v0)
return true
}
// match: (Rsh64Ux64 x y)
v.AddArg(x)
return true
}
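+ // And the SRAD analogues for signed 64-bit shifts; a count of 64
+ // yields the sign fill, as Go requires.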
+ // match: (Rsh64x64 x (AND y (MOVDconst [63])))
+ // cond:
+ // result: (SRAD x (ANDconst <typ.Int64> [63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64AND {
+ break
+ }
+ _ = v_1.Args[1]
+ y := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1.AuxInt != 63 {
+ break
+ }
+ v.reset(OpPPC64SRAD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int64)
+ v0.AuxInt = 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh64x64 x (AND (MOVDconst [63]) y))
+ // cond:
+ // result: (SRAD x (ANDconst <typ.Int64> [63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64AND {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 63 {
+ break
+ }
+ y := v_1.Args[1]
+ v.reset(OpPPC64SRAD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int64)
+ v0.AuxInt = 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh64x64 x (ANDconst <typ.UInt> [63] y))
+ // cond:
+ // result: (SRAD x (ANDconst <typ.UInt> [63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1.Type != typ.UInt {
+ break
+ }
+ if v_1.AuxInt != 63 {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpPPC64SRAD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v0.AuxInt = 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh64x64 x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
+ // cond:
+ // result: (SRAD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 64 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64ANDconst {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ if v_1_1.AuxInt != 63 {
+ break
+ }
+ y := v_1_1.Args[0]
+ v.reset(OpPPC64SRAD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v1.AuxInt = 64
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v2.AuxInt = 63
+ v2.AddArg(y)
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh64x64 x (SUB <typ.UInt> (MOVDconst [64]) (AND <typ.UInt> y (MOVDconst [63]))))
+ // cond:
+ // result: (SRAD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 64 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64AND {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1_1.Args[1]
+ y := v_1_1.Args[0]
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1_1.AuxInt != 63 {
+ break
+ }
+ v.reset(OpPPC64SRAD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v1.AuxInt = 64
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v2.AuxInt = 63
+ v2.AddArg(y)
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh64x64 x (SUB <typ.UInt> (MOVDconst [64]) (AND <typ.UInt> (MOVDconst [63]) y)))
+ // cond:
+ // result: (SRAD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64SUB {
+ break
+ }
+ if v_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_0.AuxInt != 64 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpPPC64AND {
+ break
+ }
+ if v_1_1.Type != typ.UInt {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_1_1_0.AuxInt != 63 {
+ break
+ }
+ y := v_1_1.Args[1]
+ v.reset(OpPPC64SRAD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v1.AuxInt = 64
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
+ v2.AuxInt = 63
+ v2.AddArg(y)
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh64x64 x y)
// cond:
// result: (SRAD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] y))))