case OpRsh32Ux32:
return rewriteValuePPC64_OpRsh32Ux32_0(v)
case OpRsh32Ux64:
- return rewriteValuePPC64_OpRsh32Ux64_0(v)
+ return rewriteValuePPC64_OpRsh32Ux64_0(v) || rewriteValuePPC64_OpRsh32Ux64_10(v)
case OpRsh32Ux8:
return rewriteValuePPC64_OpRsh32Ux8_0(v)
case OpRsh32x16:
return rewriteValuePPC64_OpRsh32x16_0(v)
case OpRsh32x32:
return rewriteValuePPC64_OpRsh32x32_0(v)
case OpRsh32x64:
- return rewriteValuePPC64_OpRsh32x64_0(v)
+ return rewriteValuePPC64_OpRsh32x64_0(v) || rewriteValuePPC64_OpRsh32x64_10(v)
case OpRsh32x8:
return rewriteValuePPC64_OpRsh32x8_0(v)
case OpRsh64Ux16:
return rewriteValuePPC64_OpRsh64Ux16_0(v)
case OpRsh64Ux32:
return rewriteValuePPC64_OpRsh64Ux32_0(v)
case OpRsh64Ux64:
- return rewriteValuePPC64_OpRsh64Ux64_0(v)
+ return rewriteValuePPC64_OpRsh64Ux64_0(v) || rewriteValuePPC64_OpRsh64Ux64_10(v)
case OpRsh64Ux8:
return rewriteValuePPC64_OpRsh64Ux8_0(v)
case OpRsh64x16:
return rewriteValuePPC64_OpRsh64x16_0(v)
case OpRsh64x32:
return rewriteValuePPC64_OpRsh64x32_0(v)
case OpRsh64x64:
- return rewriteValuePPC64_OpRsh64x64_0(v)
+ return rewriteValuePPC64_OpRsh64x64_0(v) || rewriteValuePPC64_OpRsh64x64_10(v)
case OpRsh64x8:
return rewriteValuePPC64_OpRsh64x8_0(v)
case OpRsh8Ux16:
return rewriteValuePPC64_OpRsh8Ux16_0(v)
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh16x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh16x16 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt16to64 y)))))
for {
return true
}
// match: (Lsh16x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh16x32 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt32to64 y)))))
for {
return true
}
// match: (Lsh16x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh16x64 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] y))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh16x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh16x8 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt8to64 y)))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh32x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh32x16 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt16to64 y)))))
for {
return true
}
// match: (Lsh32x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh32x32 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt32to64 y)))))
for {
v.AddArg(x)
return true
}
+ // match: (Lsh32x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh32x64 x (AND y (MOVDconst [31])))
// cond:
// result: (SLW x (ANDconst <typ.Int32> [31] y))
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh32x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh32x8 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt8to64 y)))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh64x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh64x16 x y)
// cond:
// result: (SLD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt16to64 y)))))
for {
return true
}
// match: (Lsh64x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh64x32 x y)
// cond:
// result: (SLD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
for {
v.AddArg(x)
return true
}
+ // match: (Lsh64x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh64x64 x (AND y (MOVDconst [63])))
// cond:
// result: (SLD x (ANDconst <typ.Int64> [63] y))
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh64x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh64x8 x y)
// cond:
// result: (SLD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt8to64 y)))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh8x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh8x16 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt16to64 y)))))
for {
return true
}
// match: (Lsh8x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh8x32 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt32to64 y)))))
for {
return true
}
// match: (Lsh8x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh8x64 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] y))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh8x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Lsh8x8 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt8to64 y)))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh16Ux16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVHZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVHZreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh16Ux16 x y)
// cond:
// result: (SRW (ZeroExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt16to64 y)))))
for {
return true
}
// match: (Rsh16Ux32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVHZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVHZreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh16Ux32 x y)
// cond:
// result: (SRW (ZeroExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt32to64 y)))))
for {
return true
}
// match: (Rsh16Ux64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVHZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVHZreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh16Ux64 x y)
// cond:
// result: (SRW (ZeroExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] y))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh16Ux8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVHZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVHZreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh16Ux8 x y)
// cond:
// result: (SRW (ZeroExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt8to64 y)))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh16x16 x y)
- // cond:
- // result: (SRAW (SignExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt16to64 y)))))
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVHreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
v.reset(OpPPC64SRAW)
- v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVHreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpPPC64ORN, typ.Int64)
- v1.AddArg(y)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh16x16 x y)
+ // cond:
+ // result: (SRAW (SignExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt16to64 y)))))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ v.reset(OpPPC64SRAW)
+ v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v1 := b.NewValue0(v.Pos, OpPPC64ORN, typ.Int64)
+ v1.AddArg(y)
v2 := b.NewValue0(v.Pos, OpPPC64MaskIfNotCarry, typ.Int64)
v3 := b.NewValue0(v.Pos, OpPPC64ADDconstForCarry, types.TypeFlags)
v3.AuxInt = -16
return true
}
// match: (Rsh16x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVHreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVHreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh16x32 x y)
// cond:
// result: (SRAW (SignExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt32to64 y)))))
for {
return true
}
// match: (Rsh16x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVHreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVHreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh16x64 x y)
// cond:
// result: (SRAW (SignExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] y))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh16x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVHreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVHreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh16x8 x y)
// cond:
// result: (SRAW (SignExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt8to64 y)))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh32Ux16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh32Ux16 x y)
// cond:
// result: (SRW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt16to64 y)))))
for {
return true
}
// match: (Rsh32Ux32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh32Ux32 x y)
// cond:
// result: (SRW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt32to64 y)))))
for {
v.AddArg(x)
return true
}
+ // match: (Rsh32Ux64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh32Ux64 x (AND y (MOVDconst [31])))
// cond:
// result: (SRW x (ANDconst <typ.Int32> [31] y))
v.AddArg(v0)
return true
}
+ return false
+}
+func rewriteValuePPC64_OpRsh32Ux64_10(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (Rsh32Ux64 x y)
// cond:
// result: (SRW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh32Ux8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh32Ux8 x y)
// cond:
// result: (SRW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt8to64 y)))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh32x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh32x16 x y)
// cond:
// result: (SRAW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt16to64 y)))))
for {
return true
}
// match: (Rsh32x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh32x32 x y)
// cond:
// result: (SRAW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt32to64 y)))))
for {
v.AddArg(x)
return true
}
+ // match: (Rsh32x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh32x64 x (AND y (MOVDconst [31])))
// cond:
// result: (SRAW x (ANDconst <typ.Int32> [31] y))
v.AddArg(v0)
return true
}
+ return false
+}
+func rewriteValuePPC64_OpRsh32x64_10(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (Rsh32x64 x y)
// cond:
// result: (SRAW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh32x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh32x8 x y)
// cond:
// result: (SRAW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt8to64 y)))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh64Ux16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh64Ux16 x y)
// cond:
// result: (SRD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt16to64 y)))))
for {
return true
}
// match: (Rsh64Ux32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh64Ux32 x y)
// cond:
// result: (SRD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
for {
v.AddArg(x)
return true
}
+ // match: (Rsh64Ux64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh64Ux64 x (AND y (MOVDconst [63])))
// cond:
// result: (SRD x (ANDconst <typ.Int64> [63] y))
v.AddArg(v0)
return true
}
+ return false
+}
+func rewriteValuePPC64_OpRsh64Ux64_10(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (Rsh64Ux64 x y)
// cond:
// result: (SRD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh64Ux8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh64Ux8 x y)
// cond:
// result: (SRD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt8to64 y)))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh64x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh64x16 x y)
// cond:
// result: (SRAD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt16to64 y)))))
for {
return true
}
// match: (Rsh64x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh64x32 x y)
// cond:
// result: (SRAD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
for {
v.AddArg(x)
return true
}
+ // match: (Rsh64x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh64x64 x (AND y (MOVDconst [63])))
// cond:
// result: (SRAD x (ANDconst <typ.Int64> [63] y))
v.AddArg(v0)
return true
}
+ return false
+}
+func rewriteValuePPC64_OpRsh64x64_10(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (Rsh64x64 x y)
// cond:
// result: (SRAD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh64x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh64x8 x y)
// cond:
// result: (SRAD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt8to64 y)))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh8Ux16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVBZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVBZreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh8Ux16 x y)
// cond:
// result: (SRW (ZeroExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt16to64 y)))))
for {
return true
}
// match: (Rsh8Ux32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVBZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVBZreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh8Ux32 x y)
// cond:
// result: (SRW (ZeroExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt32to64 y)))))
for {
return true
}
// match: (Rsh8Ux64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVBZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVBZreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh8Ux64 x y)
// cond:
// result: (SRW (ZeroExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] y))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh8Ux8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVBZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVBZreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh8Ux8 x y)
// cond:
// result: (SRW (ZeroExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt8to64 y)))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh8x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVBreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVBreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh8x16 x y)
// cond:
// result: (SRAW (SignExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt16to64 y)))))
for {
return true
}
// match: (Rsh8x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVBreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVBreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh8x32 x y)
// cond:
// result: (SRAW (SignExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt32to64 y)))))
for {
return true
}
// match: (Rsh8x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVBreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVBreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh8x64 x y)
// cond:
// result: (SRAW (SignExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] y))))
for {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh8x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVBreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpPPC64SRAW)
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVBreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh8x8 x y)
// cond:
// result: (SRAW (SignExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt8to64 y)))))
for {