case OpS390XCMPU:
return rewriteValueS390X_OpS390XCMPU_0(v)
case OpS390XCMPUconst:
- return rewriteValueS390X_OpS390XCMPUconst_0(v)
+ return rewriteValueS390X_OpS390XCMPUconst_0(v) || rewriteValueS390X_OpS390XCMPUconst_10(v)
case OpS390XCMPW:
return rewriteValueS390X_OpS390XCMPW_0(v)
case OpS390XCMPWU:
case OpS390XCMPWconst:
return rewriteValueS390X_OpS390XCMPWconst_0(v)
case OpS390XCMPconst:
- return rewriteValueS390X_OpS390XCMPconst_0(v)
+ return rewriteValueS390X_OpS390XCMPconst_0(v) || rewriteValueS390X_OpS390XCMPconst_10(v)
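// Note: rulegen appears to emit one rewrite helper per block of ten rules
// (hence the _0 and _10 suffixes), so ops whose rule count crossed ten in this
// change, such as CMPUconst and CMPconst here, are now dispatched through a
// short-circuit || chain of those helpers.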
case OpS390XCPSDR:
return rewriteValueS390X_OpS390XCPSDR_0(v)
case OpS390XFADD:
case OpS390XORload:
return rewriteValueS390X_OpS390XORload_0(v)
case OpS390XSLD:
- return rewriteValueS390X_OpS390XSLD_0(v)
+ return rewriteValueS390X_OpS390XSLD_0(v) || rewriteValueS390X_OpS390XSLD_10(v)
case OpS390XSLW:
- return rewriteValueS390X_OpS390XSLW_0(v)
+ return rewriteValueS390X_OpS390XSLW_0(v) || rewriteValueS390X_OpS390XSLW_10(v)
case OpS390XSRAD:
- return rewriteValueS390X_OpS390XSRAD_0(v)
+ return rewriteValueS390X_OpS390XSRAD_0(v) || rewriteValueS390X_OpS390XSRAD_10(v)
case OpS390XSRADconst:
return rewriteValueS390X_OpS390XSRADconst_0(v)
case OpS390XSRAW:
- return rewriteValueS390X_OpS390XSRAW_0(v)
+ return rewriteValueS390X_OpS390XSRAW_0(v) || rewriteValueS390X_OpS390XSRAW_10(v)
case OpS390XSRAWconst:
return rewriteValueS390X_OpS390XSRAWconst_0(v)
case OpS390XSRD:
- return rewriteValueS390X_OpS390XSRD_0(v)
+ return rewriteValueS390X_OpS390XSRD_0(v) || rewriteValueS390X_OpS390XSRD_10(v)
case OpS390XSRDconst:
return rewriteValueS390X_OpS390XSRDconst_0(v)
case OpS390XSRW:
- return rewriteValueS390X_OpS390XSRW_0(v)
+ return rewriteValueS390X_OpS390XSRW_0(v) || rewriteValueS390X_OpS390XSRW_10(v)
case OpS390XSTM2:
return rewriteValueS390X_OpS390XSTM2_0(v)
case OpS390XSTMG2:
return rewriteValueS390X_OpS390XSTMG2_0(v)
case OpS390XSUB:
return rewriteValueS390X_OpS390XSUB_0(v)
- case OpS390XSUBEWcarrymask:
- return rewriteValueS390X_OpS390XSUBEWcarrymask_0(v)
- case OpS390XSUBEcarrymask:
- return rewriteValueS390X_OpS390XSUBEcarrymask_0(v)
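// The SUBEWcarrymask and SUBEcarrymask dispatch entries are removed: the new
// shift lowerings below no longer generate these carry-mask ops, so their
// rewrite functions are presumably dropped elsewhere in this change as well.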
case OpS390XSUBW:
return rewriteValueS390X_OpS390XSUBW_0(v)
case OpS390XSUBWconst:
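// The shift lowerings below all follow the same two-rule pattern, shown first
// for Lsh16x16 and repeated for every operand-width/amount-width combination:
//
//  1. If shiftIsBounded(v) reports that the shift amount is already known to be
//     less than the operand width, emit the machine shift (SLW/SLD, SRW/SRD,
//     SRAW/SRAD) directly.
//  2. Otherwise, instead of masking the result with AND/SUBE(W)carrymask as
//     before, use a conditional move: (MOVDGE a b cond) yields b when cond
//     compares greater-or-equal and a otherwise, so comparing the amount
//     against 64 and selecting (MOVDconst [0]) zeroes over-long shifts.
//
// The compare is against 64 rather than the operand width because
// z/Architecture shifts use only the low six bits of the amount, so amounts in
// [width, 63] already yield the correct all-zero result for left and logical
// right shifts; only amounts >= 64 would wrap around.
//
// A hypothetical example of a shift the compiler typically marks as bounded
// (not part of this change):
//
//	func shl(x uint16, s uint) uint16 {
//		return x << (s & 15) // amount provably < 16, so shiftIsBounded(v) holds
//	}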
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Lsh16x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh16x16 <t> x y)
// cond:
- // result: (ANDW (SLW <t> x y) (SUBEWcarrymask <t> (CMPWUconst (MOVHZreg y) [31])))
+ // result: (MOVDGE <t> (SLW <t> x y) (MOVDconst [0]) (CMPWUconst (MOVHZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v3 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
func rewriteValueS390X_OpLsh16x32_0(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Lsh16x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh16x32 <t> x y)
// cond:
- // result: (ANDW (SLW <t> x y) (SUBEWcarrymask <t> (CMPWUconst y [31])))
+ // result: (MOVDGE <t> (SLW <t> x y) (MOVDconst [0]) (CMPWUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v2.AddArg(y)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
func rewriteValueS390X_OpLsh16x64_0(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Lsh16x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh16x64 <t> x y)
// cond:
- // result: (ANDW (SLW <t> x y) (SUBEWcarrymask <t> (CMPUconst y [31])))
+ // result: (MOVDGE <t> (SLW <t> x y) (MOVDconst [0]) (CMPUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v2.AddArg(y)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Lsh16x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh16x8 <t> x y)
// cond:
- // result: (ANDW (SLW <t> x y) (SUBEWcarrymask <t> (CMPWUconst (MOVBZreg y) [31])))
+ // result: (MOVDGE <t> (SLW <t> x y) (MOVDconst [0]) (CMPWUconst (MOVBZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v3 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Lsh32x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh32x16 <t> x y)
// cond:
- // result: (ANDW (SLW <t> x y) (SUBEWcarrymask <t> (CMPWUconst (MOVHZreg y) [31])))
+ // result: (MOVDGE <t> (SLW <t> x y) (MOVDconst [0]) (CMPWUconst (MOVHZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v3 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
func rewriteValueS390X_OpLsh32x32_0(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Lsh32x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh32x32 <t> x y)
// cond:
- // result: (ANDW (SLW <t> x y) (SUBEWcarrymask <t> (CMPWUconst y [31])))
+ // result: (MOVDGE <t> (SLW <t> x y) (MOVDconst [0]) (CMPWUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v2.AddArg(y)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
func rewriteValueS390X_OpLsh32x64_0(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Lsh32x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh32x64 <t> x y)
// cond:
- // result: (ANDW (SLW <t> x y) (SUBEWcarrymask <t> (CMPUconst y [31])))
+ // result: (MOVDGE <t> (SLW <t> x y) (MOVDconst [0]) (CMPUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v2.AddArg(y)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Lsh32x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh32x8 <t> x y)
// cond:
- // result: (ANDW (SLW <t> x y) (SUBEWcarrymask <t> (CMPWUconst (MOVBZreg y) [31])))
+ // result: (MOVDGE <t> (SLW <t> x y) (MOVDconst [0]) (CMPWUconst (MOVBZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v3 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
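// 64-bit shifts use the doubleword forms SLD/SRD/SRAD in place of the 32-bit
// SLW/SRW/SRAW, but are otherwise lowered with the same bounded fast path and
// MOVDGE-against-64 clamp as above.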
+ // match: (Lsh64x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh64x16 <t> x y)
// cond:
- // result: (AND (SLD <t> x y) (SUBEcarrymask <t> (CMPWUconst (MOVHZreg y) [63])))
+ // result: (MOVDGE <t> (SLD <t> x y) (MOVDconst [0]) (CMPWUconst (MOVHZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XAND)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLD, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 63
+ v2.AuxInt = 64
v3 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
func rewriteValueS390X_OpLsh64x32_0(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Lsh64x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh64x32 <t> x y)
// cond:
- // result: (AND (SLD <t> x y) (SUBEcarrymask <t> (CMPWUconst y [63])))
+ // result: (MOVDGE <t> (SLD <t> x y) (MOVDconst [0]) (CMPWUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XAND)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLD, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 63
+ v2.AuxInt = 64
v2.AddArg(y)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
func rewriteValueS390X_OpLsh64x64_0(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Lsh64x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh64x64 <t> x y)
// cond:
- // result: (AND (SLD <t> x y) (SUBEcarrymask <t> (CMPUconst y [63])))
+ // result: (MOVDGE <t> (SLD <t> x y) (MOVDconst [0]) (CMPUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XAND)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLD, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
- v2.AuxInt = 63
+ v2.AuxInt = 64
v2.AddArg(y)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Lsh64x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh64x8 <t> x y)
// cond:
- // result: (AND (SLD <t> x y) (SUBEcarrymask <t> (CMPWUconst (MOVBZreg y) [63])))
+ // result: (MOVDGE <t> (SLD <t> x y) (MOVDconst [0]) (CMPWUconst (MOVBZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XAND)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLD, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 63
+ v2.AuxInt = 64
v3 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Lsh8x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh8x16 <t> x y)
// cond:
- // result: (ANDW (SLW <t> x y) (SUBEWcarrymask <t> (CMPWUconst (MOVHZreg y) [31])))
+ // result: (MOVDGE <t> (SLW <t> x y) (MOVDconst [0]) (CMPWUconst (MOVHZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v3 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
func rewriteValueS390X_OpLsh8x32_0(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Lsh8x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh8x32 <t> x y)
// cond:
- // result: (ANDW (SLW <t> x y) (SUBEWcarrymask <t> (CMPWUconst y [31])))
+ // result: (MOVDGE <t> (SLW <t> x y) (MOVDconst [0]) (CMPWUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v2.AddArg(y)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
func rewriteValueS390X_OpLsh8x64_0(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Lsh8x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh8x64 <t> x y)
// cond:
- // result: (ANDW (SLW <t> x y) (SUBEWcarrymask <t> (CMPUconst y [31])))
+ // result: (MOVDGE <t> (SLW <t> x y) (MOVDconst [0]) (CMPUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v2.AddArg(y)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Lsh8x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Lsh8x8 <t> x y)
// cond:
- // result: (ANDW (SLW <t> x y) (SUBEWcarrymask <t> (CMPWUconst (MOVBZreg y) [31])))
+ // result: (MOVDGE <t> (SLW <t> x y) (MOVDconst [0]) (CMPWUconst (MOVBZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSLW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v3 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
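// For 8- and 16-bit unsigned right shifts the operand is zero-extended first
// (MOVBZreg/MOVHZreg), in both the bounded and the MOVDGE forms, so that the
// logical shift brings in zeros rather than whatever stale bits occupy the
// upper part of the register.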
+ // match: (Rsh16Ux16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVHZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh16Ux16 <t> x y)
// cond:
- // result: (ANDW (SRW <t> (MOVHZreg x) y) (SUBEWcarrymask <t> (CMPWUconst (MOVHZreg y) [15])))
+ // result: (MOVDGE <t> (SRW <t> (MOVHZreg x) y) (MOVDconst [0]) (CMPWUconst (MOVHZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRW, t)
v1 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
v1.AddArg(x)
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v2.AuxInt = 0
+ v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v3.AuxInt = 15
+ v3.AuxInt = 64
v4 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
- v2.AddArg(v3)
- v.AddArg(v2)
+ v.AddArg(v3)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Rsh16Ux32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVHZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh16Ux32 <t> x y)
// cond:
- // result: (ANDW (SRW <t> (MOVHZreg x) y) (SUBEWcarrymask <t> (CMPWUconst y [15])))
+ // result: (MOVDGE <t> (SRW <t> (MOVHZreg x) y) (MOVDconst [0]) (CMPWUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRW, t)
v1 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
v1.AddArg(x)
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v2.AuxInt = 0
+ v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v3.AuxInt = 15
+ v3.AuxInt = 64
v3.AddArg(y)
- v2.AddArg(v3)
- v.AddArg(v2)
+ v.AddArg(v3)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Rsh16Ux64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVHZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh16Ux64 <t> x y)
// cond:
- // result: (ANDW (SRW <t> (MOVHZreg x) y) (SUBEWcarrymask <t> (CMPUconst y [15])))
+ // result: (MOVDGE <t> (SRW <t> (MOVHZreg x) y) (MOVDconst [0]) (CMPUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRW, t)
v1 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
v1.AddArg(x)
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v2.AuxInt = 0
+ v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
- v3.AuxInt = 15
+ v3.AuxInt = 64
v3.AddArg(y)
- v2.AddArg(v3)
- v.AddArg(v2)
+ v.AddArg(v3)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Rsh16Ux8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVHZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh16Ux8 <t> x y)
// cond:
- // result: (ANDW (SRW <t> (MOVHZreg x) y) (SUBEWcarrymask <t> (CMPWUconst (MOVBZreg y) [15])))
+ // result: (MOVDGE <t> (SRW <t> (MOVHZreg x) y) (MOVDconst [0]) (CMPWUconst (MOVBZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRW, t)
v1 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
v1.AddArg(x)
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v2.AuxInt = 0
+ v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v3.AuxInt = 15
+ v3.AuxInt = 64
v4 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
- v2.AddArg(v3)
- v.AddArg(v2)
+ v.AddArg(v3)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
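// Signed right shifts cannot simply zero the result when the amount is too
// large: Go defines an over-long arithmetic shift to fill with the sign bit.
// So here MOVDGE clamps the shift amount rather than selecting the result:
// if the amount is >= 64 it is replaced by 63, and SRAW/SRAD then produces the
// all-sign-bits value. Sub-word operands are sign-extended (MOVHreg/MOVBreg)
// before shifting.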
- // match: (Rsh16x16 <t> x y)
+ // match: (Rsh16x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVHreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVHreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh16x16 x y)
// cond:
- // result: (SRAW <t> (MOVHreg x) (ORW <y.Type> y (NOTW <y.Type> (SUBEWcarrymask <y.Type> (CMPWUconst (MOVHZreg y) [15])))))
+ // result: (SRAW (MOVHreg x) (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPWUconst (MOVHZreg y) [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAW)
- v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XMOVHreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XORW, y.Type)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v1.AddArg(y)
- v2 := b.NewValue0(v.Pos, OpS390XNOTW, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, y.Type)
- v4 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v4.AuxInt = 15
- v5 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
- v5.AddArg(y)
- v4.AddArg(v5)
- v3.AddArg(v4)
- v2.AddArg(v3)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v2.AuxInt = 63
v1.AddArg(v2)
+ v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
+ v3.AuxInt = 64
+ v4 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
+ v4.AddArg(y)
+ v3.AddArg(v4)
+ v1.AddArg(v3)
v.AddArg(v1)
return true
}
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh16x32 <t> x y)
+ // match: (Rsh16x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVHreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVHreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh16x32 x y)
// cond:
- // result: (SRAW <t> (MOVHreg x) (ORW <y.Type> y (NOTW <y.Type> (SUBEWcarrymask <y.Type> (CMPWUconst y [15])))))
+ // result: (SRAW (MOVHreg x) (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPWUconst y [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAW)
- v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XMOVHreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XORW, y.Type)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v1.AddArg(y)
- v2 := b.NewValue0(v.Pos, OpS390XNOTW, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, y.Type)
- v4 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v4.AuxInt = 15
- v4.AddArg(y)
- v3.AddArg(v4)
- v2.AddArg(v3)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v2.AuxInt = 63
v1.AddArg(v2)
+ v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
+ v3.AuxInt = 64
+ v3.AddArg(y)
+ v1.AddArg(v3)
v.AddArg(v1)
return true
}
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh16x64 <t> x y)
+ // match: (Rsh16x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVHreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVHreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh16x64 x y)
// cond:
- // result: (SRAW <t> (MOVHreg x) (OR <y.Type> y (NOT <y.Type> (SUBEcarrymask <y.Type> (CMPUconst y [15])))))
+ // result: (SRAW (MOVHreg x) (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPUconst y [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAW)
- v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XMOVHreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XOR, y.Type)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v1.AddArg(y)
- v2 := b.NewValue0(v.Pos, OpS390XNOT, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XSUBEcarrymask, y.Type)
- v4 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
- v4.AuxInt = 15
- v4.AddArg(y)
- v3.AddArg(v4)
- v2.AddArg(v3)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v2.AuxInt = 63
v1.AddArg(v2)
+ v3 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
+ v3.AuxInt = 64
+ v3.AddArg(y)
+ v1.AddArg(v3)
v.AddArg(v1)
return true
}
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh16x8 <t> x y)
+ // match: (Rsh16x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVHreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVHreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh16x8 x y)
// cond:
- // result: (SRAW <t> (MOVHreg x) (ORW <y.Type> y (NOTW <y.Type> (SUBEWcarrymask <y.Type> (CMPWUconst (MOVBZreg y) [15])))))
+ // result: (SRAW (MOVHreg x) (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPWUconst (MOVBZreg y) [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAW)
- v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XMOVHreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XORW, y.Type)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v1.AddArg(y)
- v2 := b.NewValue0(v.Pos, OpS390XNOTW, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, y.Type)
- v4 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v4.AuxInt = 15
- v5 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
- v5.AddArg(y)
- v4.AddArg(v5)
- v3.AddArg(v4)
- v2.AddArg(v3)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v2.AuxInt = 63
v1.AddArg(v2)
+ v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
+ v3.AuxInt = 64
+ v4 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
+ v4.AddArg(y)
+ v3.AddArg(v4)
+ v1.AddArg(v3)
v.AddArg(v1)
return true
}
_ = b
typ := &b.Func.Config.Types
_ = typ
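// 32-bit operands already fill the low word, so the unsigned 32-bit right
// shifts below need no explicit zero extension of x; the rest of the pattern
// matches the narrower cases above.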
+ // match: (Rsh32Ux16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh32Ux16 <t> x y)
// cond:
- // result: (ANDW (SRW <t> x y) (SUBEWcarrymask <t> (CMPWUconst (MOVHZreg y) [31])))
+ // result: (MOVDGE <t> (SRW <t> x y) (MOVDconst [0]) (CMPWUconst (MOVHZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v3 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
func rewriteValueS390X_OpRsh32Ux32_0(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Rsh32Ux32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh32Ux32 <t> x y)
// cond:
- // result: (ANDW (SRW <t> x y) (SUBEWcarrymask <t> (CMPWUconst y [31])))
+ // result: (MOVDGE <t> (SRW <t> x y) (MOVDconst [0]) (CMPWUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v2.AddArg(y)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
func rewriteValueS390X_OpRsh32Ux64_0(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Rsh32Ux64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh32Ux64 <t> x y)
// cond:
- // result: (ANDW (SRW <t> x y) (SUBEWcarrymask <t> (CMPUconst y [31])))
+ // result: (MOVDGE <t> (SRW <t> x y) (MOVDconst [0]) (CMPUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v2.AddArg(y)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Rsh32Ux8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh32Ux8 <t> x y)
// cond:
- // result: (ANDW (SRW <t> x y) (SUBEWcarrymask <t> (CMPWUconst (MOVBZreg y) [31])))
+ // result: (MOVDGE <t> (SRW <t> x y) (MOVDconst [0]) (CMPWUconst (MOVBZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRW, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 31
+ v2.AuxInt = 64
v3 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh32x16 <t> x y)
+ // match: (Rsh32x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh32x16 x y)
// cond:
- // result: (SRAW <t> x (ORW <y.Type> y (NOTW <y.Type> (SUBEWcarrymask <y.Type> (CMPWUconst (MOVHZreg y) [31])))))
+ // result: (SRAW x (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPWUconst (MOVHZreg y) [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAW)
- v.Type = t
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpS390XORW, y.Type)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v0.AddArg(y)
- v1 := b.NewValue0(v.Pos, OpS390XNOTW, y.Type)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v3.AuxInt = 31
- v4 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
- v4.AddArg(y)
- v3.AddArg(v4)
- v2.AddArg(v3)
- v1.AddArg(v2)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v1.AuxInt = 63
v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
+ v2.AuxInt = 64
+ v3 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
+ v3.AddArg(y)
+ v2.AddArg(v3)
+ v0.AddArg(v2)
v.AddArg(v0)
return true
}
func rewriteValueS390X_OpRsh32x32_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Rsh32x32 <t> x y)
+ // match: (Rsh32x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh32x32 x y)
// cond:
- // result: (SRAW <t> x (ORW <y.Type> y (NOTW <y.Type> (SUBEWcarrymask <y.Type> (CMPWUconst y [31])))))
+ // result: (SRAW x (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPWUconst y [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAW)
- v.Type = t
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpS390XORW, y.Type)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v0.AddArg(y)
- v1 := b.NewValue0(v.Pos, OpS390XNOTW, y.Type)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v3.AuxInt = 31
- v3.AddArg(y)
- v2.AddArg(v3)
- v1.AddArg(v2)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v1.AuxInt = 63
v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
+ v2.AuxInt = 64
+ v2.AddArg(y)
+ v0.AddArg(v2)
v.AddArg(v0)
return true
}
func rewriteValueS390X_OpRsh32x64_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Rsh32x64 <t> x y)
+ // match: (Rsh32x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh32x64 x y)
// cond:
- // result: (SRAW <t> x (OR <y.Type> y (NOT <y.Type> (SUBEcarrymask <y.Type> (CMPUconst y [31])))))
+ // result: (SRAW x (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPUconst y [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAW)
- v.Type = t
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpS390XOR, y.Type)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v0.AddArg(y)
- v1 := b.NewValue0(v.Pos, OpS390XNOT, y.Type)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEcarrymask, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
- v3.AuxInt = 31
- v3.AddArg(y)
- v2.AddArg(v3)
- v1.AddArg(v2)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v1.AuxInt = 63
v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
+ v2.AuxInt = 64
+ v2.AddArg(y)
+ v0.AddArg(v2)
v.AddArg(v0)
return true
}
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh32x8 <t> x y)
+ // match: (Rsh32x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh32x8 x y)
// cond:
- // result: (SRAW <t> x (ORW <y.Type> y (NOTW <y.Type> (SUBEWcarrymask <y.Type> (CMPWUconst (MOVBZreg y) [31])))))
+ // result: (SRAW x (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPWUconst (MOVBZreg y) [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAW)
- v.Type = t
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpS390XORW, y.Type)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v0.AddArg(y)
- v1 := b.NewValue0(v.Pos, OpS390XNOTW, y.Type)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v3.AuxInt = 31
- v4 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
- v4.AddArg(y)
- v3.AddArg(v4)
- v2.AddArg(v3)
- v1.AddArg(v2)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v1.AuxInt = 63
v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
+ v2.AuxInt = 64
+ v3 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
+ v3.AddArg(y)
+ v2.AddArg(v3)
+ v0.AddArg(v2)
v.AddArg(v0)
return true
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Rsh64Ux16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh64Ux16 <t> x y)
// cond:
- // result: (AND (SRD <t> x y) (SUBEcarrymask <t> (CMPWUconst (MOVHZreg y) [63])))
+ // result: (MOVDGE <t> (SRD <t> x y) (MOVDconst [0]) (CMPWUconst (MOVHZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XAND)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRD, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 63
+ v2.AuxInt = 64
v3 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
func rewriteValueS390X_OpRsh64Ux32_0(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Rsh64Ux32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh64Ux32 <t> x y)
// cond:
- // result: (AND (SRD <t> x y) (SUBEcarrymask <t> (CMPWUconst y [63])))
+ // result: (MOVDGE <t> (SRD <t> x y) (MOVDconst [0]) (CMPWUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XAND)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRD, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 63
+ v2.AuxInt = 64
v2.AddArg(y)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
func rewriteValueS390X_OpRsh64Ux64_0(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Rsh64Ux64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh64Ux64 <t> x y)
// cond:
- // result: (AND (SRD <t> x y) (SUBEcarrymask <t> (CMPUconst y [63])))
+ // result: (MOVDGE <t> (SRD <t> x y) (MOVDconst [0]) (CMPUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XAND)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRD, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
- v2.AuxInt = 63
+ v2.AuxInt = 64
v2.AddArg(y)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Rsh64Ux8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh64Ux8 <t> x y)
// cond:
- // result: (AND (SRD <t> x y) (SUBEcarrymask <t> (CMPWUconst (MOVBZreg y) [63])))
+ // result: (MOVDGE <t> (SRD <t> x y) (MOVDconst [0]) (CMPWUconst (MOVBZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XAND)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRD, t)
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XSUBEcarrymask, t)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v1.AuxInt = 0
+ v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v2.AuxInt = 63
+ v2.AuxInt = 64
v3 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v1.AddArg(v2)
- v.AddArg(v1)
+ v.AddArg(v2)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh64x16 <t> x y)
+ // match: (Rsh64x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh64x16 x y)
// cond:
- // result: (SRAD <t> x (ORW <y.Type> y (NOTW <y.Type> (SUBEWcarrymask <y.Type> (CMPWUconst (MOVHZreg y) [63])))))
+ // result: (SRAD x (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPWUconst (MOVHZreg y) [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAD)
- v.Type = t
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpS390XORW, y.Type)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v0.AddArg(y)
- v1 := b.NewValue0(v.Pos, OpS390XNOTW, y.Type)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v3.AuxInt = 63
- v4 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
- v4.AddArg(y)
- v3.AddArg(v4)
- v2.AddArg(v3)
- v1.AddArg(v2)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v1.AuxInt = 63
v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
+ v2.AuxInt = 64
+ v3 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
+ v3.AddArg(y)
+ v2.AddArg(v3)
+ v0.AddArg(v2)
v.AddArg(v0)
return true
}
func rewriteValueS390X_OpRsh64x32_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Rsh64x32 <t> x y)
+ // match: (Rsh64x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh64x32 x y)
// cond:
- // result: (SRAD <t> x (ORW <y.Type> y (NOTW <y.Type> (SUBEWcarrymask <y.Type> (CMPWUconst y [63])))))
+ // result: (SRAD x (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPWUconst y [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAD)
- v.Type = t
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpS390XORW, y.Type)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v0.AddArg(y)
- v1 := b.NewValue0(v.Pos, OpS390XNOTW, y.Type)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v3.AuxInt = 63
- v3.AddArg(y)
- v2.AddArg(v3)
- v1.AddArg(v2)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v1.AuxInt = 63
v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
+ v2.AuxInt = 64
+ v2.AddArg(y)
+ v0.AddArg(v2)
v.AddArg(v0)
return true
}
func rewriteValueS390X_OpRsh64x64_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Rsh64x64 <t> x y)
+ // match: (Rsh64x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh64x64 x y)
// cond:
- // result: (SRAD <t> x (OR <y.Type> y (NOT <y.Type> (SUBEcarrymask <y.Type> (CMPUconst y [63])))))
+ // result: (SRAD x (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPUconst y [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAD)
- v.Type = t
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpS390XOR, y.Type)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v0.AddArg(y)
- v1 := b.NewValue0(v.Pos, OpS390XNOT, y.Type)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEcarrymask, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
- v3.AuxInt = 63
- v3.AddArg(y)
- v2.AddArg(v3)
- v1.AddArg(v2)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v1.AuxInt = 63
v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
+ v2.AuxInt = 64
+ v2.AddArg(y)
+ v0.AddArg(v2)
v.AddArg(v0)
return true
}
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh64x8 <t> x y)
- // cond:
- // result: (SRAD <t> x (ORW <y.Type> y (NOTW <y.Type> (SUBEWcarrymask <y.Type> (CMPWUconst (MOVBZreg y) [63])))))
+ // match: (Rsh64x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAD x y)
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
v.reset(OpS390XSRAD)
- v.Type = t
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpS390XORW, y.Type)
- v0.AddArg(y)
- v1 := b.NewValue0(v.Pos, OpS390XNOTW, y.Type)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v3.AuxInt = 63
- v4 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
- v4.AddArg(y)
- v3.AddArg(v4)
- v2.AddArg(v3)
- v1.AddArg(v2)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh64x8 x y)
+ // cond:
+ // result: (SRAD x (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPWUconst (MOVBZreg y) [64])))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ v.reset(OpS390XSRAD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
+ v0.AddArg(y)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v1.AuxInt = 63
v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
+ v2.AuxInt = 64
+ v3 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
+ v3.AddArg(y)
+ v2.AddArg(v3)
+ v0.AddArg(v2)
v.AddArg(v0)
return true
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Rsh8Ux16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVBZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh8Ux16 <t> x y)
// cond:
- // result: (ANDW (SRW <t> (MOVBZreg x) y) (SUBEWcarrymask <t> (CMPWUconst (MOVHZreg y) [7])))
+ // result: (MOVDGE <t> (SRW <t> (MOVBZreg x) y) (MOVDconst [0]) (CMPWUconst (MOVHZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRW, t)
v1 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
v1.AddArg(x)
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v2.AuxInt = 0
+ v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v3.AuxInt = 7
+ v3.AuxInt = 64
v4 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
- v2.AddArg(v3)
- v.AddArg(v2)
+ v.AddArg(v3)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Rsh8Ux32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVBZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh8Ux32 <t> x y)
// cond:
- // result: (ANDW (SRW <t> (MOVBZreg x) y) (SUBEWcarrymask <t> (CMPWUconst y [7])))
+ // result: (MOVDGE <t> (SRW <t> (MOVBZreg x) y) (MOVDconst [0]) (CMPWUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRW, t)
v1 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
v1.AddArg(x)
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v2.AuxInt = 0
+ v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v3.AuxInt = 7
+ v3.AuxInt = 64
v3.AddArg(y)
- v2.AddArg(v3)
- v.AddArg(v2)
+ v.AddArg(v3)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Rsh8Ux64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVBZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh8Ux64 <t> x y)
// cond:
- // result: (ANDW (SRW <t> (MOVBZreg x) y) (SUBEWcarrymask <t> (CMPUconst y [7])))
+ // result: (MOVDGE <t> (SRW <t> (MOVBZreg x) y) (MOVDconst [0]) (CMPUconst y [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRW, t)
v1 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
v1.AddArg(x)
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v2.AuxInt = 0
+ v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
- v3.AuxInt = 7
+ v3.AuxInt = 64
v3.AddArg(y)
- v2.AddArg(v3)
- v.AddArg(v2)
+ v.AddArg(v3)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
+ // match: (Rsh8Ux8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW (MOVBZreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
// match: (Rsh8Ux8 <t> x y)
// cond:
- // result: (ANDW (SRW <t> (MOVBZreg x) y) (SUBEWcarrymask <t> (CMPWUconst (MOVBZreg y) [7])))
+ // result: (MOVDGE <t> (SRW <t> (MOVBZreg x) y) (MOVDconst [0]) (CMPWUconst (MOVBZreg y) [64]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
- v.reset(OpS390XANDW)
+ v.reset(OpS390XMOVDGE)
+ v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XSRW, t)
v1 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
v1.AddArg(x)
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, t)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, typ.UInt64)
+ v2.AuxInt = 0
+ v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v3.AuxInt = 7
+ v3.AuxInt = 64
v4 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
- v2.AddArg(v3)
- v.AddArg(v2)
+ v.AddArg(v3)
return true
}
}
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh8x16 <t> x y)
+ // match: (Rsh8x16 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVBreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVBreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh8x16 x y)
// cond:
- // result: (SRAW <t> (MOVBreg x) (ORW <y.Type> y (NOTW <y.Type> (SUBEWcarrymask <y.Type> (CMPWUconst (MOVHZreg y) [7])))))
+ // result: (SRAW (MOVBreg x) (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPWUconst (MOVHZreg y) [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAW)
- v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XMOVBreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XORW, y.Type)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v1.AddArg(y)
- v2 := b.NewValue0(v.Pos, OpS390XNOTW, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, y.Type)
- v4 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v4.AuxInt = 7
- v5 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
- v5.AddArg(y)
- v4.AddArg(v5)
- v3.AddArg(v4)
- v2.AddArg(v3)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v2.AuxInt = 63
v1.AddArg(v2)
+ v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
+ v3.AuxInt = 64
+ v4 := b.NewValue0(v.Pos, OpS390XMOVHZreg, typ.UInt64)
+ v4.AddArg(y)
+ v3.AddArg(v4)
+ v1.AddArg(v3)
v.AddArg(v1)
return true
}
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh8x32 <t> x y)
+ // match: (Rsh8x32 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVBreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVBreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh8x32 x y)
// cond:
- // result: (SRAW <t> (MOVBreg x) (ORW <y.Type> y (NOTW <y.Type> (SUBEWcarrymask <y.Type> (CMPWUconst y [7])))))
+ // result: (SRAW (MOVBreg x) (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPWUconst y [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAW)
- v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XMOVBreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XORW, y.Type)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v1.AddArg(y)
- v2 := b.NewValue0(v.Pos, OpS390XNOTW, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, y.Type)
- v4 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v4.AuxInt = 7
- v4.AddArg(y)
- v3.AddArg(v4)
- v2.AddArg(v3)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v2.AuxInt = 63
v1.AddArg(v2)
+ v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
+ v3.AuxInt = 64
+ v3.AddArg(y)
+ v1.AddArg(v3)
v.AddArg(v1)
return true
}
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh8x64 <t> x y)
+ // match: (Rsh8x64 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVBreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVBreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh8x64 x y)
// cond:
- // result: (SRAW <t> (MOVBreg x) (OR <y.Type> y (NOT <y.Type> (SUBEcarrymask <y.Type> (CMPUconst y [7])))))
+ // result: (SRAW (MOVBreg x) (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPUconst y [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAW)
- v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XMOVBreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XOR, y.Type)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v1.AddArg(y)
- v2 := b.NewValue0(v.Pos, OpS390XNOT, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XSUBEcarrymask, y.Type)
- v4 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
- v4.AuxInt = 7
- v4.AddArg(y)
- v3.AddArg(v4)
- v2.AddArg(v3)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v2.AuxInt = 63
v1.AddArg(v2)
+ v3 := b.NewValue0(v.Pos, OpS390XCMPUconst, types.TypeFlags)
+ v3.AuxInt = 64
+ v3.AddArg(y)
+ v1.AddArg(v3)
v.AddArg(v1)
return true
}
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh8x8 <t> x y)
+ // match: (Rsh8x8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRAW (MOVBreg x) y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(shiftIsBounded(v)) {
+ break
+ }
+ v.reset(OpS390XSRAW)
+ v0 := b.NewValue0(v.Pos, OpS390XMOVBreg, typ.Int64)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Rsh8x8 x y)
// cond:
- // result: (SRAW <t> (MOVBreg x) (ORW <y.Type> y (NOTW <y.Type> (SUBEWcarrymask <y.Type> (CMPWUconst (MOVBZreg y) [7])))))
+ // result: (SRAW (MOVBreg x) (MOVDGE <y.Type> y (MOVDconst <y.Type> [63]) (CMPWUconst (MOVBZreg y) [64])))
for {
- t := v.Type
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpS390XSRAW)
- v.Type = t
v0 := b.NewValue0(v.Pos, OpS390XMOVBreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpS390XORW, y.Type)
+ v1 := b.NewValue0(v.Pos, OpS390XMOVDGE, y.Type)
v1.AddArg(y)
- v2 := b.NewValue0(v.Pos, OpS390XNOTW, y.Type)
- v3 := b.NewValue0(v.Pos, OpS390XSUBEWcarrymask, y.Type)
- v4 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
- v4.AuxInt = 7
- v5 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
- v5.AddArg(y)
- v4.AddArg(v5)
- v3.AddArg(v4)
- v2.AddArg(v3)
+ v2 := b.NewValue0(v.Pos, OpS390XMOVDconst, y.Type)
+ v2.AuxInt = 63
v1.AddArg(v2)
+ v3 := b.NewValue0(v.Pos, OpS390XCMPWUconst, types.TypeFlags)
+ v3.AuxInt = 64
+ v4 := b.NewValue0(v.Pos, OpS390XMOVBZreg, typ.UInt64)
+ v4.AddArg(y)
+ v3.AddArg(v4)
+ v1.AddArg(v3)
v.AddArg(v1)
return true
}
return false
}
func rewriteValueS390X_OpS390XAND_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (AND x (MOVDconst [c]))
// cond: is32Bit(c) && c < 0
// result: (ANDconst [c] x)
v.AddArg(x)
return true
}
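+ // Note: when the constant is non-negative and fits in 32 bits, its upper
+ // 32 bits are zero, so the 64-bit AND can be performed as a 32-bit ANDW
+ // whose result is then zero-extended.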
+ // match: (AND x (MOVDconst [c]))
+ // cond: is32Bit(c) && c >= 0
+ // result: (MOVWZreg (ANDWconst <typ.UInt32> [int64(int32(c))] x))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(is32Bit(c) && c >= 0) {
+ break
+ }
+ v.reset(OpS390XMOVWZreg)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = int64(int32(c))
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (AND (MOVDconst [c]) x)
+ // cond: is32Bit(c) && c >= 0
+ // result: (MOVWZreg (ANDWconst <typ.UInt32> [int64(int32(c))] x))
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ x := v.Args[1]
+ if !(is32Bit(c) && c >= 0) {
+ break
+ }
+ v.reset(OpS390XMOVWZreg)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = int64(int32(c))
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
// match: (AND x (MOVDconst [0xFF]))
// cond:
// result: (MOVBZreg x)
v.AddArg(x)
return true
}
+ return false
+}
+func rewriteValueS390X_OpS390XAND_10(v *Value) bool {
// match: (AND (MOVDconst [c]) (MOVDconst [d]))
// cond:
// result: (MOVDconst [c&d])
v.AuxInt = c & d
return true
}
- return false
-}
-func rewriteValueS390X_OpS390XAND_10(v *Value) bool {
// match: (AND x x)
// cond:
// result: x
v.reset(OpS390XFlagGT)
return true
}
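+ // Note: a right shift by c leaves at most 64-c significant bits, and
+ // extended or masked operands are known to fit in 32 bits, so the compares
+ // below can be resolved outright or narrowed to their 32-bit forms.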
+ // match: (CMPUconst (SRDconst _ [c]) [n])
+ // cond: c > 0 && c < 64 && (1<<uint(64-c)) <= uint64(n)
+ // result: (FlagLT)
+ for {
+ n := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XSRDconst {
+ break
+ }
+ c := v_0.AuxInt
+ if !(c > 0 && c < 64 && (1<<uint(64-c)) <= uint64(n)) {
+ break
+ }
+ v.reset(OpS390XFlagLT)
+ return true
+ }
+ // match: (CMPUconst (MOVWZreg x) [c])
+ // cond:
+ // result: (CMPWUconst x [c])
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVWZreg {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpS390XCMPWUconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (CMPUconst x:(MOVHreg _) [c])
+ // cond:
+ // result: (CMPWUconst x [c])
+ for {
+ c := v.AuxInt
+ x := v.Args[0]
+ if x.Op != OpS390XMOVHreg {
+ break
+ }
+ v.reset(OpS390XCMPWUconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (CMPUconst x:(MOVHZreg _) [c])
+ // cond:
+ // result: (CMPWUconst x [c])
+ for {
+ c := v.AuxInt
+ x := v.Args[0]
+ if x.Op != OpS390XMOVHZreg {
+ break
+ }
+ v.reset(OpS390XCMPWUconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (CMPUconst x:(MOVBreg _) [c])
+ // cond:
+ // result: (CMPWUconst x [c])
+ for {
+ c := v.AuxInt
+ x := v.Args[0]
+ if x.Op != OpS390XMOVBreg {
+ break
+ }
+ v.reset(OpS390XCMPWUconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (CMPUconst x:(MOVBZreg _) [c])
+ // cond:
+ // result: (CMPWUconst x [c])
+ for {
+ c := v.AuxInt
+ x := v.Args[0]
+ if x.Op != OpS390XMOVBZreg {
+ break
+ }
+ v.reset(OpS390XCMPWUconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (CMPUconst (MOVWZreg x:(ANDWconst [m] _)) [c])
+ // cond: int32(m) >= 0
+ // result: (CMPWUconst x [c])
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVWZreg {
+ break
+ }
+ x := v_0.Args[0]
+ if x.Op != OpS390XANDWconst {
+ break
+ }
+ m := x.AuxInt
+ if !(int32(m) >= 0) {
+ break
+ }
+ v.reset(OpS390XCMPWUconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
+func rewriteValueS390X_OpS390XCMPUconst_10(v *Value) bool {
+ // match: (CMPUconst (MOVWreg x:(ANDWconst [m] _)) [c])
+ // cond: int32(m) >= 0
+ // result: (CMPWUconst x [c])
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVWreg {
+ break
+ }
+ x := v_0.Args[0]
+ if x.Op != OpS390XANDWconst {
+ break
+ }
+ m := x.AuxInt
+ if !(int32(m) >= 0) {
+ break
+ }
+ v.reset(OpS390XCMPWUconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
return false
}
func rewriteValueS390X_OpS390XCMPW_0(v *Value) bool {
v.AddArg(v0)
return true
}
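+ // Note: CMPW only inspects the low 32 bits of its operands, so sign or
+ // zero extensions of either argument can be dropped.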
+ // match: (CMPW x (MOVWreg y))
+ // cond:
+ // result: (CMPW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XCMPW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (CMPW x (MOVWZreg y))
+ // cond:
+ // result: (CMPW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XCMPW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (CMPW (MOVWreg x) y)
+ // cond:
+ // result: (CMPW x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVWreg {
+ break
+ }
+ x := v_0.Args[0]
+ y := v.Args[1]
+ v.reset(OpS390XCMPW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (CMPW (MOVWZreg x) y)
+ // cond:
+ // result: (CMPW x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVWZreg {
+ break
+ }
+ x := v_0.Args[0]
+ y := v.Args[1]
+ v.reset(OpS390XCMPW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
return false
}
func rewriteValueS390X_OpS390XCMPWU_0(v *Value) bool {
v.AddArg(v0)
return true
}
+ // match: (CMPWU x (MOVWreg y))
+ // cond:
+ // result: (CMPWU x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XCMPWU)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (CMPWU x (MOVWZreg y))
+ // cond:
+ // result: (CMPWU x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XCMPWU)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (CMPWU (MOVWreg x) y)
+ // cond:
+ // result: (CMPWU x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVWreg {
+ break
+ }
+ x := v_0.Args[0]
+ y := v.Args[1]
+ v.reset(OpS390XCMPWU)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (CMPWU (MOVWZreg x) y)
+ // cond:
+ // result: (CMPWU x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVWZreg {
+ break
+ }
+ x := v_0.Args[0]
+ y := v.Args[1]
+ v.reset(OpS390XCMPWU)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
return false
}
func rewriteValueS390X_OpS390XCMPWUconst_0(v *Value) bool {
v.reset(OpS390XFlagGT)
return true
}
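+ // Note: a zero-extended byte is at most 0xff and a zero-extended halfword
+ // at most 0xffff, so an unsigned compare against a larger constant is
+ // always less-than.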
+ // match: (CMPWUconst (MOVBZreg _) [c])
+ // cond: 0xff < c
+ // result: (FlagLT)
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVBZreg {
+ break
+ }
+ if !(0xff < c) {
+ break
+ }
+ v.reset(OpS390XFlagLT)
+ return true
+ }
+ // match: (CMPWUconst (MOVHZreg _) [c])
+ // cond: 0xffff < c
+ // result: (FlagLT)
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVHZreg {
+ break
+ }
+ if !(0xffff < c) {
+ break
+ }
+ v.reset(OpS390XFlagLT)
+ return true
+ }
+ // match: (CMPWUconst (SRWconst _ [c]) [n])
+ // cond: c > 0 && c < 32 && (1<<uint(32-c)) <= uint32(n)
+ // result: (FlagLT)
+ for {
+ n := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XSRWconst {
+ break
+ }
+ c := v_0.AuxInt
+ if !(c > 0 && c < 32 && (1<<uint(32-c)) <= uint32(n)) {
+ break
+ }
+ v.reset(OpS390XFlagLT)
+ return true
+ }
+ // match: (CMPWUconst (ANDWconst _ [m]) [n])
+ // cond: uint32(m) < uint32(n)
+ // result: (FlagLT)
+ for {
+ n := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XANDWconst {
+ break
+ }
+ m := v_0.AuxInt
+ if !(uint32(m) < uint32(n)) {
+ break
+ }
+ v.reset(OpS390XFlagLT)
+ return true
+ }
+ // match: (CMPWUconst (MOVWreg x) [c])
+ // cond:
+ // result: (CMPWUconst x [c])
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVWreg {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpS390XCMPWUconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (CMPWUconst (MOVWZreg x) [c])
+ // cond:
+ // result: (CMPWUconst x [c])
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVWZreg {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpS390XCMPWUconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
return false
}
func rewriteValueS390X_OpS390XCMPWconst_0(v *Value) bool {
v.reset(OpS390XFlagGT)
return true
}
- // match: (CMPWconst (SRWconst _ [c]) [n])
- // cond: 0 <= n && 0 < c && c <= 32 && (1<<uint64(32-c)) <= uint64(n)
+ // match: (CMPWconst (MOVBZreg _) [c])
+ // cond: 0xff < c
+ // result: (FlagLT)
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVBZreg {
+ break
+ }
+ if !(0xff < c) {
+ break
+ }
+ v.reset(OpS390XFlagLT)
+ return true
+ }
+ // match: (CMPWconst (MOVHZreg _) [c])
+ // cond: 0xffff < c
// result: (FlagLT)
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVHZreg {
+ break
+ }
+ if !(0xffff < c) {
+ break
+ }
+ v.reset(OpS390XFlagLT)
+ return true
+ }
+ // match: (CMPWconst (SRWconst _ [c]) [n])
+ // cond: c > 0 && n < 0
+ // result: (FlagGT)
for {
n := v.AuxInt
v_0 := v.Args[0]
break
}
c := v_0.AuxInt
- if !(0 <= n && 0 < c && c <= 32 && (1<<uint64(32-c)) <= uint64(n)) {
+ if !(c > 0 && n < 0) {
break
}
- v.reset(OpS390XFlagLT)
+ v.reset(OpS390XFlagGT)
return true
}
// match: (CMPWconst (ANDWconst _ [m]) [n])
- // cond: 0 <= int32(m) && int32(m) < int32(n)
+ // cond: int32(m) >= 0 && int32(m) < int32(n)
// result: (FlagLT)
for {
n := v.AuxInt
break
}
m := v_0.AuxInt
- if !(0 <= int32(m) && int32(m) < int32(n)) {
+ if !(int32(m) >= 0 && int32(m) < int32(n)) {
break
}
v.reset(OpS390XFlagLT)
return true
}
+ // match: (CMPWconst x:(SRWconst _ [c]) [n])
+ // cond: c > 0 && n >= 0
+ // result: (CMPWUconst x [n])
+ for {
+ n := v.AuxInt
+ x := v.Args[0]
+ if x.Op != OpS390XSRWconst {
+ break
+ }
+ c := x.AuxInt
+ if !(c > 0 && n >= 0) {
+ break
+ }
+ v.reset(OpS390XCMPWUconst)
+ v.AuxInt = n
+ v.AddArg(x)
+ return true
+ }
+ // match: (CMPWconst (MOVWreg x) [c])
+ // cond:
+ // result: (CMPWconst x [c])
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVWreg {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpS390XCMPWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (CMPWconst (MOVWZreg x) [c])
+ // cond:
+ // result: (CMPWconst x [c])
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVWZreg {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpS390XCMPWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
return false
}
func rewriteValueS390X_OpS390XCMPconst_0(v *Value) bool {
v.reset(OpS390XFlagGT)
return true
}
- // match: (CMPconst (MOVBZreg _) [c])
- // cond: 0xFF < c
- // result: (FlagLT)
+ // match: (CMPconst (SRDconst _ [c]) [n])
+ // cond: c > 0 && n < 0
+ // result: (FlagGT)
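+ // Note: a logical right shift by a positive count yields a non-negative
+ // value, so a signed compare against a negative constant is always
+ // greater-than.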
for {
- c := v.AuxInt
+ n := v.AuxInt
v_0 := v.Args[0]
- if v_0.Op != OpS390XMOVBZreg {
+ if v_0.Op != OpS390XSRDconst {
break
}
- if !(0xFF < c) {
+ c := v_0.AuxInt
+ if !(c > 0 && n < 0) {
break
}
- v.reset(OpS390XFlagLT)
+ v.reset(OpS390XFlagGT)
return true
}
- // match: (CMPconst (MOVHZreg _) [c])
- // cond: 0xFFFF < c
- // result: (FlagLT)
+ // match: (CMPconst (MOVWreg x) [c])
+ // cond:
+ // result: (CMPWconst x [c])
for {
c := v.AuxInt
v_0 := v.Args[0]
- if v_0.Op != OpS390XMOVHZreg {
+ if v_0.Op != OpS390XMOVWreg {
break
}
- if !(0xFFFF < c) {
+ x := v_0.Args[0]
+ v.reset(OpS390XCMPWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (CMPconst x:(MOVHreg _) [c])
+ // cond:
+ // result: (CMPWconst x [c])
+ for {
+ c := v.AuxInt
+ x := v.Args[0]
+ if x.Op != OpS390XMOVHreg {
break
}
- v.reset(OpS390XFlagLT)
+ v.reset(OpS390XCMPWconst)
+ v.AuxInt = c
+ v.AddArg(x)
return true
}
- // match: (CMPconst (MOVWZreg _) [c])
- // cond: 0xFFFFFFFF < c
- // result: (FlagLT)
+ // match: (CMPconst x:(MOVHZreg _) [c])
+ // cond:
+ // result: (CMPWconst x [c])
+ for {
+ c := v.AuxInt
+ x := v.Args[0]
+ if x.Op != OpS390XMOVHZreg {
+ break
+ }
+ v.reset(OpS390XCMPWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (CMPconst x:(MOVBreg _) [c])
+ // cond:
+ // result: (CMPWconst x [c])
+ for {
+ c := v.AuxInt
+ x := v.Args[0]
+ if x.Op != OpS390XMOVBreg {
+ break
+ }
+ v.reset(OpS390XCMPWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (CMPconst x:(MOVBZreg _) [c])
+ // cond:
+ // result: (CMPWconst x [c])
+ for {
+ c := v.AuxInt
+ x := v.Args[0]
+ if x.Op != OpS390XMOVBZreg {
+ break
+ }
+ v.reset(OpS390XCMPWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (CMPconst (MOVWZreg x:(ANDWconst [m] _)) [c])
+ // cond: int32(m) >= 0 && c >= 0
+ // result: (CMPWUconst x [c])
for {
c := v.AuxInt
v_0 := v.Args[0]
if v_0.Op != OpS390XMOVWZreg {
break
}
- if !(0xFFFFFFFF < c) {
+ x := v_0.Args[0]
+ if x.Op != OpS390XANDWconst {
break
}
- v.reset(OpS390XFlagLT)
+ m := x.AuxInt
+ if !(int32(m) >= 0 && c >= 0) {
+ break
+ }
+ v.reset(OpS390XCMPWUconst)
+ v.AuxInt = c
+ v.AddArg(x)
return true
}
- // match: (CMPconst (SRDconst _ [c]) [n])
- // cond: 0 <= n && 0 < c && c <= 64 && (1<<uint64(64-c)) <= uint64(n)
- // result: (FlagLT)
+ return false
+}
+func rewriteValueS390X_OpS390XCMPconst_10(v *Value) bool {
+ // match: (CMPconst (MOVWreg x:(ANDWconst [m] _)) [c])
+ // cond: int32(m) >= 0 && c >= 0
+ // result: (CMPWUconst x [c])
for {
- n := v.AuxInt
+ c := v.AuxInt
v_0 := v.Args[0]
- if v_0.Op != OpS390XSRDconst {
+ if v_0.Op != OpS390XMOVWreg {
break
}
- c := v_0.AuxInt
- if !(0 <= n && 0 < c && c <= 64 && (1<<uint64(64-c)) <= uint64(n)) {
+ x := v_0.Args[0]
+ if x.Op != OpS390XANDWconst {
break
}
- v.reset(OpS390XFlagLT)
+ m := x.AuxInt
+ if !(int32(m) >= 0 && c >= 0) {
+ break
+ }
+ v.reset(OpS390XCMPWUconst)
+ v.AuxInt = c
+ v.AddArg(x)
return true
}
- // match: (CMPconst (ANDconst _ [m]) [n])
- // cond: 0 <= m && m < n
- // result: (FlagLT)
+ // match: (CMPconst x:(SRDconst _ [c]) [n])
+ // cond: c > 0 && n >= 0
+ // result: (CMPUconst x [n])
for {
n := v.AuxInt
- v_0 := v.Args[0]
- if v_0.Op != OpS390XANDconst {
+ x := v.Args[0]
+ if x.Op != OpS390XSRDconst {
break
}
- m := v_0.AuxInt
- if !(0 <= m && m < n) {
+ c := x.AuxInt
+ if !(c > 0 && n >= 0) {
break
}
- v.reset(OpS390XFlagLT)
+ v.reset(OpS390XCMPUconst)
+ v.AuxInt = n
+ v.AddArg(x)
return true
}
return false
func rewriteValueS390X_OpS390XMOVBZreg_10(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (MOVBZreg x:(MOVBZreg _))
// cond:
// result: (MOVDreg x)
v0.AddArg(mem)
return true
}
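+ // Note: truncating the mask to 8 bits already performs the byte
+ // truncation, so only a zero extension of the 32-bit result is still
+ // required.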
+ // match: (MOVBZreg (ANDWconst [m] x))
+ // cond:
+ // result: (MOVWZreg (ANDWconst <typ.UInt32> [int64( uint8(m))] x))
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XANDWconst {
+ break
+ }
+ m := v_0.AuxInt
+ x := v_0.Args[0]
+ v.reset(OpS390XMOVWZreg)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = int64(uint8(m))
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
return false
}
func rewriteValueS390X_OpS390XMOVBload_0(v *Value) bool {
func rewriteValueS390X_OpS390XMOVBreg_0(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (MOVBreg x:(MOVBload _ _))
// cond:
// result: (MOVDreg x)
v0.AddArg(mem)
return true
}
+ // match: (MOVBreg (ANDWconst [m] x))
+ // cond: int8(m) >= 0
+ // result: (MOVWZreg (ANDWconst <typ.UInt32> [int64( uint8(m))] x))
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XANDWconst {
+ break
+ }
+ m := v_0.AuxInt
+ x := v_0.Args[0]
+ if !(int8(m) >= 0) {
+ break
+ }
+ v.reset(OpS390XMOVWZreg)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = int64(uint8(m))
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
return false
}
func rewriteValueS390X_OpS390XMOVBstore_0(v *Value) bool {
func rewriteValueS390X_OpS390XMOVHZreg_10(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (MOVHZreg x:(MOVHloadidx [off] {sym} ptr idx mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVHZloadidx <v.Type> [off] {sym} ptr idx mem)
v0.AddArg(mem)
return true
}
+ // match: (MOVHZreg (ANDWconst [m] x))
+ // cond:
+ // result: (MOVWZreg (ANDWconst <typ.UInt32> [int64(uint16(m))] x))
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XANDWconst {
+ break
+ }
+ m := v_0.AuxInt
+ x := v_0.Args[0]
+ v.reset(OpS390XMOVWZreg)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = int64(uint16(m))
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
return false
}
func rewriteValueS390X_OpS390XMOVHload_0(v *Value) bool {
func rewriteValueS390X_OpS390XMOVHreg_10(v *Value) bool {
b := v.Block
_ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (MOVHreg x:(MOVHload [off] {sym} ptr mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVHload <v.Type> [off] {sym} ptr mem)
v0.AddArg(mem)
return true
}
+ // match: (MOVHreg (ANDWconst [m] x))
+ // cond: int16(m) >= 0
+ // result: (MOVWZreg (ANDWconst <typ.UInt32> [int64(uint16(m))] x))
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XANDWconst {
+ break
+ }
+ m := v_0.AuxInt
+ x := v_0.Args[0]
+ if !(int16(m) >= 0) {
+ break
+ }
+ v.reset(OpS390XMOVWZreg)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = int64(uint16(m))
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
return false
}
func rewriteValueS390X_OpS390XMOVHstore_0(v *Value) bool {
return false
}
func rewriteValueS390X_OpS390XSLD_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
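+ // Note: the shift instructions are assumed to use only the low six bits of
+ // the shift amount (consistent with the c&63 masking below), so masks that
+ // cover those bits and sign/zero extensions of the amount can be dropped,
+ // and 64-bit ANDs of the amount can be narrowed to 32-bit ANDWs.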
// match: (SLD x (MOVDconst [c]))
// cond:
- // result: (SLDconst [c&63] x)
+ // result: (SLDconst x [c&63])
for {
_ = v.Args[1]
x := v.Args[0]
v.AddArg(x)
return true
}
- // match: (SLD x (ANDconst [63] y))
+ // match: (SLD x (AND (MOVDconst [c]) y))
+ // cond:
+ // result: (SLD x (ANDWconst <typ.UInt32> [c&63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XAND {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_1_0.AuxInt
+ y := v_1.Args[1]
+ v.reset(OpS390XSLD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = c & 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SLD x (AND y (MOVDconst [c])))
+ // cond:
+ // result: (SLD x (ANDWconst <typ.UInt32> [c&63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XAND {
+ break
+ }
+ _ = v_1.Args[1]
+ y := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_1_1.AuxInt
+ v.reset(OpS390XSLD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = c & 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SLD x (ANDWconst [c] y))
+ // cond: c&63 == 63
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XANDWconst {
+ break
+ }
+ c := v_1.AuxInt
+ y := v_1.Args[0]
+ if !(c&63 == 63) {
+ break
+ }
+ v.reset(OpS390XSLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SLD x (MOVDreg y))
// cond:
// result: (SLD x y)
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpS390XANDconst {
+ if v_1.Op != OpS390XMOVDreg {
break
}
- if v_1.AuxInt != 63 {
+ y := v_1.Args[0]
+ v.reset(OpS390XSLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SLD x (MOVWreg y))
+ // cond:
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SLD x (MOVHreg y))
+ // cond:
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVHreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SLD x (MOVBreg y))
+ // cond:
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVBreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SLD x (MOVWZreg y))
+ // cond:
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SLD x (MOVHZreg y))
+ // cond:
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVHZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSLD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ return false
+}
+func rewriteValueS390X_OpS390XSLD_10(v *Value) bool {
+ // match: (SLD x (MOVBZreg y))
+ // cond:
+ // result: (SLD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVBZreg {
break
}
y := v_1.Args[0]
return false
}
func rewriteValueS390X_OpS390XSLW_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (SLW x (MOVDconst [c]))
// cond:
- // result: (SLWconst [c&63] x)
+ // result: (SLWconst x [c&63])
for {
_ = v.Args[1]
x := v.Args[0]
v.AddArg(x)
return true
}
- // match: (SLW x (ANDWconst [63] y))
+ // match: (SLW x (AND (MOVDconst [c]) y))
+ // cond:
+ // result: (SLW x (ANDWconst <typ.UInt32> [c&63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XAND {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_1_0.AuxInt
+ y := v_1.Args[1]
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = c & 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SLW x (AND y (MOVDconst [c])))
// cond:
+ // result: (SLW x (ANDWconst <typ.UInt32> [c&63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XAND {
+ break
+ }
+ _ = v_1.Args[1]
+ y := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_1_1.AuxInt
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = c & 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SLW x (ANDWconst [c] y))
+ // cond: c&63 == 63
// result: (SLW x y)
for {
_ = v.Args[1]
if v_1.Op != OpS390XANDWconst {
break
}
- if v_1.AuxInt != 63 {
+ c := v_1.AuxInt
+ y := v_1.Args[0]
+ if !(c&63 == 63) {
+ break
+ }
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SLW x (MOVDreg y))
+ // cond:
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVDreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SLW x (MOVWreg y))
+ // cond:
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SLW x (MOVHreg y))
+ // cond:
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVHreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SLW x (MOVBreg y))
+ // cond:
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVBreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SLW x (MOVWZreg y))
+ // cond:
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SLW x (MOVHZreg y))
+ // cond:
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVHZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSLW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ return false
+}
+func rewriteValueS390X_OpS390XSLW_10(v *Value) bool {
+ // match: (SLW x (MOVBZreg y))
+ // cond:
+ // result: (SLW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVBZreg {
break
}
y := v_1.Args[0]
return false
}
func rewriteValueS390X_OpS390XSRAD_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (SRAD x (MOVDconst [c]))
// cond:
- // result: (SRADconst [c&63] x)
+ // result: (SRADconst x [c&63])
for {
_ = v.Args[1]
x := v.Args[0]
v.AddArg(x)
return true
}
- // match: (SRAD x (ANDconst [63] y))
+ // match: (SRAD x (AND (MOVDconst [c]) y))
+ // cond:
+ // result: (SRAD x (ANDWconst <typ.UInt32> [c&63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XAND {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_1_0.AuxInt
+ y := v_1.Args[1]
+ v.reset(OpS390XSRAD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = c & 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SRAD x (AND y (MOVDconst [c])))
+ // cond:
+ // result: (SRAD x (ANDWconst <typ.UInt32> [c&63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XAND {
+ break
+ }
+ _ = v_1.Args[1]
+ y := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_1_1.AuxInt
+ v.reset(OpS390XSRAD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = c & 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SRAD x (ANDWconst [c] y))
+ // cond: c&63 == 63
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XANDWconst {
+ break
+ }
+ c := v_1.AuxInt
+ y := v_1.Args[0]
+ if !(c&63 == 63) {
+ break
+ }
+ v.reset(OpS390XSRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRAD x (MOVDreg y))
// cond:
// result: (SRAD x y)
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpS390XANDconst {
+ if v_1.Op != OpS390XMOVDreg {
break
}
- if v_1.AuxInt != 63 {
+ y := v_1.Args[0]
+ v.reset(OpS390XSRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRAD x (MOVWreg y))
+ // cond:
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRAD x (MOVHreg y))
+ // cond:
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVHreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRAD x (MOVBreg y))
+ // cond:
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVBreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRAD x (MOVWZreg y))
+ // cond:
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRAD x (MOVHZreg y))
+ // cond:
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVHZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRAD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ return false
+}
+func rewriteValueS390X_OpS390XSRAD_10(v *Value) bool {
+ // match: (SRAD x (MOVBZreg y))
+ // cond:
+ // result: (SRAD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVBZreg {
break
}
y := v_1.Args[0]
return false
}
func rewriteValueS390X_OpS390XSRAW_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (SRAW x (MOVDconst [c]))
// cond:
- // result: (SRAWconst [c&63] x)
+ // result: (SRAWconst x [c&63])
for {
_ = v.Args[1]
x := v.Args[0]
v.AddArg(x)
return true
}
- // match: (SRAW x (ANDWconst [63] y))
+ // match: (SRAW x (AND (MOVDconst [c]) y))
+ // cond:
+ // result: (SRAW x (ANDWconst <typ.UInt32> [c&63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XAND {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_1_0.AuxInt
+ y := v_1.Args[1]
+ v.reset(OpS390XSRAW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = c & 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SRAW x (AND y (MOVDconst [c])))
// cond:
+ // result: (SRAW x (ANDWconst <typ.UInt32> [c&63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XAND {
+ break
+ }
+ _ = v_1.Args[1]
+ y := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_1_1.AuxInt
+ v.reset(OpS390XSRAW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = c & 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SRAW x (ANDWconst [c] y))
+ // cond: c&63 == 63
// result: (SRAW x y)
for {
_ = v.Args[1]
if v_1.Op != OpS390XANDWconst {
break
}
- if v_1.AuxInt != 63 {
+ c := v_1.AuxInt
+ y := v_1.Args[0]
+ if !(c&63 == 63) {
+ break
+ }
+ v.reset(OpS390XSRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRAW x (MOVDreg y))
+ // cond:
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVDreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRAW x (MOVWreg y))
+ // cond:
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRAW x (MOVHreg y))
+ // cond:
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVHreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRAW x (MOVBreg y))
+ // cond:
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVBreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRAW x (MOVWZreg y))
+ // cond:
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRAW x (MOVHZreg y))
+ // cond:
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVHZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRAW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ return false
+}
+func rewriteValueS390X_OpS390XSRAW_10(v *Value) bool {
+ // match: (SRAW x (MOVBZreg y))
+ // cond:
+ // result: (SRAW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVBZreg {
break
}
y := v_1.Args[0]
return false
}
func rewriteValueS390X_OpS390XSRD_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (SRD x (MOVDconst [c]))
// cond:
- // result: (SRDconst [c&63] x)
+ // result: (SRDconst x [c&63])
for {
_ = v.Args[1]
x := v.Args[0]
v.AddArg(x)
return true
}
- // match: (SRD x (ANDconst [63] y))
+ // match: (SRD x (AND (MOVDconst [c]) y))
// cond:
+ // result: (SRD x (ANDWconst <typ.UInt32> [c&63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XAND {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_1_0.AuxInt
+ y := v_1.Args[1]
+ v.reset(OpS390XSRD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = c & 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SRD x (AND y (MOVDconst [c])))
+ // cond:
+ // result: (SRD x (ANDWconst <typ.UInt32> [c&63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XAND {
+ break
+ }
+ _ = v_1.Args[1]
+ y := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_1_1.AuxInt
+ v.reset(OpS390XSRD)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = c & 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SRD x (ANDWconst [c] y))
+ // cond: c&63 == 63
// result: (SRD x y)
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpS390XANDconst {
+ if v_1.Op != OpS390XANDWconst {
break
}
- if v_1.AuxInt != 63 {
+ c := v_1.AuxInt
+ y := v_1.Args[0]
+ if !(c&63 == 63) {
+ break
+ }
+ v.reset(OpS390XSRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRD x (MOVDreg y))
+ // cond:
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVDreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRD x (MOVWreg y))
+ // cond:
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRD x (MOVHreg y))
+ // cond:
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVHreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRD x (MOVBreg y))
+ // cond:
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVBreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRD x (MOVWZreg y))
+ // cond:
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRD x (MOVHZreg y))
+ // cond:
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVHZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRD)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ return false
+}
+func rewriteValueS390X_OpS390XSRD_10(v *Value) bool {
+ // match: (SRD x (MOVBZreg y))
+ // cond:
+ // result: (SRD x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVBZreg {
break
}
y := v_1.Args[0]
return false
}
func rewriteValueS390X_OpS390XSRW_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ typ := &b.Func.Config.Types
+ _ = typ
// match: (SRW x (MOVDconst [c]))
// cond:
- // result: (SRWconst [c&63] x)
+ // result: (SRWconst x [c&63])
for {
_ = v.Args[1]
x := v.Args[0]
v.AddArg(x)
return true
}
- // match: (SRW x (ANDWconst [63] y))
+ // match: (SRW x (AND (MOVDconst [c]) y))
// cond:
+ // result: (SRW x (ANDWconst <typ.UInt32> [c&63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XAND {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_1_0.AuxInt
+ y := v_1.Args[1]
+ v.reset(OpS390XSRW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = c & 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SRW x (AND y (MOVDconst [c])))
+ // cond:
+ // result: (SRW x (ANDWconst <typ.UInt32> [c&63] y))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XAND {
+ break
+ }
+ _ = v_1.Args[1]
+ y := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_1_1.AuxInt
+ v.reset(OpS390XSRW)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpS390XANDWconst, typ.UInt32)
+ v0.AuxInt = c & 63
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SRW x (ANDWconst [c] y))
+ // cond: c&63 == 63
// result: (SRW x y)
for {
_ = v.Args[1]
if v_1.Op != OpS390XANDWconst {
break
}
- if v_1.AuxInt != 63 {
+ c := v_1.AuxInt
+ y := v_1.Args[0]
+ if !(c&63 == 63) {
+ break
+ }
+ v.reset(OpS390XSRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRW x (MOVDreg y))
+ // cond:
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVDreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRW x (MOVWreg y))
+ // cond:
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRW x (MOVHreg y))
+ // cond:
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVHreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRW x (MOVBreg y))
+ // cond:
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVBreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRW x (MOVWZreg y))
+ // cond:
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVWZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRW x (MOVHZreg y))
+ // cond:
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVHZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpS390XSRW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ return false
+}
+func rewriteValueS390X_OpS390XSRW_10(v *Value) bool {
+ // match: (SRW x (MOVBZreg y))
+ // cond:
+ // result: (SRW x y)
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVBZreg {
break
}
y := v_1.Args[0]
}
return false
}
-func rewriteValueS390X_OpS390XSUBEWcarrymask_0(v *Value) bool {
- // match: (SUBEWcarrymask (FlagEQ))
- // cond:
- // result: (MOVDconst [-1])
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XFlagEQ {
- break
- }
- v.reset(OpS390XMOVDconst)
- v.AuxInt = -1
- return true
- }
- // match: (SUBEWcarrymask (FlagLT))
- // cond:
- // result: (MOVDconst [-1])
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XFlagLT {
- break
- }
- v.reset(OpS390XMOVDconst)
- v.AuxInt = -1
- return true
- }
- // match: (SUBEWcarrymask (FlagGT))
- // cond:
- // result: (MOVDconst [0])
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XFlagGT {
- break
- }
- v.reset(OpS390XMOVDconst)
- v.AuxInt = 0
- return true
- }
- return false
-}
-func rewriteValueS390X_OpS390XSUBEcarrymask_0(v *Value) bool {
- // match: (SUBEcarrymask (FlagEQ))
- // cond:
- // result: (MOVDconst [-1])
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XFlagEQ {
- break
- }
- v.reset(OpS390XMOVDconst)
- v.AuxInt = -1
- return true
- }
- // match: (SUBEcarrymask (FlagLT))
- // cond:
- // result: (MOVDconst [-1])
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XFlagLT {
- break
- }
- v.reset(OpS390XMOVDconst)
- v.AuxInt = -1
- return true
- }
- // match: (SUBEcarrymask (FlagGT))
- // cond:
- // result: (MOVDconst [0])
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XFlagGT {
- break
- }
- v.reset(OpS390XMOVDconst)
- v.AuxInt = 0
- return true
- }
- return false
-}
func rewriteValueS390X_OpS390XSUBW_0(v *Value) bool {
b := v.Block
_ = b