// constant shifts
// generic opt rewrites all constant shifts to shift by Const64
-(Lsh32x64 x (Const64 [c])) && uint64(c) < 32 -> (SLLconst x [c])
-(Rsh32x64 x (Const64 [c])) && uint64(c) < 32 -> (SRAconst x [c])
-(Rsh32Ux64 x (Const64 [c])) && uint64(c) < 32 -> (SRLconst x [c])
-(Lsh16x64 x (Const64 [c])) && uint64(c) < 16 -> (SLLconst x [c])
-(Rsh16x64 x (Const64 [c])) && uint64(c) < 16 -> (SRAconst (SLLconst <typ.UInt32> x [16]) [c+16])
-(Rsh16Ux64 x (Const64 [c])) && uint64(c) < 16 -> (SRLconst (SLLconst <typ.UInt32> x [16]) [c+16])
-(Lsh8x64 x (Const64 [c])) && uint64(c) < 8 -> (SLLconst x [c])
-(Rsh8x64 x (Const64 [c])) && uint64(c) < 8 -> (SRAconst (SLLconst <typ.UInt32> x [24]) [c+24])
-(Rsh8Ux64 x (Const64 [c])) && uint64(c) < 8 -> (SRLconst (SLLconst <typ.UInt32> x [24]) [c+24])
+(Lsh32x64 x (Const64 [c])) && uint64(c) < 32 => (SLLconst x [int32(c)])
+(Rsh32x64 x (Const64 [c])) && uint64(c) < 32 => (SRAconst x [int32(c)])
+(Rsh32Ux64 x (Const64 [c])) && uint64(c) < 32 => (SRLconst x [int32(c)])
+(Lsh16x64 x (Const64 [c])) && uint64(c) < 16 => (SLLconst x [int32(c)])
+(Rsh16x64 x (Const64 [c])) && uint64(c) < 16 => (SRAconst (SLLconst <typ.UInt32> x [16]) [int32(c+16)])
+(Rsh16Ux64 x (Const64 [c])) && uint64(c) < 16 => (SRLconst (SLLconst <typ.UInt32> x [16]) [int32(c+16)])
+(Lsh8x64 x (Const64 [c])) && uint64(c) < 8 => (SLLconst x [int32(c)])
+(Rsh8x64 x (Const64 [c])) && uint64(c) < 8 => (SRAconst (SLLconst <typ.UInt32> x [24]) [int32(c+24)])
+(Rsh8Ux64 x (Const64 [c])) && uint64(c) < 8 => (SRLconst (SLLconst <typ.UInt32> x [24]) [int32(c+24)])
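// The sub-word right shifts widen before shifting: the 16- or 8-bit value
// sits in the low bits of a 32-bit register whose upper bits are
// unspecified, so the rule first shifts left to put the value's top bit in
// bit 31, then shifts right by c+16 (or c+24) to extend and shift at once.
// A minimal Go sketch of the equivalent computation, using a hypothetical
// shift count c = 3:
//
//	var x int16 = -100
//	w := int32(x) << 16       // (SLLconst <typ.UInt32> x [16])
//	r := int16(w >> (3 + 16)) // (SRAconst ... [int32(c+16)])
//	// r == -13 == x>>3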
// large constant shifts
(Lsh32x64 _ (Const64 [c])) && uint64(c) >= 32 => (Const32 [0])
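// Go defines a shift of a 32-bit value by 32 or more bits to yield 0,
// rather than truncating the count the way a hardware shifter might, so
// the whole expression folds to a zero constant. A hypothetical example:
//
//	var x uint32 = 0xdeadbeef
//	_ = x << 40 // always 0 in Go; lowered to (Const32 [0])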
(BICconst [c] (MOVWconst [d])) => (MOVWconst [d&^c])
(BICconst [c] (BICconst [d] x)) => (BICconst [c|d] x)
(MVN (MOVWconst [c])) => (MOVWconst [^c])
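// BIC computes x &^ c (AND NOT) and MVN computes ^x, so both fold away when
// the operand is a constant, and nested BICconsts merge because clearing
// the bits of c and then the bits of d is the same as clearing c|d. A
// sketch with hypothetical constants:
//
//	c, d := int32(0x0ff0), int32(0xabcd)
//	_ = d &^ c // 0xa00d: folds to (MOVWconst [0xa00d])
//	_ = ^d     // folds to (MOVWconst [^d])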
-(MOVBreg (MOVWconst [c])) -> (MOVWconst [int64(int8(c))])
-(MOVBUreg (MOVWconst [c])) -> (MOVWconst [int64(uint8(c))])
-(MOVHreg (MOVWconst [c])) -> (MOVWconst [int64(int16(c))])
-(MOVHUreg (MOVWconst [c])) -> (MOVWconst [int64(uint16(c))])
+(MOVBreg (MOVWconst [c])) => (MOVWconst [int32(int8(c))])
+(MOVBUreg (MOVWconst [c])) => (MOVWconst [int32(uint8(c))])
+(MOVHreg (MOVWconst [c])) => (MOVWconst [int32(int16(c))])
+(MOVHUreg (MOVWconst [c])) => (MOVWconst [int32(uint16(c))])
(MOVWreg (MOVWconst [c])) => (MOVWconst [c])
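// With typed AuxInts these folds stay in int32 arithmetic: the constant is
// truncated through the sub-word type and re-extended directly, instead of
// being round-tripped through int64. A sketch with a hypothetical constant:
//
//	c := int32(0x1234abcd)
//	_ = int32(int8(c))  // -51 (0xffffffcd): MOVBreg sign-extends bit 7
//	_ = int32(uint8(c)) // 205 (0x000000cd): MOVBUreg zero-extends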
// BFX: Width = c >> 8, LSB = c & 0xff, result = d << (32 - Width - LSB) >> (32 - Width)
(BFX [c] (MOVWconst [d])) => (MOVWconst [d<<(32-uint32(c&0xff)-uint32(c>>8))>>(32-uint32(c>>8))])
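// A worked instance of the BFX fold, using a hypothetical Width = 8 and
// LSB = 4 (so c = 8<<8|4) with d = 0x12345678; the extracted field is
// bits 4..11 of d, sign-extended:
//
//	c, d := int32(8<<8|4), int32(0x12345678)
//	_ = d << (32 - uint32(c&0xff) - uint32(c>>8)) >> (32 - uint32(c>>8))
//	// == d<<20>>24 == 0x67, the signed 8-bit field at bit 4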
return true
}
// match: (MOVBUreg (MOVWconst [c]))
- // result: (MOVWconst [int64(uint8(c))])
+ // result: (MOVWconst [int32(uint8(c))])
for {
if v_0.Op != OpARMMOVWconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt32(v_0.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(uint8(c))
+ v.AuxInt = int32ToAuxInt(int32(uint8(c)))
return true
}
return false
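// The generated code swaps raw AuxInt reads and writes for the typed helper
// functions. In rewrite.go these helpers are thin, explicit-width
// conversions, roughly:
//
//	func auxIntToInt32(i int64) int32 { return int32(i) }
//	func auxIntToInt64(i int64) int64 { return i }
//	func int32ToAuxInt(i int32) int64 { return int64(i) }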
return true
}
// match: (MOVBreg (MOVWconst [c]))
- // result: (MOVWconst [int64(int8(c))])
+ // result: (MOVWconst [int32(int8(c))])
for {
if v_0.Op != OpARMMOVWconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt32(v_0.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int8(c))
+ v.AuxInt = int32ToAuxInt(int32(int8(c)))
return true
}
return false
return true
}
// match: (MOVHUreg (MOVWconst [c]))
- // result: (MOVWconst [int64(uint16(c))])
+ // result: (MOVWconst [int32(uint16(c))])
for {
if v_0.Op != OpARMMOVWconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt32(v_0.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(uint16(c))
+ v.AuxInt = int32ToAuxInt(int32(uint16(c)))
return true
}
return false
return true
}
// match: (MOVHreg (MOVWconst [c]))
- // result: (MOVWconst [int64(int16(c))])
+ // result: (MOVWconst [int32(int16(c))])
for {
if v_0.Op != OpARMMOVWconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt32(v_0.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int16(c))
+ v.AuxInt = int32ToAuxInt(int32(int16(c)))
return true
}
return false
v_0 := v.Args[0]
// match: (Lsh16x64 x (Const64 [c]))
// cond: uint64(c) < 16
- // result: (SLLconst x [c])
+ // result: (SLLconst x [int32(c)])
for {
x := v_0
if v_1.Op != OpConst64 {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt64(v_1.AuxInt)
if !(uint64(c) < 16) {
break
}
v.reset(OpARMSLLconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(int32(c))
v.AddArg(x)
return true
}
v_0 := v.Args[0]
// match: (Lsh32x64 x (Const64 [c]))
// cond: uint64(c) < 32
- // result: (SLLconst x [c])
+ // result: (SLLconst x [int32(c)])
for {
x := v_0
if v_1.Op != OpConst64 {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt64(v_1.AuxInt)
if !(uint64(c) < 32) {
break
}
v.reset(OpARMSLLconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(int32(c))
v.AddArg(x)
return true
}
v_0 := v.Args[0]
// match: (Lsh8x64 x (Const64 [c]))
// cond: uint64(c) < 8
- // result: (SLLconst x [c])
+ // result: (SLLconst x [int32(c)])
for {
x := v_0
if v_1.Op != OpConst64 {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt64(v_1.AuxInt)
if !(uint64(c) < 8) {
break
}
v.reset(OpARMSLLconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(int32(c))
v.AddArg(x)
return true
}
typ := &b.Func.Config.Types
// match: (Rsh16Ux64 x (Const64 [c]))
// cond: uint64(c) < 16
- // result: (SRLconst (SLLconst <typ.UInt32> x [16]) [c+16])
+ // result: (SRLconst (SLLconst <typ.UInt32> x [16]) [int32(c+16)])
for {
x := v_0
if v_1.Op != OpConst64 {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt64(v_1.AuxInt)
if !(uint64(c) < 16) {
break
}
v.reset(OpARMSRLconst)
- v.AuxInt = c + 16
+ v.AuxInt = int32ToAuxInt(int32(c + 16))
v0 := b.NewValue0(v.Pos, OpARMSLLconst, typ.UInt32)
- v0.AuxInt = 16
+ v0.AuxInt = int32ToAuxInt(16)
v0.AddArg(x)
v.AddArg(v0)
return true
typ := &b.Func.Config.Types
// match: (Rsh16x64 x (Const64 [c]))
// cond: uint64(c) < 16
- // result: (SRAconst (SLLconst <typ.UInt32> x [16]) [c+16])
+ // result: (SRAconst (SLLconst <typ.UInt32> x [16]) [int32(c+16)])
for {
x := v_0
if v_1.Op != OpConst64 {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt64(v_1.AuxInt)
if !(uint64(c) < 16) {
break
}
v.reset(OpARMSRAconst)
- v.AuxInt = c + 16
+ v.AuxInt = int32ToAuxInt(int32(c + 16))
v0 := b.NewValue0(v.Pos, OpARMSLLconst, typ.UInt32)
- v0.AuxInt = 16
+ v0.AuxInt = int32ToAuxInt(16)
v0.AddArg(x)
v.AddArg(v0)
return true
v_0 := v.Args[0]
// match: (Rsh32Ux64 x (Const64 [c]))
// cond: uint64(c) < 32
- // result: (SRLconst x [c])
+ // result: (SRLconst x [int32(c)])
for {
x := v_0
if v_1.Op != OpConst64 {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt64(v_1.AuxInt)
if !(uint64(c) < 32) {
break
}
v.reset(OpARMSRLconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(int32(c))
v.AddArg(x)
return true
}
v_0 := v.Args[0]
// match: (Rsh32x64 x (Const64 [c]))
// cond: uint64(c) < 32
- // result: (SRAconst x [c])
+ // result: (SRAconst x [int32(c)])
for {
x := v_0
if v_1.Op != OpConst64 {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt64(v_1.AuxInt)
if !(uint64(c) < 32) {
break
}
v.reset(OpARMSRAconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(int32(c))
v.AddArg(x)
return true
}
typ := &b.Func.Config.Types
// match: (Rsh8Ux64 x (Const64 [c]))
// cond: uint64(c) < 8
- // result: (SRLconst (SLLconst <typ.UInt32> x [24]) [c+24])
+ // result: (SRLconst (SLLconst <typ.UInt32> x [24]) [int32(c+24)])
for {
x := v_0
if v_1.Op != OpConst64 {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt64(v_1.AuxInt)
if !(uint64(c) < 8) {
break
}
v.reset(OpARMSRLconst)
- v.AuxInt = c + 24
+ v.AuxInt = int32ToAuxInt(int32(c + 24))
v0 := b.NewValue0(v.Pos, OpARMSLLconst, typ.UInt32)
- v0.AuxInt = 24
+ v0.AuxInt = int32ToAuxInt(24)
v0.AddArg(x)
v.AddArg(v0)
return true
typ := &b.Func.Config.Types
// match: (Rsh8x64 x (Const64 [c]))
// cond: uint64(c) < 8
- // result: (SRAconst (SLLconst <typ.UInt32> x [24]) [c+24])
+ // result: (SRAconst (SLLconst <typ.UInt32> x [24]) [int32(c+24)])
for {
x := v_0
if v_1.Op != OpConst64 {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt64(v_1.AuxInt)
if !(uint64(c) < 8) {
break
}
v.reset(OpARMSRAconst)
- v.AuxInt = c + 24
+ v.AuxInt = int32ToAuxInt(int32(c + 24))
v0 := b.NewValue0(v.Pos, OpARMSLLconst, typ.UInt32)
- v0.AuxInt = 24
+ v0.AuxInt = int32ToAuxInt(24)
v0.AddArg(x)
v.AddArg(v0)
return true