(Lsh64x16 <t> x y) => (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
(Lsh64x8 <t> x y) => (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
-(Lsh32x64 <t> x y) => (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
-(Lsh32x32 <t> x y) => (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
-(Lsh32x16 <t> x y) => (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
-(Lsh32x8 <t> x y) => (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
+(Lsh32x64 <t> x y) => (MASKEQZ (SLL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
+(Lsh32x32 <t> x y) => (MASKEQZ (SLL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
+(Lsh32x16 <t> x y) => (MASKEQZ (SLL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
+(Lsh32x8 <t> x y) => (MASKEQZ (SLL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
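+// sll.w only uses the low 5 bits of the shift amount, so the MASKEQZ guard must compare against 32 rather than 64.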
(Lsh16x64 <t> x y) => (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
(Lsh16x32 <t> x y) => (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
(Rsh64Ux16 <t> x y) => (MASKEQZ (SRLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
(Rsh64Ux8 <t> x y) => (MASKEQZ (SRLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
-(Rsh32Ux64 <t> x y) => (MASKEQZ (SRLV <t> (ZeroExt32to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
-(Rsh32Ux32 <t> x y) => (MASKEQZ (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
-(Rsh32Ux16 <t> x y) => (MASKEQZ (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
-(Rsh32Ux8 <t> x y) => (MASKEQZ (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
+(Rsh32Ux64 <t> x y) => (MASKEQZ (SRL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
+(Rsh32Ux32 <t> x y) => (MASKEQZ (SRL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
+(Rsh32Ux16 <t> x y) => (MASKEQZ (SRL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
+(Rsh32Ux8 <t> x y) => (MASKEQZ (SRL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
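+// srl.w reads only the low 32 bits of x and the low 5 bits of the amount, so the ZeroExt32to64 of x disappears and the guard drops to 32.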
(Rsh16Ux64 <t> x y) => (MASKEQZ (SRLV <t> (ZeroExt16to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
(Rsh16Ux32 <t> x y) => (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
(Rsh64x16 <t> x y) => (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
(Rsh64x8 <t> x y) => (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
-(Rsh32x64 <t> x y) => (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
-(Rsh32x32 <t> x y) => (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
-(Rsh32x16 <t> x y) => (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
-(Rsh32x8 <t> x y) => (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
+(Rsh32x64 <t> x y) => (SRA x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [31]))) y))
+(Rsh32x32 <t> x y) => (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt32to64 y)))
+(Rsh32x16 <t> x y) => (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt16to64 y)))
+(Rsh32x8 <t> x y) => (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt8to64 y)))
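+// sra.w likewise makes the SignExt32to64 of x unnecessary; when y > 31 the NEGV/OR mask is all ones, which sra.w truncates to a shift by 31.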
(Rsh16x64 <t> x y) => (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
(Rsh16x32 <t> x y) => (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
(XOR x (MOVVconst [c])) && is32Bit(c) => (XORconst [c] x)
(NOR x (MOVVconst [c])) && is32Bit(c) => (NORconst [c] x)
+(SLL _ (MOVVconst [c])) && uint64(c) >= 32 => (MOVVconst [0])
(SLLV _ (MOVVconst [c])) && uint64(c)>=64 => (MOVVconst [0])
+(SRL _ (MOVVconst [c])) && uint64(c) >= 32 => (MOVVconst [0])
(SRLV _ (MOVVconst [c])) && uint64(c)>=64 => (MOVVconst [0])
+(SRA x (MOVVconst [c])) && uint64(c) >= 32 => (SRAconst x [31])
(SRAV x (MOVVconst [c])) && uint64(c)>=64 => (SRAVconst x [63])
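+// An arithmetic shift by at least the operand width yields only sign bits, the same as shifting by width-1.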
+(SLL x (MOVVconst [c])) && uint64(c) >= 0 && uint64(c) <= 31 => (SLLconst x [c])
(SLLV x (MOVVconst [c])) => (SLLVconst x [c])
+(SRL x (MOVVconst [c])) && uint64(c) >= 0 && uint64(c) <= 31 => (SRLconst x [c])
(SRLV x (MOVVconst [c])) => (SRLVconst x [c])
+(SRA x (MOVVconst [c])) && uint64(c) >= 0 && uint64(c) <= 31 => (SRAconst x [c])
(SRAV x (MOVVconst [c])) => (SRAVconst x [c])
(ROTR x (MOVVconst [c])) => (ROTRconst x [c&31])
(ROTRV x (MOVVconst [c])) => (ROTRVconst x [c&63])
+// Avoid unnecessary zero and sign extension when right shifting.
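+// srl.w and sra.w read only the low 32 bits of the source register, so the MOVWUreg/MOVWreg can be dropped.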
+(SRLVconst [rc] (MOVWUreg y)) && rc >= 0 && rc <= 31 => (SRLconst [int64(rc)] y)
+(SRAVconst [rc] (MOVWreg y)) && rc >= 0 && rc <= 31 => (SRAconst [int64(rc)] y)
+
+// Replace right shifts that exceed the size of the signed type.
+(SRAVconst <t> [rc] (MOVBreg y)) && rc >= 8 => (SRAVconst [63] (SLLVconst <t> [56] y))
+(SRAVconst <t> [rc] (MOVHreg y)) && rc >= 16 => (SRAVconst [63] (SLLVconst <t> [48] y))
+(SRAVconst [rc] (MOVWreg y)) && rc >= 32 => (SRAconst [31] y)
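+// e.g. for an int8, any shift of 8 or more leaves only sign bits, so (y << 56) >> 63 yields 0 or -1.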
+
// If the shift amount is at least the data size (32, 16, or 8), we can optimize the result to constant 0.
(MOVWUreg (SLLVconst [lc] x)) && lc >= 32 => (MOVVconst [0])
(MOVHUreg (SLLVconst [lc] x)) && lc >= 16 => (MOVVconst [0])
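+// e.g. with lc >= 32, every bit MOVWUreg keeps was shifted in as zero.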
return rewriteValueLOONG64_OpLOONG64SGTUconst(v)
case OpLOONG64SGTconst:
return rewriteValueLOONG64_OpLOONG64SGTconst(v)
+ case OpLOONG64SLL:
+ return rewriteValueLOONG64_OpLOONG64SLL(v)
case OpLOONG64SLLV:
return rewriteValueLOONG64_OpLOONG64SLLV(v)
case OpLOONG64SLLVconst:
return rewriteValueLOONG64_OpLOONG64SLLVconst(v)
+ case OpLOONG64SRA:
+ return rewriteValueLOONG64_OpLOONG64SRA(v)
case OpLOONG64SRAV:
return rewriteValueLOONG64_OpLOONG64SRAV(v)
case OpLOONG64SRAVconst:
return rewriteValueLOONG64_OpLOONG64SRAVconst(v)
+ case OpLOONG64SRL:
+ return rewriteValueLOONG64_OpLOONG64SRL(v)
case OpLOONG64SRLV:
return rewriteValueLOONG64_OpLOONG64SRLV(v)
case OpLOONG64SRLVconst:
}
return false
}
+func rewriteValueLOONG64_OpLOONG64SLL(v *Value) bool {
+ v_1 := v.Args[1]
+ v_0 := v.Args[0]
+ // match: (SLL _ (MOVVconst [c]))
+ // cond: uint64(c) >= 32
+ // result: (MOVVconst [0])
+ for {
+ if v_1.Op != OpLOONG64MOVVconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ if !(uint64(c) >= 32) {
+ break
+ }
+ v.reset(OpLOONG64MOVVconst)
+ v.AuxInt = int64ToAuxInt(0)
+ return true
+ }
+ // match: (SLL x (MOVVconst [c]))
+ // cond: uint64(c) >= 0 && uint64(c) <= 31
+ // result: (SLLconst x [c])
+ for {
+ x := v_0
+ if v_1.Op != OpLOONG64MOVVconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ if !(uint64(c) >= 0 && uint64(c) <= 31) {
+ break
+ }
+ v.reset(OpLOONG64SLLconst)
+ v.AuxInt = int64ToAuxInt(c)
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
func rewriteValueLOONG64_OpLOONG64SLLV(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
}
return false
}
+func rewriteValueLOONG64_OpLOONG64SRA(v *Value) bool {
+ v_1 := v.Args[1]
+ v_0 := v.Args[0]
+ // match: (SRA x (MOVVconst [c]))
+ // cond: uint64(c) >= 32
+ // result: (SRAconst x [31])
+ for {
+ x := v_0
+ if v_1.Op != OpLOONG64MOVVconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ if !(uint64(c) >= 32) {
+ break
+ }
+ v.reset(OpLOONG64SRAconst)
+ v.AuxInt = int64ToAuxInt(31)
+ v.AddArg(x)
+ return true
+ }
+ // match: (SRA x (MOVVconst [c]))
+ // cond: uint64(c) >= 0 && uint64(c) <= 31
+ // result: (SRAconst x [c])
+ for {
+ x := v_0
+ if v_1.Op != OpLOONG64MOVVconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ if !(uint64(c) >= 0 && uint64(c) <= 31) {
+ break
+ }
+ v.reset(OpLOONG64SRAconst)
+ v.AuxInt = int64ToAuxInt(c)
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
func rewriteValueLOONG64_OpLOONG64SRAV(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
}
func rewriteValueLOONG64_OpLOONG64SRAVconst(v *Value) bool {
v_0 := v.Args[0]
+ b := v.Block
+ // match: (SRAVconst [rc] (MOVWreg y))
+ // cond: rc >= 0 && rc <= 31
+ // result: (SRAconst [int64(rc)] y)
+ for {
+ rc := auxIntToInt64(v.AuxInt)
+ if v_0.Op != OpLOONG64MOVWreg {
+ break
+ }
+ y := v_0.Args[0]
+ if !(rc >= 0 && rc <= 31) {
+ break
+ }
+ v.reset(OpLOONG64SRAconst)
+ v.AuxInt = int64ToAuxInt(int64(rc))
+ v.AddArg(y)
+ return true
+ }
+ // match: (SRAVconst <t> [rc] (MOVBreg y))
+ // cond: rc >= 8
+ // result: (SRAVconst [63] (SLLVconst <t> [56] y))
+ for {
+ t := v.Type
+ rc := auxIntToInt64(v.AuxInt)
+ if v_0.Op != OpLOONG64MOVBreg {
+ break
+ }
+ y := v_0.Args[0]
+ if !(rc >= 8) {
+ break
+ }
+ v.reset(OpLOONG64SRAVconst)
+ v.AuxInt = int64ToAuxInt(63)
+ v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
+ v0.AuxInt = int64ToAuxInt(56)
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SRAVconst <t> [rc] (MOVHreg y))
+ // cond: rc >= 16
+ // result: (SRAVconst [63] (SLLVconst <t> [48] y))
+ for {
+ t := v.Type
+ rc := auxIntToInt64(v.AuxInt)
+ if v_0.Op != OpLOONG64MOVHreg {
+ break
+ }
+ y := v_0.Args[0]
+ if !(rc >= 16) {
+ break
+ }
+ v.reset(OpLOONG64SRAVconst)
+ v.AuxInt = int64ToAuxInt(63)
+ v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
+ v0.AuxInt = int64ToAuxInt(48)
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (SRAVconst [rc] (MOVWreg y))
+ // cond: rc >= 32
+ // result: (SRAconst [31] y)
+ for {
+ rc := auxIntToInt64(v.AuxInt)
+ if v_0.Op != OpLOONG64MOVWreg {
+ break
+ }
+ y := v_0.Args[0]
+ if !(rc >= 32) {
+ break
+ }
+ v.reset(OpLOONG64SRAconst)
+ v.AuxInt = int64ToAuxInt(31)
+ v.AddArg(y)
+ return true
+ }
// match: (SRAVconst [c] (MOVVconst [d]))
// result: (MOVVconst [d>>uint64(c)])
for {
}
return false
}
+func rewriteValueLOONG64_OpLOONG64SRL(v *Value) bool {
+ v_1 := v.Args[1]
+ v_0 := v.Args[0]
+ // match: (SRL _ (MOVVconst [c]))
+ // cond: uint64(c) >= 32
+ // result: (MOVVconst [0])
+ for {
+ if v_1.Op != OpLOONG64MOVVconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ if !(uint64(c) >= 32) {
+ break
+ }
+ v.reset(OpLOONG64MOVVconst)
+ v.AuxInt = int64ToAuxInt(0)
+ return true
+ }
+ // match: (SRL x (MOVVconst [c]))
+ // cond: uint64(c) >= 0 && uint64(c) <= 31
+ // result: (SRLconst x [c])
+ for {
+ x := v_0
+ if v_1.Op != OpLOONG64MOVVconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ if !(uint64(c) >= 0 && uint64(c) <= 31) {
+ break
+ }
+ v.reset(OpLOONG64SRLconst)
+ v.AuxInt = int64ToAuxInt(c)
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
func rewriteValueLOONG64_OpLOONG64SRLV(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
v.AddArg(x)
return true
}
+ // match: (SRLVconst [rc] (MOVWUreg y))
+ // cond: rc >= 0 && rc <= 31
+ // result: (SRLconst [int64(rc)] y)
+ for {
+ rc := auxIntToInt64(v.AuxInt)
+ if v_0.Op != OpLOONG64MOVWUreg {
+ break
+ }
+ y := v_0.Args[0]
+ if !(rc >= 0 && rc <= 31) {
+ break
+ }
+ v.reset(OpLOONG64SRLconst)
+ v.AuxInt = int64ToAuxInt(int64(rc))
+ v.AddArg(y)
+ return true
+ }
// match: (SRLVconst [rc] (MOVWUreg x))
// cond: rc >= 32
// result: (MOVVconst [0])
b := v.Block
typ := &b.Func.Config.Types
// match: (Lsh32x16 <t> x y)
- // result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
+ // result: (MASKEQZ (SLL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
for {
t := v.Type
x := v_0
y := v_1
v.reset(OpLOONG64MASKEQZ)
- v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
+ v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v1.AddArg(y)
v0.AddArg2(x, v1)
v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
- v3.AuxInt = int64ToAuxInt(64)
+ v3.AuxInt = int64ToAuxInt(32)
v2.AddArg2(v3, v1)
v.AddArg2(v0, v2)
return true
b := v.Block
typ := &b.Func.Config.Types
// match: (Lsh32x32 <t> x y)
- // result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
+ // result: (MASKEQZ (SLL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
for {
t := v.Type
x := v_0
y := v_1
v.reset(OpLOONG64MASKEQZ)
- v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
+ v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v1.AddArg(y)
v0.AddArg2(x, v1)
v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
- v3.AuxInt = int64ToAuxInt(64)
+ v3.AuxInt = int64ToAuxInt(32)
v2.AddArg2(v3, v1)
v.AddArg2(v0, v2)
return true
b := v.Block
typ := &b.Func.Config.Types
// match: (Lsh32x64 <t> x y)
- // result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
+ // result: (MASKEQZ (SLL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
for {
t := v.Type
x := v_0
y := v_1
v.reset(OpLOONG64MASKEQZ)
- v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
+ v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
v0.AddArg2(x, y)
v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
- v2.AuxInt = int64ToAuxInt(64)
+ v2.AuxInt = int64ToAuxInt(32)
v1.AddArg2(v2, y)
v.AddArg2(v0, v1)
return true
b := v.Block
typ := &b.Func.Config.Types
// match: (Lsh32x8 <t> x y)
- // result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
+ // result: (MASKEQZ (SLL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
for {
t := v.Type
x := v_0
y := v_1
v.reset(OpLOONG64MASKEQZ)
- v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
+ v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v1.AddArg(y)
v0.AddArg2(x, v1)
v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
- v3.AuxInt = int64ToAuxInt(64)
+ v3.AuxInt = int64ToAuxInt(32)
v2.AddArg2(v3, v1)
v.AddArg2(v0, v2)
return true
b := v.Block
typ := &b.Func.Config.Types
// match: (Rsh32Ux16 <t> x y)
- // result: (MASKEQZ (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
+ // result: (MASKEQZ (SRL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
for {
t := v.Type
x := v_0
y := v_1
v.reset(OpLOONG64MASKEQZ)
- v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
- v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
- v1.AddArg(x)
- v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
- v2.AddArg(y)
- v0.AddArg2(v1, v2)
- v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
- v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
- v4.AuxInt = int64ToAuxInt(64)
- v3.AddArg2(v4, v2)
- v.AddArg2(v0, v3)
+ v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
+ v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+ v1.AddArg(y)
+ v0.AddArg2(x, v1)
+ v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
+ v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
+ v3.AuxInt = int64ToAuxInt(32)
+ v2.AddArg2(v3, v1)
+ v.AddArg2(v0, v2)
return true
}
}
b := v.Block
typ := &b.Func.Config.Types
// match: (Rsh32Ux32 <t> x y)
- // result: (MASKEQZ (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
+ // result: (MASKEQZ (SRL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
for {
t := v.Type
x := v_0
y := v_1
v.reset(OpLOONG64MASKEQZ)
- v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
+ v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
- v1.AddArg(x)
- v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
- v2.AddArg(y)
- v0.AddArg2(v1, v2)
- v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
- v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
- v4.AuxInt = int64ToAuxInt(64)
- v3.AddArg2(v4, v2)
- v.AddArg2(v0, v3)
+ v1.AddArg(y)
+ v0.AddArg2(x, v1)
+ v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
+ v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
+ v3.AuxInt = int64ToAuxInt(32)
+ v2.AddArg2(v3, v1)
+ v.AddArg2(v0, v2)
return true
}
}
b := v.Block
typ := &b.Func.Config.Types
// match: (Rsh32Ux64 <t> x y)
- // result: (MASKEQZ (SRLV <t> (ZeroExt32to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
+ // result: (MASKEQZ (SRL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
for {
t := v.Type
x := v_0
y := v_1
v.reset(OpLOONG64MASKEQZ)
- v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
- v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
- v1.AddArg(x)
- v0.AddArg2(v1, y)
- v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
- v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
- v3.AuxInt = int64ToAuxInt(64)
- v2.AddArg2(v3, y)
- v.AddArg2(v0, v2)
+ v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
+ v0.AddArg2(x, y)
+ v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
+ v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
+ v2.AuxInt = int64ToAuxInt(32)
+ v1.AddArg2(v2, y)
+ v.AddArg2(v0, v1)
return true
}
}
b := v.Block
typ := &b.Func.Config.Types
// match: (Rsh32Ux8 <t> x y)
- // result: (MASKEQZ (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
+ // result: (MASKEQZ (SRL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
for {
t := v.Type
x := v_0
y := v_1
v.reset(OpLOONG64MASKEQZ)
- v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
- v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
- v1.AddArg(x)
- v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
- v2.AddArg(y)
- v0.AddArg2(v1, v2)
- v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
- v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
- v4.AuxInt = int64ToAuxInt(64)
- v3.AddArg2(v4, v2)
- v.AddArg2(v0, v3)
+ v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
+ v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+ v1.AddArg(y)
+ v0.AddArg2(x, v1)
+ v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
+ v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
+ v3.AuxInt = int64ToAuxInt(32)
+ v2.AddArg2(v3, v1)
+ v.AddArg2(v0, v2)
return true
}
}
b := v.Block
typ := &b.Func.Config.Types
// match: (Rsh32x16 <t> x y)
- // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
+ // result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt16to64 y)))
for {
t := v.Type
x := v_0
y := v_1
- v.reset(OpLOONG64SRAV)
- v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
- v0.AddArg(x)
- v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
- v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
- v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
- v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
- v4.AddArg(y)
- v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
- v5.AuxInt = int64ToAuxInt(63)
- v3.AddArg2(v4, v5)
- v2.AddArg(v3)
- v1.AddArg2(v2, v4)
- v.AddArg2(v0, v1)
+ v.reset(OpLOONG64SRA)
+ v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
+ v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
+ v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
+ v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+ v3.AddArg(y)
+ v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
+ v4.AuxInt = int64ToAuxInt(31)
+ v2.AddArg2(v3, v4)
+ v1.AddArg(v2)
+ v0.AddArg2(v1, v3)
+ v.AddArg2(x, v0)
return true
}
}
b := v.Block
typ := &b.Func.Config.Types
// match: (Rsh32x32 <t> x y)
- // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
+ // result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt32to64 y)))
for {
t := v.Type
x := v_0
y := v_1
- v.reset(OpLOONG64SRAV)
- v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
- v0.AddArg(x)
- v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
- v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
- v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
- v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
- v4.AddArg(y)
- v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
- v5.AuxInt = int64ToAuxInt(63)
- v3.AddArg2(v4, v5)
- v2.AddArg(v3)
- v1.AddArg2(v2, v4)
- v.AddArg2(v0, v1)
+ v.reset(OpLOONG64SRA)
+ v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
+ v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
+ v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
+ v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+ v3.AddArg(y)
+ v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
+ v4.AuxInt = int64ToAuxInt(31)
+ v2.AddArg2(v3, v4)
+ v1.AddArg(v2)
+ v0.AddArg2(v1, v3)
+ v.AddArg2(x, v0)
return true
}
}
b := v.Block
typ := &b.Func.Config.Types
// match: (Rsh32x64 <t> x y)
- // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
+ // result: (SRA x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [31]))) y))
for {
t := v.Type
x := v_0
y := v_1
- v.reset(OpLOONG64SRAV)
- v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
- v0.AddArg(x)
- v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
- v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
- v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
- v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
- v4.AuxInt = int64ToAuxInt(63)
- v3.AddArg2(y, v4)
- v2.AddArg(v3)
- v1.AddArg2(v2, y)
- v.AddArg2(v0, v1)
+ v.reset(OpLOONG64SRA)
+ v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
+ v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
+ v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
+ v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
+ v3.AuxInt = int64ToAuxInt(31)
+ v2.AddArg2(y, v3)
+ v1.AddArg(v2)
+ v0.AddArg2(v1, y)
+ v.AddArg2(x, v0)
return true
}
}
b := v.Block
typ := &b.Func.Config.Types
// match: (Rsh32x8 <t> x y)
- // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
+ // result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt8to64 y)))
for {
t := v.Type
x := v_0
y := v_1
- v.reset(OpLOONG64SRAV)
- v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
- v0.AddArg(x)
- v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
- v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
- v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
- v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
- v4.AddArg(y)
- v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
- v5.AuxInt = int64ToAuxInt(63)
- v3.AddArg2(v4, v5)
- v2.AddArg(v3)
- v1.AddArg2(v2, v4)
- v.AddArg2(v0, v1)
+ v.reset(OpLOONG64SRA)
+ v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
+ v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
+ v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
+ v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+ v3.AddArg(y)
+ v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
+ v4.AuxInt = int64ToAuxInt(31)
+ v2.AddArg2(v3, v4)
+ v1.AddArg(v2)
+ v0.AddArg2(v1, v3)
+ v.AddArg2(x, v0)
return true
}
}