(AtomicOr32 ...) => (LoweredAtomicOr32 ...)
// Conditional branches
-(If cond yes no) => (BNEZ cond yes no)
+(If cond yes no) => (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)
// Optimizations
(MOVWstore [off] {sym} ptr (MOVDconst [0]) mem) => (MOVWstorezero [off] {sym} ptr mem)
(MOVDstore [off] {sym} ptr (MOVDconst [0]) mem) => (MOVDstorezero [off] {sym} ptr mem)
+// Boolean ops are already extended.
+(MOVBUreg x:((SEQZ|SNEZ) _)) => x
+(MOVBUreg x:((SLT|SLTU) _ _)) => x
+
// Avoid sign/zero extension for consts.
(MOVBreg (MOVDconst [c])) => (MOVDconst [int64(int8(c))])
(MOVHreg (MOVDconst [c])) => (MOVDconst [int64(int16(c))])
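
These rules work together: lowering If now wraps the condition in a MOVBUreg, since BNEZ tests the whole 64-bit register, and the new MOVBUreg rules then drop that extension again whenever the condition already comes from an op that produces a fully extended 0/1 result. A rough Go-level sketch of the intended effect (the function names and the SSA in the comments are illustrative, not taken from this change):

package p

// Illustrative only: expected SSA shapes on riscv64 under the rules above.

func cmpBranch(a, b int64) int64 {
	if a < b { // Less64 -> SLT; If -> BNEZ (MOVBUreg (SLT a b))
		return 1 // (MOVBUreg x:(SLT _ _)) => x drops the extension: BNEZ (SLT a b)
	}
	return 0
}

func boolBranch(c bool) int64 {
	if c { // If -> BNEZ (MOVBUreg c); c is not produced by one of the ops above,
		return 1 // so the zero extension stays in place.
	}
	return 0
}
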
func rewriteValueRISCV64_OpRISCV64MOVBUreg(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
+ // match: (MOVBUreg x:(SEQZ _))
+ // result: x
+ for {
+ x := v_0
+ if x.Op != OpRISCV64SEQZ {
+ break
+ }
+ v.copyOf(x)
+ return true
+ }
+ // match: (MOVBUreg x:(SNEZ _))
+ // result: x
+ for {
+ x := v_0
+ if x.Op != OpRISCV64SNEZ {
+ break
+ }
+ v.copyOf(x)
+ return true
+ }
+ // match: (MOVBUreg x:(SLT _ _))
+ // result: x
+ for {
+ x := v_0
+ if x.Op != OpRISCV64SLT {
+ break
+ }
+ v.copyOf(x)
+ return true
+ }
+ // match: (MOVBUreg x:(SLTU _ _))
+ // result: x
+ for {
+ x := v_0
+ if x.Op != OpRISCV64SLTU {
+ break
+ }
+ v.copyOf(x)
+ return true
+ }
// match: (MOVBUreg (MOVDconst [c]))
// result: (MOVDconst [int64(uint8(c))])
for {
}
}
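
The generated matcher has rulegen's usual shape: one single-pass for loop per rule that breaks out on the first failed check and returns true once a rewrite fires, falling through to return false if nothing matched. A minimal self-contained sketch of that control-flow pattern, using invented toy types in place of the real *Value:

package main

import "fmt"

// Toy stand-ins for ssa.Op and *ssa.Value, for illustration only.
type op int

const (
	opMOVBUreg op = iota
	opSLT
	opADD
)

type value struct {
	op   op
	args []*value
}

// rewriteMOVBUreg mirrors the generated structure: one single-pass
// "for { ...; break }" block per rule, returning true if a rewrite fired.
func rewriteMOVBUreg(v *value) bool {
	v_0 := v.args[0]
	// match: (MOVBUreg x:(SLT _ _))
	// result: x
	for {
		x := v_0
		if x.op != opSLT {
			break
		}
		*v = *x // stand-in for v.copyOf(x), which makes v equivalent to x
		return true
	}
	return false
}

func main() {
	v := &value{op: opMOVBUreg, args: []*value{{op: opSLT, args: []*value{{op: opADD}, {op: opADD}}}}}
	fmt.Println(rewriteMOVBUreg(v), v.op == opSLT) // true true
}
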
func rewriteBlockRISCV64(b *Block) bool {
+ typ := &b.Func.Config.Types
switch b.Kind {
case BlockRISCV64BEQ:
// match: (BEQ (MOVDconst [0]) cond yes no)
}
case BlockIf:
// match: (If cond yes no)
- // result: (BNEZ cond yes no)
+ // result: (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)
for {
cond := b.Controls[0]
- b.resetWithControl(BlockRISCV64BNEZ, cond)
+ v0 := b.NewValue0(cond.Pos, OpRISCV64MOVBUreg, typ.UInt64)
+ v0.AddArg(cond)
+ b.resetWithControl(BlockRISCV64BNEZ, v0)
return true
}
}
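
As an illustrative sanity check (not part of this change), branching on a comparison directly and branching on the same comparison stored in a bool must agree, whether or not the MOVBUreg survives into the final code:

package p

import "testing"

// lessDirect branches on the comparison itself; on riscv64 this should
// reduce to BNEZ (SLT a b) once the MOVBUreg is elided.
func lessDirect(a, b int64) int {
	if a < b {
		return 1
	}
	return 0
}

// lessViaBool routes the comparison through a bool variable; the branch
// tests the same value, so the results must match lessDirect.
func lessViaBool(a, b int64) int {
	c := a < b
	if c {
		return 1
	}
	return 0
}

func TestLessBranch(t *testing.T) {
	cases := []struct{ a, b int64 }{{1, 2}, {2, 1}, {-1, 0}, {0, 0}, {-5, -9}}
	for _, tc := range cases {
		if got, want := lessViaBool(tc.a, tc.b), lessDirect(tc.a, tc.b); got != want {
			t.Errorf("lessViaBool(%d, %d) = %d, want %d", tc.a, tc.b, got, want)
		}
	}
}
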