(BNEZ (SEQZ x) yes no) => (BEQZ x yes no)
(BNEZ (SNEZ x) yes no) => (BNEZ x yes no)
-// Absorb NEG into branch.
+// Remove redundant NEG from BEQZ/BNEZ.
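+// NEG preserves whether a value is zero (-x == 0 exactly when x == 0),
+// so the negation has no effect on a branch that only tests for zero.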
(BEQZ (NEG x) yes no) => (BEQZ x yes no)
(BNEZ (NEG x) yes no) => (BNEZ x yes no)
(BGE (MOVDconst [0]) cond yes no) => (BLEZ cond yes no)
(BGE cond (MOVDconst [0]) yes no) => (BGEZ cond yes no)
-// Remove NEG when used with SEQZ/SNEZ.
+// Remove redundant NEG from SEQZ/SNEZ.
(SEQZ (NEG x)) => (SEQZ x)
(SNEZ (NEG x)) => (SNEZ x)
-// Store zero
+// Remove redundant SEQZ/SNEZ.
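+// SEQZ x is 1 when x == 0 and 0 otherwise; SNEZ x is the opposite, so a
+// nested pair always collapses to a single SEQZ or SNEZ of x.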
+(SEQZ (SEQZ x)) => (SNEZ x)
+(SEQZ (SNEZ x)) => (SEQZ x)
+(SNEZ (SEQZ x)) => (SEQZ x)
+(SNEZ (SNEZ x)) => (SNEZ x)
+
+// Store zero.
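+// The *storezero forms store from the zero register, so the constant 0
+// does not need to be materialised.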
(MOVBstore [off] {sym} ptr (MOVDconst [0]) mem) => (MOVBstorezero [off] {sym} ptr mem)
(MOVHstore [off] {sym} ptr (MOVDconst [0]) mem) => (MOVHstorezero [off] {sym} ptr mem)
(MOVWstore [off] {sym} ptr (MOVDconst [0]) mem) => (MOVWstorezero [off] {sym} ptr mem)
v.AddArg(x)
return true
}
+ // match: (SEQZ (SEQZ x))
+ // result: (SNEZ x)
+ for {
+ if v_0.Op != OpRISCV64SEQZ {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpRISCV64SNEZ)
+ v.AddArg(x)
+ return true
+ }
+ // match: (SEQZ (SNEZ x))
+ // result: (SEQZ x)
+ for {
+ if v_0.Op != OpRISCV64SNEZ {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpRISCV64SEQZ)
+ v.AddArg(x)
+ return true
+ }
return false
}
func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool {
v.AddArg(x)
return true
}
+ // match: (SNEZ (SEQZ x))
+ // result: (SEQZ x)
+ for {
+ if v_0.Op != OpRISCV64SEQZ {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpRISCV64SEQZ)
+ v.AddArg(x)
+ return true
+ }
+ // match: (SNEZ (SNEZ x))
+ // result: (SNEZ x)
+ for {
+ if v_0.Op != OpRISCV64SNEZ {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpRISCV64SNEZ)
+ v.AddArg(x)
+ return true
+ }
return false
}
func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool {