(MULQconst [c] (NEGQ x)) && c != -(1<<31) -> (MULQconst [-c] x)
// checking AND against 0.
+// The AND's result must be otherwise unused (a.Uses == 1): TEST only sets
+// flags, so if the AND had other uses it would still have to be computed,
+// and one of its arguments would also have to be saved so it could feed
+// the TEST — costing an extra move instead of saving one.
-(CMPQconst (ANDQ x y) [0]) -> (TESTQ x y)
-(CMPLconst (ANDL x y) [0]) -> (TESTL x y)
-(CMPWconst (ANDL x y) [0]) -> (TESTW x y)
-(CMPBconst (ANDL x y) [0]) -> (TESTB x y)
-(CMPQconst (ANDQconst [c] x) [0]) -> (TESTQconst [c] x)
-(CMPLconst (ANDLconst [c] x) [0]) -> (TESTLconst [c] x)
-(CMPWconst (ANDLconst [c] x) [0]) -> (TESTWconst [int64(int16(c))] x)
-(CMPBconst (ANDLconst [c] x) [0]) -> (TESTBconst [int64(int8(c))] x)
+(CMPQconst a:(ANDQ x y) [0]) && a.Uses == 1 -> (TESTQ x y)
+(CMPLconst a:(ANDL x y) [0]) && a.Uses == 1 -> (TESTL x y)
+(CMPWconst a:(ANDL x y) [0]) && a.Uses == 1 -> (TESTW x y)
+(CMPBconst a:(ANDL x y) [0]) && a.Uses == 1 -> (TESTB x y)
+(CMPQconst a:(ANDQconst [c] x) [0]) && a.Uses == 1 -> (TESTQconst [c] x)
+(CMPLconst a:(ANDLconst [c] x) [0]) && a.Uses == 1 -> (TESTLconst [c] x)
+(CMPWconst a:(ANDLconst [c] x) [0]) && a.Uses == 1 -> (TESTWconst [int64(int16(c))] x)
+(CMPBconst a:(ANDLconst [c] x) [0]) && a.Uses == 1 -> (TESTBconst [int64(int8(c))] x)
// Convert TESTx to TESTxconst if possible.
(TESTQ (MOVQconst [c]) x) && is32Bit(c) -> (TESTQconst [c] x)
v.reset(OpAMD64FlagLT_ULT)
return true
}
-	// match: (CMPBconst (ANDL x y) [0])
+	// match: (CMPBconst a:(ANDL x y) [0])
+	// cond: a.Uses == 1
	// result: (TESTB x y)
	for {
-		if v.AuxInt != 0 || v_0.Op != OpAMD64ANDL {
+		if v.AuxInt != 0 {
+			break
+		}
+		a := v_0
+		if a.Op != OpAMD64ANDL {
+			break
+		}
+		y := a.Args[1]
+		x := a.Args[0]
+		// Only rewrite when this compare is the AND's sole use;
+		// otherwise the AND (and one of its args) must be kept anyway.
+		if !(a.Uses == 1) {
			break
		}
-		y := v_0.Args[1]
-		x := v_0.Args[0]
		v.reset(OpAMD64TESTB)
		v.AddArg2(x, y)
		return true
	}
-	// match: (CMPBconst (ANDLconst [c] x) [0])
+	// match: (CMPBconst a:(ANDLconst [c] x) [0])
+	// cond: a.Uses == 1
	// result: (TESTBconst [int64(int8(c))] x)
	for {
-		if v.AuxInt != 0 || v_0.Op != OpAMD64ANDLconst {
+		if v.AuxInt != 0 {
+			break
+		}
+		a := v_0
+		if a.Op != OpAMD64ANDLconst {
+			break
+		}
+		c := a.AuxInt
+		x := a.Args[0]
+		// Only rewrite when this compare is the AND's sole use.
+		if !(a.Uses == 1) {
			break
		}
-		c := v_0.AuxInt
-		x := v_0.Args[0]
		v.reset(OpAMD64TESTBconst)
		v.AuxInt = int64(int8(c))
		v.AddArg(x)
		// NOTE(review): removed a stray `v.reset(OpAMD64FlagLT_ULT)` that
		// appeared here; it clobbered the TESTBconst built above, contradicting
		// the `// result:` comment — likely a fragment of a neighboring
		// constant-compare case fused in during chunking.
		return true
	}
-	// match: (CMPLconst (ANDL x y) [0])
+	// match: (CMPLconst a:(ANDL x y) [0])
+	// cond: a.Uses == 1
	// result: (TESTL x y)
	for {
-		if v.AuxInt != 0 || v_0.Op != OpAMD64ANDL {
+		if v.AuxInt != 0 {
+			break
+		}
+		a := v_0
+		if a.Op != OpAMD64ANDL {
+			break
+		}
+		y := a.Args[1]
+		x := a.Args[0]
+		// Only rewrite when this compare is the AND's sole use;
+		// otherwise the AND (and one of its args) must be kept anyway.
+		if !(a.Uses == 1) {
			break
		}
-		y := v_0.Args[1]
-		x := v_0.Args[0]
		v.reset(OpAMD64TESTL)
		v.AddArg2(x, y)
		return true
	}
-	// match: (CMPLconst (ANDLconst [c] x) [0])
+	// match: (CMPLconst a:(ANDLconst [c] x) [0])
+	// cond: a.Uses == 1
	// result: (TESTLconst [c] x)
	for {
-		if v.AuxInt != 0 || v_0.Op != OpAMD64ANDLconst {
+		if v.AuxInt != 0 {
+			break
+		}
+		a := v_0
+		if a.Op != OpAMD64ANDLconst {
+			break
+		}
+		c := a.AuxInt
+		x := a.Args[0]
+		// Only rewrite when this compare is the AND's sole use.
+		if !(a.Uses == 1) {
			break
		}
-		c := v_0.AuxInt
-		x := v_0.Args[0]
		v.reset(OpAMD64TESTLconst)
		v.AuxInt = c
		v.AddArg(x)
		// NOTE(review): removed a stray `v.reset(OpAMD64FlagLT_ULT)` that
		// appeared here; it clobbered the TESTLconst built above, contradicting
		// the `// result:` comment — likely a chunking artifact from a
		// neighboring constant-compare case.
		return true
	}
-	// match: (CMPQconst (ANDQ x y) [0])
+	// match: (CMPQconst a:(ANDQ x y) [0])
+	// cond: a.Uses == 1
	// result: (TESTQ x y)
	for {
-		if v.AuxInt != 0 || v_0.Op != OpAMD64ANDQ {
+		if v.AuxInt != 0 {
+			break
+		}
+		a := v_0
+		if a.Op != OpAMD64ANDQ {
+			break
+		}
+		y := a.Args[1]
+		x := a.Args[0]
+		// Only rewrite when this compare is the AND's sole use;
+		// otherwise the AND (and one of its args) must be kept anyway.
+		if !(a.Uses == 1) {
			break
		}
-		y := v_0.Args[1]
-		x := v_0.Args[0]
		v.reset(OpAMD64TESTQ)
		v.AddArg2(x, y)
		return true
	}
-	// match: (CMPQconst (ANDQconst [c] x) [0])
+	// match: (CMPQconst a:(ANDQconst [c] x) [0])
+	// cond: a.Uses == 1
	// result: (TESTQconst [c] x)
	for {
-		if v.AuxInt != 0 || v_0.Op != OpAMD64ANDQconst {
+		if v.AuxInt != 0 {
+			break
+		}
+		a := v_0
+		if a.Op != OpAMD64ANDQconst {
+			break
+		}
+		c := a.AuxInt
+		x := a.Args[0]
+		// Only rewrite when this compare is the AND's sole use.
+		if !(a.Uses == 1) {
			break
		}
-		c := v_0.AuxInt
-		x := v_0.Args[0]
		v.reset(OpAMD64TESTQconst)
		v.AuxInt = c
		v.AddArg(x)
		// NOTE(review): removed a stray `v.reset(OpAMD64FlagLT_ULT)` that
		// appeared here; it clobbered the TESTQconst built above, contradicting
		// the `// result:` comment — likely a chunking artifact from a
		// neighboring constant-compare case.
		return true
	}
-	// match: (CMPWconst (ANDL x y) [0])
+	// match: (CMPWconst a:(ANDL x y) [0])
+	// cond: a.Uses == 1
	// result: (TESTW x y)
	for {
-		if v.AuxInt != 0 || v_0.Op != OpAMD64ANDL {
+		if v.AuxInt != 0 {
+			break
+		}
+		a := v_0
+		if a.Op != OpAMD64ANDL {
+			break
+		}
+		y := a.Args[1]
+		x := a.Args[0]
+		// Only rewrite when this compare is the AND's sole use;
+		// otherwise the AND (and one of its args) must be kept anyway.
+		if !(a.Uses == 1) {
			break
		}
-		y := v_0.Args[1]
-		x := v_0.Args[0]
		v.reset(OpAMD64TESTW)
		v.AddArg2(x, y)
		return true
	}
-	// match: (CMPWconst (ANDLconst [c] x) [0])
+	// match: (CMPWconst a:(ANDLconst [c] x) [0])
+	// cond: a.Uses == 1
	// result: (TESTWconst [int64(int16(c))] x)
	for {
-		if v.AuxInt != 0 || v_0.Op != OpAMD64ANDLconst {
+		if v.AuxInt != 0 {
+			break
+		}
+		a := v_0
+		if a.Op != OpAMD64ANDLconst {
+			break
+		}
+		c := a.AuxInt
+		x := a.Args[0]
+		// Only rewrite when this compare is the AND's sole use.
+		if !(a.Uses == 1) {
			break
		}
-		c := v_0.AuxInt
-		x := v_0.Args[0]
		v.reset(OpAMD64TESTWconst)
		v.AuxInt = int64(int16(c))
		v.AddArg(x)
		// NOTE(review): this case is cut off at the chunk boundary — the
		// closing `return true` and `}` are not visible here; verify against
		// the full generated file before relying on this hunk.
--- /dev/null
+// asmcheck
+
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package codegen
+
+// Package-level sinks: assigning x and y to these gives them uses before
+// the line under test, so their register loads happen earlier.
+var gx, gy int
+
+// Test to make sure that (CMPQ (ANDQ x y) [0]) does not get rewritten to
+// (TESTQ x y) if the ANDQ has other uses. If that rewrite happens, then one
+// of the args of the ANDQ needs to be saved so it can be used as the arg to TESTQ.
+func andWithUse(x, y int) int {
+	// Load x,y into registers, so those MOVQ will not appear at the z := x&y line.
+	gx, gy = x, y
+	// amd64:-"MOVQ"
+	z := x & y
+	if z == 0 {
+		return 77
+	}
+	// Use z again (by returning it) so the AND has a second use and the
+	// CMP->TEST rewrite above must not fire.
+	return z
+}