(Com32 x) -> (NOTW x)
(Com16 x) -> (NOTW x)
(Com8 x) -> (NOTW x)
-(NOT x) && true -> (XORconst [-1] x)
+(NOT x) && true -> (XOR (MOVDconst [-1]) x)
(NOTW x) && true -> (XORWconst [-1] x)
// Lowering boolean ops
(MULLW x (MOVDconst [c])) -> (MULLWconst [c] x)
(MULLW (MOVDconst [c]) x) -> (MULLWconst [c] x)
-(AND x (MOVDconst [c])) && is32Bit(c) -> (ANDconst [c] x)
-(AND (MOVDconst [c]) x) && is32Bit(c) -> (ANDconst [c] x)
+// NILF instructions leave the high 32 bits of the target register
+// unchanged, which is equivalent to ANDing with a constant whose
+// leftmost (high) 32 bits are all set.
+// TODO(mundaym): modify the assembler to accept 64-bit values
+// and use isU32Bit(^c).
+(AND x (MOVDconst [c])) && is32Bit(c) && c < 0 -> (ANDconst [c] x)
+(AND (MOVDconst [c]) x) && is32Bit(c) && c < 0 -> (ANDconst [c] x)
(ANDW x (MOVDconst [c])) -> (ANDWconst [c] x)
(ANDW (MOVDconst [c]) x) -> (ANDWconst [c] x)
(ANDWconst [c] (ANDWconst [d] x)) -> (ANDWconst [c & d] x)
(ANDconst [c] (ANDconst [d] x)) -> (ANDconst [c & d] x)
-(OR x (MOVDconst [c])) && is32Bit(c) -> (ORconst [c] x)
-(OR (MOVDconst [c]) x) && is32Bit(c) -> (ORconst [c] x)
+(OR x (MOVDconst [c])) && isU32Bit(c) -> (ORconst [c] x)
+(OR (MOVDconst [c]) x) && isU32Bit(c) -> (ORconst [c] x)
(ORW x (MOVDconst [c])) -> (ORWconst [c] x)
(ORW (MOVDconst [c]) x) -> (ORWconst [c] x)
-(XOR x (MOVDconst [c])) && is32Bit(c) -> (XORconst [c] x)
-(XOR (MOVDconst [c]) x) && is32Bit(c) -> (XORconst [c] x)
+(XOR x (MOVDconst [c])) && isU32Bit(c) -> (XORconst [c] x)
+(XOR (MOVDconst [c]) x) && isU32Bit(c) -> (XORconst [c] x)
(XORW x (MOVDconst [c])) -> (XORWconst [c] x)
(XORW (MOVDconst [c]) x) -> (XORWconst [c] x)
(CMPWU x (MOVDconst [c])) -> (CMPWUconst x [int64(uint32(c))])
(CMPWU (MOVDconst [c]) x) -> (InvertFlags (CMPWUconst x [int64(uint32(c))]))
-// Using MOVBZreg instead of AND is cheaper.
-(ANDconst [0xFF] x) -> (MOVBZreg x)
-(ANDconst [0xFFFF] x) -> (MOVHZreg x)
-(ANDconst [0xFFFFFFFF] x) -> (MOVWZreg x)
+// Using MOV{W,H,B}Zreg instead of AND is cheaper.
+(AND (MOVDconst [0xFF]) x) -> (MOVBZreg x)
+(AND x (MOVDconst [0xFF])) -> (MOVBZreg x)
+(AND (MOVDconst [0xFFFF]) x) -> (MOVHZreg x)
+(AND x (MOVDconst [0xFFFF])) -> (MOVHZreg x)
+(AND (MOVDconst [0xFFFFFFFF]) x) -> (MOVWZreg x)
+(AND x (MOVDconst [0xFFFFFFFF])) -> (MOVWZreg x)
+(ANDWconst [0xFF] x) -> (MOVBZreg x)
+(ANDWconst [0xFFFF] x) -> (MOVHZreg x)
// strength reduction
(MULLDconst [-1] x) -> (NEG x)
(MOVWZload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _)) && sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) -> x
(MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _)) && sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) -> x
-// Fold extensions and ANDs together.
-(MOVBZreg (ANDWconst [c] x)) -> (ANDconst [c & 0xff] x)
-(MOVHZreg (ANDWconst [c] x)) -> (ANDconst [c & 0xffff] x)
-(MOVWZreg (ANDWconst [c] x)) -> (ANDconst [c & 0xffffffff] x)
-(MOVBreg (ANDWconst [c] x)) && c & 0x80 == 0 -> (ANDconst [c & 0x7f] x)
-(MOVHreg (ANDWconst [c] x)) && c & 0x8000 == 0 -> (ANDconst [c & 0x7fff] x)
-(MOVWreg (ANDWconst [c] x)) && c & 0x80000000 == 0 -> (ANDconst [c & 0x7fffffff] x)
-
-(MOVBZreg (ANDconst [c] x)) -> (ANDconst [c & 0xff] x)
-(MOVHZreg (ANDconst [c] x)) -> (ANDconst [c & 0xffff] x)
-(MOVWZreg (ANDconst [c] x)) -> (ANDconst [c & 0xffffffff] x)
-(MOVBreg (ANDconst [c] x)) && c & 0x80 == 0 -> (ANDconst [c & 0x7f] x)
-(MOVHreg (ANDconst [c] x)) && c & 0x8000 == 0 -> (ANDconst [c & 0x7fff] x)
-(MOVWreg (ANDconst [c] x)) && c & 0x80000000 == 0 -> (ANDconst [c & 0x7fffffff] x)
-
// Don't extend before storing
(MOVWstore [off] {sym} ptr (MOVWreg x) mem) -> (MOVWstore [off] {sym} ptr x mem)
(MOVHstore [off] {sym} ptr (MOVHreg x) mem) -> (MOVHstore [off] {sym} ptr x mem)
(NEGW (MOVDconst [c])) -> (MOVDconst [int64(int32(-c))])
(MULLDconst [c] (MOVDconst [d])) -> (MOVDconst [c*d])
(MULLWconst [c] (MOVDconst [d])) -> (MOVDconst [int64(int32(c*d))])
+(AND (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c&d])
(ANDconst [c] (MOVDconst [d])) -> (MOVDconst [c&d])
(ANDWconst [c] (MOVDconst [d])) -> (MOVDconst [c&d])
+(OR (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c|d])
(ORconst [c] (MOVDconst [d])) -> (MOVDconst [c|d])
(ORWconst [c] (MOVDconst [d])) -> (MOVDconst [c|d])
+(XOR (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c^d])
(XORconst [c] (MOVDconst [d])) -> (MOVDconst [c^d])
(XORWconst [c] (MOVDconst [d])) -> (MOVDconst [c^d])
-(NOT (MOVDconst [c])) -> (MOVDconst [^c])
-(NOTW (MOVDconst [c])) -> (MOVDconst [^c])
// generic simplifications
// TODO: more of this
b := v.Block
_ = b
// match: (AND x (MOVDconst [c]))
- // cond: is32Bit(c)
+ // cond: is32Bit(c) && c < 0
// result: (ANDconst [c] x)
for {
x := v.Args[0]
break
}
c := v_1.AuxInt
- if !(is32Bit(c)) {
+ if !(is32Bit(c) && c < 0) {
break
}
v.reset(OpS390XANDconst)
return true
}
// match: (AND (MOVDconst [c]) x)
- // cond: is32Bit(c)
+ // cond: is32Bit(c) && c < 0
// result: (ANDconst [c] x)
for {
v_0 := v.Args[0]
}
c := v_0.AuxInt
x := v.Args[1]
- if !(is32Bit(c)) {
+ if !(is32Bit(c) && c < 0) {
break
}
v.reset(OpS390XANDconst)
v.AddArg(x)
return true
}
+ // match: (AND (MOVDconst [0xFF]) x)
+ // cond:
+ // result: (MOVBZreg x)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVDconst {
+ break
+ }
+ if v_0.AuxInt != 0xFF {
+ break
+ }
+ x := v.Args[1]
+ v.reset(OpS390XMOVBZreg)
+ v.AddArg(x)
+ return true
+ }
+ // match: (AND x (MOVDconst [0xFF]))
+ // cond:
+ // result: (MOVBZreg x)
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVDconst {
+ break
+ }
+ if v_1.AuxInt != 0xFF {
+ break
+ }
+ v.reset(OpS390XMOVBZreg)
+ v.AddArg(x)
+ return true
+ }
+ // match: (AND (MOVDconst [0xFFFF]) x)
+ // cond:
+ // result: (MOVHZreg x)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVDconst {
+ break
+ }
+ if v_0.AuxInt != 0xFFFF {
+ break
+ }
+ x := v.Args[1]
+ v.reset(OpS390XMOVHZreg)
+ v.AddArg(x)
+ return true
+ }
+ // match: (AND x (MOVDconst [0xFFFF]))
+ // cond:
+ // result: (MOVHZreg x)
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVDconst {
+ break
+ }
+ if v_1.AuxInt != 0xFFFF {
+ break
+ }
+ v.reset(OpS390XMOVHZreg)
+ v.AddArg(x)
+ return true
+ }
+ // match: (AND (MOVDconst [0xFFFFFFFF]) x)
+ // cond:
+ // result: (MOVWZreg x)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVDconst {
+ break
+ }
+ if v_0.AuxInt != 0xFFFFFFFF {
+ break
+ }
+ x := v.Args[1]
+ v.reset(OpS390XMOVWZreg)
+ v.AddArg(x)
+ return true
+ }
+ // match: (AND x (MOVDconst [0xFFFFFFFF]))
+ // cond:
+ // result: (MOVWZreg x)
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVDconst {
+ break
+ }
+ if v_1.AuxInt != 0xFFFFFFFF {
+ break
+ }
+ v.reset(OpS390XMOVWZreg)
+ v.AddArg(x)
+ return true
+ }
+ // match: (AND (MOVDconst [c]) (MOVDconst [d]))
+ // cond:
+ // result: (MOVDconst [c&d])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVDconst {
+ break
+ }
+ d := v_1.AuxInt
+ v.reset(OpS390XMOVDconst)
+ v.AuxInt = c & d
+ return true
+ }
// match: (AND x x)
// cond:
// result: x
v.AddArg(x)
return true
}
+ // match: (ANDWconst [0xFF] x)
+ // cond:
+ // result: (MOVBZreg x)
+ for {
+ if v.AuxInt != 0xFF {
+ break
+ }
+ x := v.Args[0]
+ v.reset(OpS390XMOVBZreg)
+ v.AddArg(x)
+ return true
+ }
+ // match: (ANDWconst [0xFFFF] x)
+ // cond:
+ // result: (MOVHZreg x)
+ for {
+ if v.AuxInt != 0xFFFF {
+ break
+ }
+ x := v.Args[0]
+ v.reset(OpS390XMOVHZreg)
+ v.AddArg(x)
+ return true
+ }
// match: (ANDWconst [c] _)
// cond: int32(c)==0
// result: (MOVDconst [0])
v.AddArg(x)
return true
}
- // match: (ANDconst [0xFF] x)
- // cond:
- // result: (MOVBZreg x)
- for {
- if v.AuxInt != 0xFF {
- break
- }
- x := v.Args[0]
- v.reset(OpS390XMOVBZreg)
- v.AddArg(x)
- return true
- }
- // match: (ANDconst [0xFFFF] x)
- // cond:
- // result: (MOVHZreg x)
- for {
- if v.AuxInt != 0xFFFF {
- break
- }
- x := v.Args[0]
- v.reset(OpS390XMOVHZreg)
- v.AddArg(x)
- return true
- }
- // match: (ANDconst [0xFFFFFFFF] x)
- // cond:
- // result: (MOVWZreg x)
- for {
- if v.AuxInt != 0xFFFFFFFF {
- break
- }
- x := v.Args[0]
- v.reset(OpS390XMOVWZreg)
- v.AddArg(x)
- return true
- }
// match: (ANDconst [0] _)
// cond:
// result: (MOVDconst [0])
v0.AddArg(mem)
return true
}
- // match: (MOVBZreg (ANDWconst [c] x))
- // cond:
- // result: (ANDconst [c & 0xff] x)
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XANDWconst {
- break
- }
- c := v_0.AuxInt
- x := v_0.Args[0]
- v.reset(OpS390XANDconst)
- v.AuxInt = c & 0xff
- v.AddArg(x)
- return true
- }
- // match: (MOVBZreg (ANDconst [c] x))
- // cond:
- // result: (ANDconst [c & 0xff] x)
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XANDconst {
- break
- }
- c := v_0.AuxInt
- x := v_0.Args[0]
- v.reset(OpS390XANDconst)
- v.AuxInt = c & 0xff
- v.AddArg(x)
- return true
- }
return false
}
func rewriteValueS390X_OpS390XMOVBload(v *Value, config *Config) bool {
v0.AddArg(mem)
return true
}
- // match: (MOVBreg (ANDWconst [c] x))
- // cond: c & 0x80 == 0
- // result: (ANDconst [c & 0x7f] x)
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XANDWconst {
- break
- }
- c := v_0.AuxInt
- x := v_0.Args[0]
- if !(c&0x80 == 0) {
- break
- }
- v.reset(OpS390XANDconst)
- v.AuxInt = c & 0x7f
- v.AddArg(x)
- return true
- }
- // match: (MOVBreg (ANDconst [c] x))
- // cond: c & 0x80 == 0
- // result: (ANDconst [c & 0x7f] x)
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XANDconst {
- break
- }
- c := v_0.AuxInt
- x := v_0.Args[0]
- if !(c&0x80 == 0) {
- break
- }
- v.reset(OpS390XANDconst)
- v.AuxInt = c & 0x7f
- v.AddArg(x)
- return true
- }
return false
}
func rewriteValueS390X_OpS390XMOVBstore(v *Value, config *Config) bool {
v0.AddArg(mem)
return true
}
- // match: (MOVHZreg (ANDWconst [c] x))
- // cond:
- // result: (ANDconst [c & 0xffff] x)
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XANDWconst {
- break
- }
- c := v_0.AuxInt
- x := v_0.Args[0]
- v.reset(OpS390XANDconst)
- v.AuxInt = c & 0xffff
- v.AddArg(x)
- return true
- }
- // match: (MOVHZreg (ANDconst [c] x))
- // cond:
- // result: (ANDconst [c & 0xffff] x)
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XANDconst {
- break
- }
- c := v_0.AuxInt
- x := v_0.Args[0]
- v.reset(OpS390XANDconst)
- v.AuxInt = c & 0xffff
- v.AddArg(x)
- return true
- }
return false
}
func rewriteValueS390X_OpS390XMOVHload(v *Value, config *Config) bool {
v0.AddArg(mem)
return true
}
- // match: (MOVHreg (ANDWconst [c] x))
- // cond: c & 0x8000 == 0
- // result: (ANDconst [c & 0x7fff] x)
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XANDWconst {
- break
- }
- c := v_0.AuxInt
- x := v_0.Args[0]
- if !(c&0x8000 == 0) {
- break
- }
- v.reset(OpS390XANDconst)
- v.AuxInt = c & 0x7fff
- v.AddArg(x)
- return true
- }
- // match: (MOVHreg (ANDconst [c] x))
- // cond: c & 0x8000 == 0
- // result: (ANDconst [c & 0x7fff] x)
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XANDconst {
- break
- }
- c := v_0.AuxInt
- x := v_0.Args[0]
- if !(c&0x8000 == 0) {
- break
- }
- v.reset(OpS390XANDconst)
- v.AuxInt = c & 0x7fff
- v.AddArg(x)
- return true
- }
return false
}
func rewriteValueS390X_OpS390XMOVHstore(v *Value, config *Config) bool {
v0.AddArg(mem)
return true
}
- // match: (MOVWZreg (ANDWconst [c] x))
- // cond:
- // result: (ANDconst [c & 0xffffffff] x)
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XANDWconst {
- break
- }
- c := v_0.AuxInt
- x := v_0.Args[0]
- v.reset(OpS390XANDconst)
- v.AuxInt = c & 0xffffffff
- v.AddArg(x)
- return true
- }
- // match: (MOVWZreg (ANDconst [c] x))
- // cond:
- // result: (ANDconst [c & 0xffffffff] x)
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XANDconst {
- break
- }
- c := v_0.AuxInt
- x := v_0.Args[0]
- v.reset(OpS390XANDconst)
- v.AuxInt = c & 0xffffffff
- v.AddArg(x)
- return true
- }
return false
}
func rewriteValueS390X_OpS390XMOVWload(v *Value, config *Config) bool {
v0.AddArg(mem)
return true
}
- // match: (MOVWreg (ANDWconst [c] x))
- // cond: c & 0x80000000 == 0
- // result: (ANDconst [c & 0x7fffffff] x)
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XANDWconst {
- break
- }
- c := v_0.AuxInt
- x := v_0.Args[0]
- if !(c&0x80000000 == 0) {
- break
- }
- v.reset(OpS390XANDconst)
- v.AuxInt = c & 0x7fffffff
- v.AddArg(x)
- return true
- }
- // match: (MOVWreg (ANDconst [c] x))
- // cond: c & 0x80000000 == 0
- // result: (ANDconst [c & 0x7fffffff] x)
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XANDconst {
- break
- }
- c := v_0.AuxInt
- x := v_0.Args[0]
- if !(c&0x80000000 == 0) {
- break
- }
- v.reset(OpS390XANDconst)
- v.AuxInt = c & 0x7fffffff
- v.AddArg(x)
- return true
- }
return false
}
func rewriteValueS390X_OpS390XMOVWstore(v *Value, config *Config) bool {
_ = b
// match: (NOT x)
// cond: true
- // result: (XORconst [-1] x)
+ // result: (XOR (MOVDconst [-1]) x)
for {
x := v.Args[0]
if !(true) {
break
}
- v.reset(OpS390XXORconst)
- v.AuxInt = -1
+ v.reset(OpS390XXOR)
+ v0 := b.NewValue0(v.Line, OpS390XMOVDconst, config.fe.TypeUInt64())
+ v0.AuxInt = -1
+ v.AddArg(v0)
v.AddArg(x)
return true
}
- // match: (NOT (MOVDconst [c]))
- // cond:
- // result: (MOVDconst [^c])
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XMOVDconst {
- break
- }
- c := v_0.AuxInt
- v.reset(OpS390XMOVDconst)
- v.AuxInt = ^c
- return true
- }
return false
}
func rewriteValueS390X_OpS390XNOTW(v *Value, config *Config) bool {
v.AddArg(x)
return true
}
- // match: (NOTW (MOVDconst [c]))
- // cond:
- // result: (MOVDconst [^c])
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpS390XMOVDconst {
- break
- }
- c := v_0.AuxInt
- v.reset(OpS390XMOVDconst)
- v.AuxInt = ^c
- return true
- }
return false
}
func rewriteValueS390X_OpS390XOR(v *Value, config *Config) bool {
b := v.Block
_ = b
// match: (OR x (MOVDconst [c]))
- // cond: is32Bit(c)
+ // cond: isU32Bit(c)
// result: (ORconst [c] x)
for {
x := v.Args[0]
break
}
c := v_1.AuxInt
- if !(is32Bit(c)) {
+ if !(isU32Bit(c)) {
break
}
v.reset(OpS390XORconst)
return true
}
// match: (OR (MOVDconst [c]) x)
- // cond: is32Bit(c)
+ // cond: isU32Bit(c)
// result: (ORconst [c] x)
for {
v_0 := v.Args[0]
}
c := v_0.AuxInt
x := v.Args[1]
- if !(is32Bit(c)) {
+ if !(isU32Bit(c)) {
break
}
v.reset(OpS390XORconst)
v.AddArg(x)
return true
}
+ // match: (OR (MOVDconst [c]) (MOVDconst [d]))
+ // cond:
+ // result: (MOVDconst [c|d])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVDconst {
+ break
+ }
+ d := v_1.AuxInt
+ v.reset(OpS390XMOVDconst)
+ v.AuxInt = c | d
+ return true
+ }
// match: (OR x x)
// cond:
// result: x
b := v.Block
_ = b
// match: (XOR x (MOVDconst [c]))
- // cond: is32Bit(c)
+ // cond: isU32Bit(c)
// result: (XORconst [c] x)
for {
x := v.Args[0]
break
}
c := v_1.AuxInt
- if !(is32Bit(c)) {
+ if !(isU32Bit(c)) {
break
}
v.reset(OpS390XXORconst)
return true
}
// match: (XOR (MOVDconst [c]) x)
- // cond: is32Bit(c)
+ // cond: isU32Bit(c)
// result: (XORconst [c] x)
for {
v_0 := v.Args[0]
}
c := v_0.AuxInt
x := v.Args[1]
- if !(is32Bit(c)) {
+ if !(isU32Bit(c)) {
break
}
v.reset(OpS390XXORconst)
v.AddArg(x)
return true
}
+ // match: (XOR (MOVDconst [c]) (MOVDconst [d]))
+ // cond:
+ // result: (MOVDconst [c^d])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpS390XMOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpS390XMOVDconst {
+ break
+ }
+ d := v_1.AuxInt
+ v.reset(OpS390XMOVDconst)
+ v.AuxInt = c ^ d
+ return true
+ }
// match: (XOR x x)
// cond:
// result: (MOVDconst [0])