(NeqSlice x y) -> (NeqPtr (SlicePtr x) (SlicePtr y))
// Load of store of same address, with compatibly typed value and same size
-(Load <t1> p1 (Store {t2} p2 x _)) && isSamePtr(p1,p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.(*types.Type).Size() -> x
+(Load <t1> p1 (Store {t2} p2 x _))
+ && isSamePtr(p1, p2)
+ && t1.Compare(x.Type) == types.CMPeq
+ && t1.Size() == sizeof(t2)
+ -> x
+(Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 x _)))
+ && isSamePtr(p1, p3)
+ && t1.Compare(x.Type) == types.CMPeq
+ && t1.Size() == sizeof(t2)
+ && disjoint(p3, sizeof(t3), p2, sizeof(t2))
+ -> x
+(Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 x _))))
+ && isSamePtr(p1, p4)
+ && t1.Compare(x.Type) == types.CMPeq
+ && t1.Size() == sizeof(t2)
+ && disjoint(p4, sizeof(t4), p2, sizeof(t2))
+ && disjoint(p4, sizeof(t4), p3, sizeof(t3))
+ -> x
+(Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 x _)))))
+ && isSamePtr(p1, p5)
+ && t1.Compare(x.Type) == types.CMPeq
+ && t1.Size() == sizeof(t2)
+ && disjoint(p5, sizeof(t5), p2, sizeof(t2))
+ && disjoint(p5, sizeof(t5), p3, sizeof(t3))
+ && disjoint(p5, sizeof(t5), p4, sizeof(t4))
+ -> x
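+// Illustrative sketch (editorial note, not part of the rule set): Go code like
+//	p := new([4]int64)
+//	p[3] = 42
+//	p[2] = 7
+//	x := p[3]
+// yields a Load whose store chain contains a Store to the same address; the
+// intermediate store to p[2] is disjoint, so 42 is forwarded straight to x.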
// Pass constants through math.Float{32,64}bits and math.Float{32,64}frombits
-(Load <t1> p1 (Store {t2} p2 (Const64 [x]) _)) && isSamePtr(p1,p2) && t2.(*types.Type).Size() == 8 && is64BitFloat(t1) -> (Const64F [x])
-(Load <t1> p1 (Store {t2} p2 (Const32 [x]) _)) && isSamePtr(p1,p2) && t2.(*types.Type).Size() == 4 && is32BitFloat(t1) -> (Const32F [f2i(float64(math.Float32frombits(uint32(x))))])
-(Load <t1> p1 (Store {t2} p2 (Const64F [x]) _)) && isSamePtr(p1,p2) && t2.(*types.Type).Size() == 8 && is64BitInt(t1) -> (Const64 [x])
-(Load <t1> p1 (Store {t2} p2 (Const32F [x]) _)) && isSamePtr(p1,p2) && t2.(*types.Type).Size() == 4 && is32BitInt(t1) -> (Const32 [int64(int32(math.Float32bits(float32(i2f(x)))))])
+(Load <t1> p1 (Store {t2} p2 (Const64 [x]) _)) && isSamePtr(p1,p2) && sizeof(t2) == 8 && is64BitFloat(t1) -> (Const64F [x])
+(Load <t1> p1 (Store {t2} p2 (Const32 [x]) _)) && isSamePtr(p1,p2) && sizeof(t2) == 4 && is32BitFloat(t1) -> (Const32F [f2i(float64(math.Float32frombits(uint32(x))))])
+(Load <t1> p1 (Store {t2} p2 (Const64F [x]) _)) && isSamePtr(p1,p2) && sizeof(t2) == 8 && is64BitInt(t1) -> (Const64 [x])
+(Load <t1> p1 (Store {t2} p2 (Const32F [x]) _)) && isSamePtr(p1,p2) && sizeof(t2) == 4 && is32BitInt(t1) -> (Const32 [int64(int32(math.Float32bits(float32(i2f(x)))))])
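+// Illustrative sketch (editorial note): math.Float64frombits(c) for a
+// constant c compiles to a Store of (Const64 [c]) followed by a float Load
+// of the same 8-byte slot, which the first rule above folds to
+// (Const64F [c]) with no memory round trip.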
+
+// Float Loads up past intervening disjoint Stores to a Zero so they can be constant folded.
+(Load <t1> op:(OffPtr [o1] p1)
+ (Store {t2} p2 _
+ mem:(Zero [n] p3 _)))
+ && o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p3)
+ && disjoint(op, t1.Size(), p2, sizeof(t2))
+ -> @mem.Block (Load <t1> op mem)
+(Load <t1> op:(OffPtr [o1] p1)
+ (Store {t2} p2 _
+ (Store {t3} p3 _
+ mem:(Zero [n] p4 _))))
+ && o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p4)
+ && disjoint(op, t1.Size(), p2, sizeof(t2))
+ && disjoint(op, t1.Size(), p3, sizeof(t3))
+ -> @mem.Block (Load <t1> op mem)
+(Load <t1> op:(OffPtr [o1] p1)
+ (Store {t2} p2 _
+ (Store {t3} p3 _
+ (Store {t4} p4 _
+ mem:(Zero [n] p5 _)))))
+ && o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p5)
+ && disjoint(op, t1.Size(), p2, sizeof(t2))
+ && disjoint(op, t1.Size(), p3, sizeof(t3))
+ && disjoint(op, t1.Size(), p4, sizeof(t4))
+ -> @mem.Block (Load <t1> op mem)
+(Load <t1> op:(OffPtr [o1] p1)
+ (Store {t2} p2 _
+ (Store {t3} p3 _
+ (Store {t4} p4 _
+ (Store {t5} p5 _
+ mem:(Zero [n] p6 _))))))
+ && o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p6)
+ && disjoint(op, t1.Size(), p2, sizeof(t2))
+ && disjoint(op, t1.Size(), p3, sizeof(t3))
+ && disjoint(op, t1.Size(), p4, sizeof(t4))
+ && disjoint(op, t1.Size(), p5, sizeof(t5))
+ -> @mem.Block (Load <t1> op mem)
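+// Illustrative sketch (editorial note): in
+//	var a [4]float64 // Zero
+//	a[0] = x         // Store
+//	y := a[1]        // Load of still-zeroed memory
+// the Load is re-issued against the Zero's memory state (@mem.Block), where
+// the Zero-to-Load rules below can fold it to a constant.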
+
+// Zero to Load forwarding.
+(Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ && t1.IsBoolean()
+ && isSamePtr(p1, p2)
+ && n >= o + 1
+ -> (ConstBool [0])
+(Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ && is8BitInt(t1)
+ && isSamePtr(p1, p2)
+ && n >= o + 1
+ -> (Const8 [0])
+(Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ && is16BitInt(t1)
+ && isSamePtr(p1, p2)
+ && n >= o + 2
+ -> (Const16 [0])
+(Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ && is32BitInt(t1)
+ && isSamePtr(p1, p2)
+ && n >= o + 4
+ -> (Const32 [0])
+(Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ && is64BitInt(t1)
+ && isSamePtr(p1, p2)
+ && n >= o + 8
+ -> (Const64 [0])
+(Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ && is32BitFloat(t1)
+ && isSamePtr(p1, p2)
+ && n >= o + 4
+ -> (Const32F [0])
+(Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ && is64BitFloat(t1)
+ && isSamePtr(p1, p2)
+ && n >= o + 8
+ -> (Const64F [0])
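+// Illustrative sketch (editorial note):
+//	var a [8]byte
+//	x := a[3]
+// loads through an OffPtr from freshly zeroed memory with n >= o+1,
+// so x folds to (Const8 [0]).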
// Eliminate stores of values that have just been loaded from the same location.
-// We also handle the common case where there are some intermediate stores to non-overlapping struct fields.
-(Store {t1} p1 (Load <t2> p2 mem) mem) &&
- isSamePtr(p1, p2) &&
- t2.Size() == t1.(*types.Type).Size() -> mem
-(Store {t1} (OffPtr [o1] p1) (Load <t2> (OffPtr [o1] p2) oldmem) mem:(Store {t3} (OffPtr [o3] p3) _ oldmem)) &&
- isSamePtr(p1, p2) &&
- isSamePtr(p1, p3) &&
- t2.Size() == t1.(*types.Type).Size() &&
- !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size()) -> mem
-(Store {t1} (OffPtr [o1] p1) (Load <t2> (OffPtr [o1] p2) oldmem) mem:(Store {t3} (OffPtr [o3] p3) _ (Store {t4} (OffPtr [o4] p4) _ oldmem))) &&
- isSamePtr(p1, p2) &&
- isSamePtr(p1, p3) &&
- isSamePtr(p1, p4) &&
- t2.Size() == t1.(*types.Type).Size() &&
- !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size()) &&
- !overlap(o1, t2.Size(), o4, t4.(*types.Type).Size()) -> mem
-(Store {t1} (OffPtr [o1] p1) (Load <t2> (OffPtr [o1] p2) oldmem) mem:(Store {t3} (OffPtr [o3] p3) _ (Store {t4} (OffPtr [o4] p4) _ (Store {t5} (OffPtr [o5] p5) _ oldmem)))) &&
- isSamePtr(p1, p2) &&
- isSamePtr(p1, p3) &&
- isSamePtr(p1, p4) &&
- isSamePtr(p1, p5) &&
- t2.Size() == t1.(*types.Type).Size() &&
- !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size()) &&
- !overlap(o1, t2.Size(), o4, t4.(*types.Type).Size()) &&
- !overlap(o1, t2.Size(), o5, t5.(*types.Type).Size()) -> mem
+// We also handle the common case where there are some intermediate stores.
+(Store {t1} p1 (Load <t2> p2 mem) mem)
+ && isSamePtr(p1, p2)
+ && t2.Size() == sizeof(t1)
+ -> mem
+(Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ oldmem))
+ && isSamePtr(p1, p2)
+ && t2.Size() == sizeof(t1)
+ && disjoint(p1, sizeof(t1), p3, sizeof(t3))
+ -> mem
+(Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ (Store {t4} p4 _ oldmem)))
+ && isSamePtr(p1, p2)
+ && t2.Size() == sizeof(t1)
+ && disjoint(p1, sizeof(t1), p3, sizeof(t3))
+ && disjoint(p1, sizeof(t1), p4, sizeof(t4))
+ -> mem
+(Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 _ oldmem))))
+ && isSamePtr(p1, p2)
+ && t2.Size() == sizeof(t1)
+ && disjoint(p1, sizeof(t1), p3, sizeof(t3))
+ && disjoint(p1, sizeof(t1), p4, sizeof(t4))
+ && disjoint(p1, sizeof(t1), p5, sizeof(t5))
+ -> mem
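+// Illustrative sketch (editorial note): code like
+//	x := s.f
+//	s.f = x
+// produces (Store p (Load p mem) mem), which reduces to mem; the longer
+// variants skip up to three intervening disjoint stores.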
+
+// Don't Store zeros to cleared variables.
+(Store {t} (OffPtr [o] p1) x mem:(Zero [n] p2 _))
+ && isConstZero(x)
+ && o >= 0 && sizeof(t) + o <= n && isSamePtr(p1, p2)
+ -> mem
+(Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Zero [n] p3 _)))
+ && isConstZero(x)
+ && o1 >= 0 && sizeof(t1) + o1 <= n && isSamePtr(p1, p3)
+ && disjoint(op, sizeof(t1), p2, sizeof(t2))
+ -> mem
+(Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Store {t3} p3 _ (Zero [n] p4 _))))
+ && isConstZero(x)
+ && o1 >= 0 && sizeof(t1) + o1 <= n && isSamePtr(p1, p4)
+ && disjoint(op, sizeof(t1), p2, sizeof(t2))
+ && disjoint(op, sizeof(t1), p3, sizeof(t3))
+ -> mem
+(Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Zero [n] p5 _)))))
+ && isConstZero(x)
+ && o1 >= 0 && sizeof(t1) + o1 <= n && isSamePtr(p1, p5)
+ && disjoint(op, sizeof(t1), p2, sizeof(t2))
+ && disjoint(op, sizeof(t1), p3, sizeof(t3))
+ && disjoint(op, sizeof(t1), p4, sizeof(t4))
+ -> mem
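+// Illustrative sketch (editorial note):
+//	var s [4]int64 // Zero of 32 bytes
+//	s[1] = 0       // Store of a constant zero
+// the Store lands inside memory the Zero already cleared
+// (sizeof(t) + o <= n), so it is dropped.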
// Collapse OffPtr
(OffPtr (OffPtr p [b]) [a]) -> (OffPtr p [a+b])
// un-SSAable values use mem->mem copies
(Store {t} dst (Load src mem) mem) && !fe.CanSSA(t.(*types.Type)) ->
- (Move {t} [t.(*types.Type).Size()] dst src mem)
+ (Move {t} [sizeof(t)] dst src mem)
(Store {t} dst (Load src mem) (VarDef {x} mem)) && !fe.CanSSA(t.(*types.Type)) ->
- (Move {t} [t.(*types.Type).Size()] dst src (VarDef {x} mem))
+ (Move {t} [sizeof(t)] dst src (VarDef {x} mem))
// array ops
(ArraySelect (ArrayMake1 x)) -> x
// so this rule should trigger reliably.
(InterCall [argsize] (Load (OffPtr [off] (ITab (IMake (Addr {itab} (SB)) _))) _) mem) && devirt(v, itab, off) != nil ->
(StaticCall [argsize] {devirt(v, itab, off)} mem)
+
+// Move and Zero optimizations.
+// Move source and destination may overlap.
+
+// Convert Moves into Zeros when the source is known to be zeros.
+(Move {t} [n] dst1 src mem:(Zero {t} [n] dst2 _)) && isSamePtr(src, dst2)
+ -> (Zero {t} [n] dst1 mem)
+(Move {t} [n] dst1 src mem:(VarDef (Zero {t} [n] dst0 _))) && isSamePtr(src, dst0)
+ -> (Zero {t} [n] dst1 mem)
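+// Illustrative sketch (editorial note):
+//	var src T // Zero
+//	dst = src // Move
+// copying from memory just cleared by a same-sized Zero is rewritten as
+// zeroing dst directly, avoiding the read of src.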
+
+// Don't Store to variables that are about to be overwritten by Move/Zero.
+(Zero {t1} [n] p1 store:(Store {t2} (OffPtr [o2] p2) _ mem))
+ && isSamePtr(p1, p2) && store.Uses == 1
+ && n >= o2 + sizeof(t2)
+ && clobber(store)
+ -> (Zero {t1} [n] p1 mem)
+(Move {t1} [n] dst1 src1 store:(Store {t2} op:(OffPtr [o2] dst2) _ mem))
+ && isSamePtr(dst1, dst2) && store.Uses == 1
+ && n >= o2 + sizeof(t2)
+ && disjoint(src1, n, op, sizeof(t2))
+ && clobber(store)
+ -> (Move {t1} [n] dst1 src1 mem)
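+// Illustrative sketch (editorial note): in
+//	s.f = x // single-use Store
+//	s = T{} // Zero covering all of s
+// the Store is dead because the Zero overwrites it (n >= o2 + sizeof(t2));
+// for the Move form the store must also be disjoint from the Move's source.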
+
+// Don't Move to variables that are immediately completely overwritten.
+(Zero {t} [n] dst1 move:(Move {t} [n] dst2 _ mem))
+ && move.Uses == 1
+ && isSamePtr(dst1, dst2)
+ && clobber(move)
+ -> (Zero {t} [n] dst1 mem)
+(Move {t} [n] dst1 src1 move:(Move {t} [n] dst2 _ mem))
+ && move.Uses == 1
+ && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n)
+ && clobber(move)
+ -> (Move {t} [n] dst1 src1 mem)
+(Zero {t} [n] dst1 vardef:(VarDef {x} move:(Move {t} [n] dst2 _ mem)))
+ && move.Uses == 1 && vardef.Uses == 1
+ && isSamePtr(dst1, dst2)
+ && clobber(move) && clobber(vardef)
+ -> (Zero {t} [n] dst1 (VarDef {x} mem))
+(Move {t} [n] dst1 src1 vardef:(VarDef {x} move:(Move {t} [n] dst2 _ mem)))
+ && move.Uses == 1 && vardef.Uses == 1
+ && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n)
+ && clobber(move) && clobber(vardef)
+ -> (Move {t} [n] dst1 src1 (VarDef {x} mem))
+(Store {t1} op1:(OffPtr [o1] p1) d1
+ m2:(Store {t2} op2:(OffPtr [0] p2) d2
+ m3:(Move [n] p3 _ mem)))
+ && m2.Uses == 1 && m3.Uses == 1
+ && o1 == sizeof(t2)
+ && n == sizeof(t2) + sizeof(t1)
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3)
+ && clobber(m2) && clobber(m3)
+ -> (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
+(Store {t1} op1:(OffPtr [o1] p1) d1
+ m2:(Store {t2} op2:(OffPtr [o2] p2) d2
+ m3:(Store {t3} op3:(OffPtr [0] p3) d3
+ m4:(Move [n] p4 _ mem))))
+ && m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1
+ && o2 == sizeof(t3)
+ && o1-o2 == sizeof(t2)
+ && n == sizeof(t3) + sizeof(t2) + sizeof(t1)
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4)
+ && clobber(m2) && clobber(m3) && clobber(m4)
+ -> (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 mem)))
+(Store {t1} op1:(OffPtr [o1] p1) d1
+ m2:(Store {t2} op2:(OffPtr [o2] p2) d2
+ m3:(Store {t3} op3:(OffPtr [o3] p3) d3
+ m4:(Store {t4} op4:(OffPtr [0] p4) d4
+ m5:(Move [n] p5 _ mem)))))
+ && m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1
+ && o3 == sizeof(t4)
+ && o2-o3 == sizeof(t3)
+ && o1-o2 == sizeof(t2)
+ && n == sizeof(t4) + sizeof(t3) + sizeof(t2) + sizeof(t1)
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5)
+ && clobber(m2) && clobber(m3) && clobber(m4) && clobber(m5)
+ -> (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 (Store {t4} op4 d4 mem))))
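+// Illustrative sketch (editorial note): when field Stores exactly tile the
+// n bytes a preceding Move wrote, e.g.
+//	s = src // Move
+//	s.a = x // Stores cover all n bytes of s
+//	s.b = y
+// the Move is dead and only the Stores remain.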
+
+// Don't Zero variables that are immediately completely overwritten
+// before being accessed.
+(Move {t} [n] dst1 src1 zero:(Zero {t} [n] dst2 mem))
+ && zero.Uses == 1
+ && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n)
+ && clobber(zero)
+ -> (Move {t} [n] dst1 src1 mem)
+(Move {t} [n] dst1 src1 vardef:(VarDef {x} zero:(Zero {t} [n] dst2 mem)))
+ && zero.Uses == 1 && vardef.Uses == 1
+ && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n)
+ && clobber(zero) && clobber(vardef)
+ -> (Move {t} [n] dst1 src1 (VarDef {x} mem))
+(Store {t1} op1:(OffPtr [o1] p1) d1
+ m2:(Store {t2} op2:(OffPtr [0] p2) d2
+ m3:(Zero [n] p3 mem)))
+ && m2.Uses == 1 && m3.Uses == 1
+ && o1 == sizeof(t2)
+ && n == sizeof(t2) + sizeof(t1)
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3)
+ && clobber(m2) && clobber(m3)
+ -> (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
+(Store {t1} op1:(OffPtr [o1] p1) d1
+ m2:(Store {t2} op2:(OffPtr [o2] p2) d2
+ m3:(Store {t3} op3:(OffPtr [0] p3) d3
+ m4:(Zero [n] p4 mem))))
+ && m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1
+ && o2 == sizeof(t3)
+ && o1-o2 == sizeof(t2)
+ && n == sizeof(t3) + sizeof(t2) + sizeof(t1)
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4)
+ && clobber(m2) && clobber(m3) && clobber(m4)
+ -> (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 mem)))
+(Store {t1} op1:(OffPtr [o1] p1) d1
+ m2:(Store {t2} op2:(OffPtr [o2] p2) d2
+ m3:(Store {t3} op3:(OffPtr [o3] p3) d3
+ m4:(Store {t4} op4:(OffPtr [0] p4) d4
+ m5:(Zero [n] p5 mem)))))
+ && m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1
+ && o3 == sizeof(t4)
+ && o2-o3 == sizeof(t3)
+ && o1-o2 == sizeof(t2)
+ && n == sizeof(t4) + sizeof(t3) + sizeof(t2) + sizeof(t1)
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5)
+ && clobber(m2) && clobber(m3) && clobber(m4) && clobber(m5)
+ -> (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 (Store {t4} op4 d4 mem))))
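+// Illustrative sketch (editorial note): a Zero is dead both when a
+// same-sized Move immediately overwrites it and when field Stores tile it:
+//	s = T{} // Zero
+//	s.a = x // Stores cover all n bytes of s
+//	s.b = y
+// leaves just the two Stores.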
+
+// Don't Move from memory if the values are likely to already be
+// in registers.
+(Move {t1} [n] dst p1
+ mem:(Store {t2} op2:(OffPtr [o2] p2) d1
+ (Store {t3} op3:(OffPtr [0] p3) d2 _)))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && registerizable(b, t2)
+ && registerizable(b, t3)
+ && o2 == sizeof(t3)
+ && n == sizeof(t2) + sizeof(t3)
+ -> (Store {t2} (OffPtr <t2.(*types.Type)> [o2] dst) d1
+ (Store {t3} (OffPtr <t3.(*types.Type)> [0] dst) d2 mem))
+(Move {t1} [n] dst p1
+ mem:(Store {t2} op2:(OffPtr [o2] p2) d1
+ (Store {t3} op3:(OffPtr [o3] p3) d2
+ (Store {t4} op4:(OffPtr [0] p4) d3 _))))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && alignof(t4) <= alignof(t1)
+ && registerizable(b, t2)
+ && registerizable(b, t3)
+ && registerizable(b, t4)
+ && o3 == sizeof(t4)
+ && o2-o3 == sizeof(t3)
+ && n == sizeof(t2) + sizeof(t3) + sizeof(t4)
+ -> (Store {t2} (OffPtr <t2.(*types.Type)> [o2] dst) d1
+ (Store {t3} (OffPtr <t3.(*types.Type)> [o3] dst) d2
+ (Store {t4} (OffPtr <t4.(*types.Type)> [0] dst) d3 mem)))
+(Move {t1} [n] dst p1
+ mem:(Store {t2} op2:(OffPtr [o2] p2) d1
+ (Store {t3} op3:(OffPtr [o3] p3) d2
+ (Store {t4} op4:(OffPtr [o4] p4) d3
+ (Store {t5} op5:(OffPtr [0] p5) d4 _)))))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && alignof(t4) <= alignof(t1)
+ && alignof(t5) <= alignof(t1)
+ && registerizable(b, t2)
+ && registerizable(b, t3)
+ && registerizable(b, t4)
+ && registerizable(b, t5)
+ && o4 == sizeof(t5)
+ && o3-o4 == sizeof(t4)
+ && o2-o3 == sizeof(t3)
+ && n == sizeof(t2) + sizeof(t3) + sizeof(t4) + sizeof(t5)
+ -> (Store {t2} (OffPtr <t2.(*types.Type)> [o2] dst) d1
+ (Store {t3} (OffPtr <t3.(*types.Type)> [o3] dst) d2
+ (Store {t4} (OffPtr <t4.(*types.Type)> [o4] dst) d3
+ (Store {t5} (OffPtr <t5.(*types.Type)> [0] dst) d4 mem))))
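+// Illustrative sketch (editorial note): for
+//	t := [2]int64{x, y} // Stores of register values
+//	u = t               // Move
+// the Move would re-read values that never need to touch memory; it is
+// replaced by Stores of d1, d2, ... directly into dst.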
+
+// Same thing but with VarDef in the middle.
+(Move {t1} [n] dst p1
+ mem:(VarDef
+ (Store {t2} op2:(OffPtr [o2] p2) d1
+ (Store {t3} op3:(OffPtr [0] p3) d2 _))))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && registerizable(b, t2)
+ && registerizable(b, t3)
+ && o2 == sizeof(t3)
+ && n == sizeof(t2) + sizeof(t3)
+ -> (Store {t2} (OffPtr <t2.(*types.Type)> [o2] dst) d1
+ (Store {t3} (OffPtr <t3.(*types.Type)> [0] dst) d2 mem))
+(Move {t1} [n] dst p1
+ mem:(VarDef
+ (Store {t2} op2:(OffPtr [o2] p2) d1
+ (Store {t3} op3:(OffPtr [o3] p3) d2
+ (Store {t4} op4:(OffPtr [0] p4) d3 _)))))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && alignof(t4) <= alignof(t1)
+ && registerizable(b, t2)
+ && registerizable(b, t3)
+ && registerizable(b, t4)
+ && o3 == sizeof(t4)
+ && o2-o3 == sizeof(t3)
+ && n == sizeof(t2) + sizeof(t3) + sizeof(t4)
+ -> (Store {t2} (OffPtr <t2.(*types.Type)> [o2] dst) d1
+ (Store {t3} (OffPtr <t3.(*types.Type)> [o3] dst) d2
+ (Store {t4} (OffPtr <t4.(*types.Type)> [0] dst) d3 mem)))
+(Move {t1} [n] dst p1
+ mem:(VarDef
+ (Store {t2} op2:(OffPtr [o2] p2) d1
+ (Store {t3} op3:(OffPtr [o3] p3) d2
+ (Store {t4} op4:(OffPtr [o4] p4) d3
+ (Store {t5} op5:(OffPtr [0] p5) d4 _))))))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && alignof(t4) <= alignof(t1)
+ && alignof(t5) <= alignof(t1)
+ && registerizable(b, t2)
+ && registerizable(b, t3)
+ && registerizable(b, t4)
+ && registerizable(b, t5)
+ && o4 == sizeof(t5)
+ && o3-o4 == sizeof(t4)
+ && o2-o3 == sizeof(t3)
+ && n == sizeof(t2) + sizeof(t3) + sizeof(t4) + sizeof(t5)
+ -> (Store {t2} (OffPtr <t2.(*types.Type)> [o2] dst) d1
+ (Store {t3} (OffPtr <t3.(*types.Type)> [o3] dst) d2
+ (Store {t4} (OffPtr <t4.(*types.Type)> [o4] dst) d3
+ (Store {t5} (OffPtr <t5.(*types.Type)> [0] dst) d4 mem))))
+
+// Prefer to Zero and Store rather than to Move.
+(Move {t1} [n] dst p1
+ mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1
+ (Zero {t3} [n] p3 _)))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && registerizable(b, t2)
+ && n >= o2 + sizeof(t2)
+ -> (Store {t2} (OffPtr <tt2> [o2] dst) d1
+ (Zero {t1} [n] dst mem))
+(Move {t1} [n] dst p1
+ mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1
+ (Store {t3} (OffPtr <tt3> [o3] p3) d2
+ (Zero {t4} [n] p4 _))))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && alignof(t4) <= alignof(t1)
+ && registerizable(b, t2)
+ && registerizable(b, t3)
+ && n >= o2 + sizeof(t2)
+ && n >= o3 + sizeof(t3)
+ -> (Store {t2} (OffPtr <tt2> [o2] dst) d1
+ (Store {t3} (OffPtr <tt3> [o3] dst) d2
+ (Zero {t1} [n] dst mem)))
+(Move {t1} [n] dst p1
+ mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1
+ (Store {t3} (OffPtr <tt3> [o3] p3) d2
+ (Store {t4} (OffPtr <tt4> [o4] p4) d3
+ (Zero {t5} [n] p5 _)))))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && alignof(t4) <= alignof(t1)
+ && alignof(t5) <= alignof(t1)
+ && registerizable(b, t2)
+ && registerizable(b, t3)
+ && registerizable(b, t4)
+ && n >= o2 + sizeof(t2)
+ && n >= o3 + sizeof(t3)
+ && n >= o4 + sizeof(t4)
+ -> (Store {t2} (OffPtr <tt2> [o2] dst) d1
+ (Store {t3} (OffPtr <tt3> [o3] dst) d2
+ (Store {t4} (OffPtr <tt4> [o4] dst) d3
+ (Zero {t1} [n] dst mem))))
+(Move {t1} [n] dst p1
+ mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1
+ (Store {t3} (OffPtr <tt3> [o3] p3) d2
+ (Store {t4} (OffPtr <tt4> [o4] p4) d3
+ (Store {t5} (OffPtr <tt5> [o5] p5) d4
+ (Zero {t6} [n] p6 _))))))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && alignof(t4) <= alignof(t1)
+ && alignof(t5) <= alignof(t1)
+ && alignof(t6) <= alignof(t1)
+ && registerizable(b, t2)
+ && registerizable(b, t3)
+ && registerizable(b, t4)
+ && registerizable(b, t5)
+ && n >= o2 + sizeof(t2)
+ && n >= o3 + sizeof(t3)
+ && n >= o4 + sizeof(t4)
+ && n >= o5 + sizeof(t5)
+ -> (Store {t2} (OffPtr <tt2> [o2] dst) d1
+ (Store {t3} (OffPtr <tt3> [o3] dst) d2
+ (Store {t4} (OffPtr <tt4> [o4] dst) d3
+ (Store {t5} (OffPtr <tt5> [o5] dst) d4
+ (Zero {t1} [n] dst mem)))))
+(Move {t1} [n] dst p1
+ mem:(VarDef
+ (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1
+ (Zero {t3} [n] p3 _))))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && registerizable(b, t2)
+ && n >= o2 + sizeof(t2)
+ -> (Store {t2} (OffPtr <tt2> [o2] dst) d1
+ (Zero {t1} [n] dst mem))
+(Move {t1} [n] dst p1
+ mem:(VarDef
+ (Store {t2} (OffPtr <tt2> [o2] p2) d1
+ (Store {t3} (OffPtr <tt3> [o3] p3) d2
+ (Zero {t4} [n] p4 _)))))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && alignof(t4) <= alignof(t1)
+ && registerizable(b, t2)
+ && registerizable(b, t3)
+ && n >= o2 + sizeof(t2)
+ && n >= o3 + sizeof(t3)
+ -> (Store {t2} (OffPtr <tt2> [o2] dst) d1
+ (Store {t3} (OffPtr <tt3> [o3] dst) d2
+ (Zero {t1} [n] dst mem)))
+(Move {t1} [n] dst p1
+ mem:(VarDef
+ (Store {t2} (OffPtr <tt2> [o2] p2) d1
+ (Store {t3} (OffPtr <tt3> [o3] p3) d2
+ (Store {t4} (OffPtr <tt4> [o4] p4) d3
+ (Zero {t5} [n] p5 _))))))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && alignof(t4) <= alignof(t1)
+ && alignof(t5) <= alignof(t1)
+ && registerizable(b, t2)
+ && registerizable(b, t3)
+ && registerizable(b, t4)
+ && n >= o2 + sizeof(t2)
+ && n >= o3 + sizeof(t3)
+ && n >= o4 + sizeof(t4)
+ -> (Store {t2} (OffPtr <tt2> [o2] dst) d1
+ (Store {t3} (OffPtr <tt3> [o3] dst) d2
+ (Store {t4} (OffPtr <tt4> [o4] dst) d3
+ (Zero {t1} [n] dst mem))))
+(Move {t1} [n] dst p1
+ mem:(VarDef
+ (Store {t2} (OffPtr <tt2> [o2] p2) d1
+ (Store {t3} (OffPtr <tt3> [o3] p3) d2
+ (Store {t4} (OffPtr <tt4> [o4] p4) d3
+ (Store {t5} (OffPtr <tt5> [o5] p5) d4
+ (Zero {t6} [n] p6 _)))))))
+ && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6)
+ && alignof(t2) <= alignof(t1)
+ && alignof(t3) <= alignof(t1)
+ && alignof(t4) <= alignof(t1)
+ && alignof(t5) <= alignof(t1)
+ && alignof(t6) <= alignof(t1)
+ && registerizable(b, t2)
+ && registerizable(b, t3)
+ && registerizable(b, t4)
+ && registerizable(b, t5)
+ && n >= o2 + sizeof(t2)
+ && n >= o3 + sizeof(t3)
+ && n >= o4 + sizeof(t4)
+ && n >= o5 + sizeof(t5)
+ -> (Store {t2} (OffPtr <tt2> [o2] dst) d1
+ (Store {t3} (OffPtr <tt3> [o3] dst) d2
+ (Store {t4} (OffPtr <tt4> [o4] dst) d3
+ (Store {t5} (OffPtr <tt5> [o5] dst) d4
+ (Zero {t1} [n] dst mem)))))
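+// Illustrative sketch (editorial note): for a mostly zero composite such as
+//	s := T{f: x} // Zero, then one Store
+//	d = s        // Move
+// the Move is replaced by zeroing d and storing x at f's offset, avoiding a
+// read-back of the zeroed bytes.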
case OpLess8U:
return rewriteValuegeneric_OpLess8U_0(v)
case OpLoad:
- return rewriteValuegeneric_OpLoad_0(v) || rewriteValuegeneric_OpLoad_10(v)
+ return rewriteValuegeneric_OpLoad_0(v) || rewriteValuegeneric_OpLoad_10(v) || rewriteValuegeneric_OpLoad_20(v)
case OpLsh16x16:
return rewriteValuegeneric_OpLsh16x16_0(v)
case OpLsh16x32:
return rewriteValuegeneric_OpMod8_0(v)
case OpMod8u:
return rewriteValuegeneric_OpMod8u_0(v)
+ case OpMove:
+ return rewriteValuegeneric_OpMove_0(v) || rewriteValuegeneric_OpMove_10(v) || rewriteValuegeneric_OpMove_20(v)
case OpMul16:
return rewriteValuegeneric_OpMul16_0(v) || rewriteValuegeneric_OpMul16_10(v)
case OpMul32:
case OpStaticCall:
return rewriteValuegeneric_OpStaticCall_0(v)
case OpStore:
- return rewriteValuegeneric_OpStore_0(v) || rewriteValuegeneric_OpStore_10(v)
+ return rewriteValuegeneric_OpStore_0(v) || rewriteValuegeneric_OpStore_10(v) || rewriteValuegeneric_OpStore_20(v)
case OpStringLen:
return rewriteValuegeneric_OpStringLen_0(v)
case OpStringPtr:
func rewriteValuegeneric_OpLoad_0(v *Value) bool {
b := v.Block
_ = b
- fe := b.Func.fe
- _ = fe
// match: (Load <t1> p1 (Store {t2} p2 x _))
- // cond: isSamePtr(p1,p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.(*types.Type).Size()
+ // cond: isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == sizeof(t2)
// result: x
for {
t1 := v.Type
_ = v_1.Args[2]
p2 := v_1.Args[0]
x := v_1.Args[1]
- if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.(*types.Type).Size()) {
+ if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == sizeof(t2)) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ // match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 x _)))
+ // cond: isSamePtr(p1, p3) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == sizeof(t2) && disjoint(p3, sizeof(t3), p2, sizeof(t2))
+ // result: x
+ for {
+ t1 := v.Type
+ _ = v.Args[1]
+ p1 := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpStore {
+ break
+ }
+ t2 := v_1.Aux
+ _ = v_1.Args[2]
+ p2 := v_1.Args[0]
+ v_1_2 := v_1.Args[2]
+ if v_1_2.Op != OpStore {
+ break
+ }
+ t3 := v_1_2.Aux
+ _ = v_1_2.Args[2]
+ p3 := v_1_2.Args[0]
+ x := v_1_2.Args[1]
+ if !(isSamePtr(p1, p3) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == sizeof(t2) && disjoint(p3, sizeof(t3), p2, sizeof(t2))) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ // match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 x _))))
+ // cond: isSamePtr(p1, p4) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == sizeof(t2) && disjoint(p4, sizeof(t4), p2, sizeof(t2)) && disjoint(p4, sizeof(t4), p3, sizeof(t3))
+ // result: x
+ for {
+ t1 := v.Type
+ _ = v.Args[1]
+ p1 := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpStore {
+ break
+ }
+ t2 := v_1.Aux
+ _ = v_1.Args[2]
+ p2 := v_1.Args[0]
+ v_1_2 := v_1.Args[2]
+ if v_1_2.Op != OpStore {
+ break
+ }
+ t3 := v_1_2.Aux
+ _ = v_1_2.Args[2]
+ p3 := v_1_2.Args[0]
+ v_1_2_2 := v_1_2.Args[2]
+ if v_1_2_2.Op != OpStore {
+ break
+ }
+ t4 := v_1_2_2.Aux
+ _ = v_1_2_2.Args[2]
+ p4 := v_1_2_2.Args[0]
+ x := v_1_2_2.Args[1]
+ if !(isSamePtr(p1, p4) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == sizeof(t2) && disjoint(p4, sizeof(t4), p2, sizeof(t2)) && disjoint(p4, sizeof(t4), p3, sizeof(t3))) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ // match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 x _)))))
+ // cond: isSamePtr(p1, p5) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == sizeof(t2) && disjoint(p5, sizeof(t5), p2, sizeof(t2)) && disjoint(p5, sizeof(t5), p3, sizeof(t3)) && disjoint(p5, sizeof(t5), p4, sizeof(t4))
+ // result: x
+ for {
+ t1 := v.Type
+ _ = v.Args[1]
+ p1 := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpStore {
+ break
+ }
+ t2 := v_1.Aux
+ _ = v_1.Args[2]
+ p2 := v_1.Args[0]
+ v_1_2 := v_1.Args[2]
+ if v_1_2.Op != OpStore {
+ break
+ }
+ t3 := v_1_2.Aux
+ _ = v_1_2.Args[2]
+ p3 := v_1_2.Args[0]
+ v_1_2_2 := v_1_2.Args[2]
+ if v_1_2_2.Op != OpStore {
+ break
+ }
+ t4 := v_1_2_2.Aux
+ _ = v_1_2_2.Args[2]
+ p4 := v_1_2_2.Args[0]
+ v_1_2_2_2 := v_1_2_2.Args[2]
+ if v_1_2_2_2.Op != OpStore {
+ break
+ }
+ t5 := v_1_2_2_2.Aux
+ _ = v_1_2_2_2.Args[2]
+ p5 := v_1_2_2_2.Args[0]
+ x := v_1_2_2_2.Args[1]
+ if !(isSamePtr(p1, p5) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == sizeof(t2) && disjoint(p5, sizeof(t5), p2, sizeof(t2)) && disjoint(p5, sizeof(t5), p3, sizeof(t3)) && disjoint(p5, sizeof(t5), p4, sizeof(t4))) {
break
}
v.reset(OpCopy)
return true
}
// match: (Load <t1> p1 (Store {t2} p2 (Const64 [x]) _))
- // cond: isSamePtr(p1,p2) && t2.(*types.Type).Size() == 8 && is64BitFloat(t1)
+ // cond: isSamePtr(p1,p2) && sizeof(t2) == 8 && is64BitFloat(t1)
// result: (Const64F [x])
for {
t1 := v.Type
break
}
x := v_1_1.AuxInt
- if !(isSamePtr(p1, p2) && t2.(*types.Type).Size() == 8 && is64BitFloat(t1)) {
+ if !(isSamePtr(p1, p2) && sizeof(t2) == 8 && is64BitFloat(t1)) {
break
}
v.reset(OpConst64F)
return true
}
// match: (Load <t1> p1 (Store {t2} p2 (Const32 [x]) _))
- // cond: isSamePtr(p1,p2) && t2.(*types.Type).Size() == 4 && is32BitFloat(t1)
+ // cond: isSamePtr(p1,p2) && sizeof(t2) == 4 && is32BitFloat(t1)
// result: (Const32F [f2i(float64(math.Float32frombits(uint32(x))))])
for {
t1 := v.Type
break
}
x := v_1_1.AuxInt
- if !(isSamePtr(p1, p2) && t2.(*types.Type).Size() == 4 && is32BitFloat(t1)) {
+ if !(isSamePtr(p1, p2) && sizeof(t2) == 4 && is32BitFloat(t1)) {
break
}
v.reset(OpConst32F)
return true
}
// match: (Load <t1> p1 (Store {t2} p2 (Const64F [x]) _))
- // cond: isSamePtr(p1,p2) && t2.(*types.Type).Size() == 8 && is64BitInt(t1)
+ // cond: isSamePtr(p1,p2) && sizeof(t2) == 8 && is64BitInt(t1)
// result: (Const64 [x])
for {
t1 := v.Type
break
}
x := v_1_1.AuxInt
- if !(isSamePtr(p1, p2) && t2.(*types.Type).Size() == 8 && is64BitInt(t1)) {
+ if !(isSamePtr(p1, p2) && sizeof(t2) == 8 && is64BitInt(t1)) {
break
}
v.reset(OpConst64)
return true
}
// match: (Load <t1> p1 (Store {t2} p2 (Const32F [x]) _))
- // cond: isSamePtr(p1,p2) && t2.(*types.Type).Size() == 4 && is32BitInt(t1)
+ // cond: isSamePtr(p1,p2) && sizeof(t2) == 4 && is32BitInt(t1)
// result: (Const32 [int64(int32(math.Float32bits(float32(i2f(x)))))])
for {
t1 := v.Type
break
}
x := v_1_1.AuxInt
- if !(isSamePtr(p1, p2) && t2.(*types.Type).Size() == 4 && is32BitInt(t1)) {
+ if !(isSamePtr(p1, p2) && sizeof(t2) == 4 && is32BitInt(t1)) {
break
}
v.reset(OpConst32)
v.AuxInt = int64(int32(math.Float32bits(float32(i2f(x)))))
return true
}
- // match: (Load <t> _ _)
- // cond: t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t)
- // result: (StructMake0)
+ // match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ mem:(Zero [n] p3 _)))
+ // cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p3) && disjoint(op, t1.Size(), p2, sizeof(t2))
+ // result: @mem.Block (Load <t1> op mem)
for {
- t := v.Type
+ t1 := v.Type
_ = v.Args[1]
- if !(t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t)) {
+ op := v.Args[0]
+ if op.Op != OpOffPtr {
break
}
- v.reset(OpStructMake0)
- return true
- }
- // match: (Load <t> ptr mem)
- // cond: t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t)
- // result: (StructMake1 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem))
- for {
- t := v.Type
- _ = v.Args[1]
- ptr := v.Args[0]
- mem := v.Args[1]
- if !(t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t)) {
+ o1 := op.AuxInt
+ p1 := op.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpStore {
break
}
- v.reset(OpStructMake1)
- v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
- v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
- v1.AuxInt = 0
- v1.AddArg(ptr)
- v0.AddArg(v1)
- v0.AddArg(mem)
- v.AddArg(v0)
- return true
- }
- // match: (Load <t> ptr mem)
- // cond: t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t)
- // result: (StructMake2 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
- for {
- t := v.Type
- _ = v.Args[1]
- ptr := v.Args[0]
- mem := v.Args[1]
- if !(t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t)) {
+ t2 := v_1.Aux
+ _ = v_1.Args[2]
+ p2 := v_1.Args[0]
+ mem := v_1.Args[2]
+ if mem.Op != OpZero {
break
}
- v.reset(OpStructMake2)
- v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
- v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
- v1.AuxInt = 0
- v1.AddArg(ptr)
- v0.AddArg(v1)
- v0.AddArg(mem)
- v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
- v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
- v3.AuxInt = t.FieldOff(1)
- v3.AddArg(ptr)
- v2.AddArg(v3)
- v2.AddArg(mem)
- v.AddArg(v2)
- return true
- }
- // match: (Load <t> ptr mem)
- // cond: t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t)
- // result: (StructMake3 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
- for {
- t := v.Type
- _ = v.Args[1]
- ptr := v.Args[0]
- mem := v.Args[1]
- if !(t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t)) {
+ n := mem.AuxInt
+ _ = mem.Args[1]
+ p3 := mem.Args[0]
+ if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p3) && disjoint(op, t1.Size(), p2, sizeof(t2))) {
break
}
- v.reset(OpStructMake3)
- v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
- v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
- v1.AuxInt = 0
- v1.AddArg(ptr)
- v0.AddArg(v1)
- v0.AddArg(mem)
+ b = mem.Block
+ v0 := b.NewValue0(v.Pos, OpLoad, t1)
+ v.reset(OpCopy)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
- v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
- v3.AuxInt = t.FieldOff(1)
- v3.AddArg(ptr)
- v2.AddArg(v3)
- v2.AddArg(mem)
- v.AddArg(v2)
- v4 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
- v5 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
- v5.AuxInt = t.FieldOff(2)
- v5.AddArg(ptr)
- v4.AddArg(v5)
- v4.AddArg(mem)
- v.AddArg(v4)
+ v0.AddArg(op)
+ v0.AddArg(mem)
return true
}
- // match: (Load <t> ptr mem)
- // cond: t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t)
- // result: (StructMake4 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem) (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))
+ // match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ mem:(Zero [n] p4 _))))
+ // cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p4) && disjoint(op, t1.Size(), p2, sizeof(t2)) && disjoint(op, t1.Size(), p3, sizeof(t3))
+ // result: @mem.Block (Load <t1> op mem)
for {
- t := v.Type
+ t1 := v.Type
_ = v.Args[1]
- ptr := v.Args[0]
- mem := v.Args[1]
- if !(t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t)) {
+ op := v.Args[0]
+ if op.Op != OpOffPtr {
break
}
- v.reset(OpStructMake4)
- v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
- v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
- v1.AuxInt = 0
- v1.AddArg(ptr)
- v0.AddArg(v1)
- v0.AddArg(mem)
+ o1 := op.AuxInt
+ p1 := op.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpStore {
+ break
+ }
+ t2 := v_1.Aux
+ _ = v_1.Args[2]
+ p2 := v_1.Args[0]
+ v_1_2 := v_1.Args[2]
+ if v_1_2.Op != OpStore {
+ break
+ }
+ t3 := v_1_2.Aux
+ _ = v_1_2.Args[2]
+ p3 := v_1_2.Args[0]
+ mem := v_1_2.Args[2]
+ if mem.Op != OpZero {
+ break
+ }
+ n := mem.AuxInt
+ _ = mem.Args[1]
+ p4 := mem.Args[0]
+ if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p4) && disjoint(op, t1.Size(), p2, sizeof(t2)) && disjoint(op, t1.Size(), p3, sizeof(t3))) {
+ break
+ }
+ b = mem.Block
+ v0 := b.NewValue0(v.Pos, OpLoad, t1)
+ v.reset(OpCopy)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
- v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
- v3.AuxInt = t.FieldOff(1)
- v3.AddArg(ptr)
- v2.AddArg(v3)
- v2.AddArg(mem)
- v.AddArg(v2)
- v4 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
- v5 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
- v5.AuxInt = t.FieldOff(2)
- v5.AddArg(ptr)
- v4.AddArg(v5)
- v4.AddArg(mem)
- v.AddArg(v4)
- v6 := b.NewValue0(v.Pos, OpLoad, t.FieldType(3))
- v7 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
- v7.AuxInt = t.FieldOff(3)
- v7.AddArg(ptr)
- v6.AddArg(v7)
- v6.AddArg(mem)
- v.AddArg(v6)
+ v0.AddArg(op)
+ v0.AddArg(mem)
return true
}
return false
_ = b
fe := b.Func.fe
_ = fe
- // match: (Load <t> _ _)
- // cond: t.IsArray() && t.NumElem() == 0
- // result: (ArrayMake0)
+ // match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ mem:(Zero [n] p5 _)))))
+ // cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p5) && disjoint(op, t1.Size(), p2, sizeof(t2)) && disjoint(op, t1.Size(), p3, sizeof(t3)) && disjoint(op, t1.Size(), p4, sizeof(t4))
+ // result: @mem.Block (Load <t1> op mem)
for {
- t := v.Type
+ t1 := v.Type
_ = v.Args[1]
- if !(t.IsArray() && t.NumElem() == 0) {
+ op := v.Args[0]
+ if op.Op != OpOffPtr {
break
}
- v.reset(OpArrayMake0)
+ o1 := op.AuxInt
+ p1 := op.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpStore {
+ break
+ }
+ t2 := v_1.Aux
+ _ = v_1.Args[2]
+ p2 := v_1.Args[0]
+ v_1_2 := v_1.Args[2]
+ if v_1_2.Op != OpStore {
+ break
+ }
+ t3 := v_1_2.Aux
+ _ = v_1_2.Args[2]
+ p3 := v_1_2.Args[0]
+ v_1_2_2 := v_1_2.Args[2]
+ if v_1_2_2.Op != OpStore {
+ break
+ }
+ t4 := v_1_2_2.Aux
+ _ = v_1_2_2.Args[2]
+ p4 := v_1_2_2.Args[0]
+ mem := v_1_2_2.Args[2]
+ if mem.Op != OpZero {
+ break
+ }
+ n := mem.AuxInt
+ _ = mem.Args[1]
+ p5 := mem.Args[0]
+ if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p5) && disjoint(op, t1.Size(), p2, sizeof(t2)) && disjoint(op, t1.Size(), p3, sizeof(t3)) && disjoint(op, t1.Size(), p4, sizeof(t4))) {
+ break
+ }
+ b = mem.Block
+ v0 := b.NewValue0(v.Pos, OpLoad, t1)
+ v.reset(OpCopy)
+ v.AddArg(v0)
+ v0.AddArg(op)
+ v0.AddArg(mem)
return true
}
- // match: (Load <t> ptr mem)
- // cond: t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t)
- // result: (ArrayMake1 (Load <t.Elem()> ptr mem))
+ // match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 _ mem:(Zero [n] p6 _))))))
+ // cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p6) && disjoint(op, t1.Size(), p2, sizeof(t2)) && disjoint(op, t1.Size(), p3, sizeof(t3)) && disjoint(op, t1.Size(), p4, sizeof(t4)) && disjoint(op, t1.Size(), p5, sizeof(t5))
+ // result: @mem.Block (Load <t1> op mem)
for {
- t := v.Type
+ t1 := v.Type
_ = v.Args[1]
- ptr := v.Args[0]
- mem := v.Args[1]
- if !(t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t)) {
+ op := v.Args[0]
+ if op.Op != OpOffPtr {
break
}
- v.reset(OpArrayMake1)
- v0 := b.NewValue0(v.Pos, OpLoad, t.Elem())
- v0.AddArg(ptr)
- v0.AddArg(mem)
- v.AddArg(v0)
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpLsh16x16_0(v *Value) bool {
- b := v.Block
- _ = b
- // match: (Lsh16x16 <t> x (Const16 [c]))
- // cond:
- // result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))]))
- for {
- t := v.Type
- _ = v.Args[1]
- x := v.Args[0]
+ o1 := op.AuxInt
+ p1 := op.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst16 {
+ if v_1.Op != OpStore {
break
}
- c := v_1.AuxInt
- v.reset(OpLsh16x64)
- v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = int64(uint16(c))
- v.AddArg(v0)
- return true
- }
- // match: (Lsh16x16 (Const16 [0]) _)
- // cond:
- // result: (Const16 [0])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst16 {
+ t2 := v_1.Aux
+ _ = v_1.Args[2]
+ p2 := v_1.Args[0]
+ v_1_2 := v_1.Args[2]
+ if v_1_2.Op != OpStore {
break
}
- if v_0.AuxInt != 0 {
+ t3 := v_1_2.Aux
+ _ = v_1_2.Args[2]
+ p3 := v_1_2.Args[0]
+ v_1_2_2 := v_1_2.Args[2]
+ if v_1_2_2.Op != OpStore {
break
}
- v.reset(OpConst16)
- v.AuxInt = 0
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpLsh16x32_0(v *Value) bool {
- b := v.Block
- _ = b
- // match: (Lsh16x32 <t> x (Const32 [c]))
- // cond:
- // result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))]))
- for {
- t := v.Type
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ t4 := v_1_2_2.Aux
+ _ = v_1_2_2.Args[2]
+ p4 := v_1_2_2.Args[0]
+ v_1_2_2_2 := v_1_2_2.Args[2]
+ if v_1_2_2_2.Op != OpStore {
break
}
- c := v_1.AuxInt
- v.reset(OpLsh16x64)
- v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = int64(uint32(c))
- v.AddArg(v0)
- return true
- }
- // match: (Lsh16x32 (Const16 [0]) _)
- // cond:
- // result: (Const16 [0])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst16 {
+ t5 := v_1_2_2_2.Aux
+ _ = v_1_2_2_2.Args[2]
+ p5 := v_1_2_2_2.Args[0]
+ mem := v_1_2_2_2.Args[2]
+ if mem.Op != OpZero {
break
}
- if v_0.AuxInt != 0 {
+ n := mem.AuxInt
+ _ = mem.Args[1]
+ p6 := mem.Args[0]
+ if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p6) && disjoint(op, t1.Size(), p2, sizeof(t2)) && disjoint(op, t1.Size(), p3, sizeof(t3)) && disjoint(op, t1.Size(), p4, sizeof(t4)) && disjoint(op, t1.Size(), p5, sizeof(t5))) {
break
}
- v.reset(OpConst16)
- v.AuxInt = 0
+ b = mem.Block
+ v0 := b.NewValue0(v.Pos, OpLoad, t1)
+ v.reset(OpCopy)
+ v.AddArg(v0)
+ v0.AddArg(op)
+ v0.AddArg(mem)
return true
}
- return false
-}
-func rewriteValuegeneric_OpLsh16x64_0(v *Value) bool {
- b := v.Block
- _ = b
- typ := &b.Func.Config.Types
- _ = typ
- // match: (Lsh16x64 (Const16 [c]) (Const64 [d]))
- // cond:
- // result: (Const16 [int64(int16(c) << uint64(d))])
+ // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ // cond: t1.IsBoolean() && isSamePtr(p1, p2) && n >= o + 1
+ // result: (ConstBool [0])
for {
+ t1 := v.Type
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst16 {
+ if v_0.Op != OpOffPtr {
break
}
- c := v_0.AuxInt
+ o := v_0.AuxInt
+ p1 := v_0.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpZero {
break
}
- d := v_1.AuxInt
- v.reset(OpConst16)
- v.AuxInt = int64(int16(c) << uint64(d))
+ n := v_1.AuxInt
+ _ = v_1.Args[1]
+ p2 := v_1.Args[0]
+ if !(t1.IsBoolean() && isSamePtr(p1, p2) && n >= o+1) {
+ break
+ }
+ v.reset(OpConstBool)
+ v.AuxInt = 0
return true
}
- // match: (Lsh16x64 x (Const64 [0]))
- // cond:
- // result: x
+ // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ // cond: is8BitInt(t1) && isSamePtr(p1, p2) && n >= o + 1
+ // result: (Const8 [0])
for {
+ t1 := v.Type
_ = v.Args[1]
- x := v.Args[0]
+ v_0 := v.Args[0]
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ o := v_0.AuxInt
+ p1 := v_0.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpZero {
break
}
- if v_1.AuxInt != 0 {
+ n := v_1.AuxInt
+ _ = v_1.Args[1]
+ p2 := v_1.Args[0]
+ if !(is8BitInt(t1) && isSamePtr(p1, p2) && n >= o+1) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
- v.AddArg(x)
+ v.reset(OpConst8)
+ v.AuxInt = 0
return true
}
- // match: (Lsh16x64 (Const16 [0]) _)
- // cond:
+ // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ // cond: is16BitInt(t1) && isSamePtr(p1, p2) && n >= o + 2
// result: (Const16 [0])
for {
+ t1 := v.Type
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst16 {
+ if v_0.Op != OpOffPtr {
break
}
- if v_0.AuxInt != 0 {
+ o := v_0.AuxInt
+ p1 := v_0.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpZero {
+ break
+ }
+ n := v_1.AuxInt
+ _ = v_1.Args[1]
+ p2 := v_1.Args[0]
+ if !(is16BitInt(t1) && isSamePtr(p1, p2) && n >= o+2) {
break
}
v.reset(OpConst16)
v.AuxInt = 0
return true
}
- // match: (Lsh16x64 _ (Const64 [c]))
- // cond: uint64(c) >= 16
- // result: (Const16 [0])
+ // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ // cond: is32BitInt(t1) && isSamePtr(p1, p2) && n >= o + 4
+ // result: (Const32 [0])
for {
+ t1 := v.Type
_ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ o := v_0.AuxInt
+ p1 := v_0.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpZero {
break
}
- c := v_1.AuxInt
- if !(uint64(c) >= 16) {
+ n := v_1.AuxInt
+ _ = v_1.Args[1]
+ p2 := v_1.Args[0]
+ if !(is32BitInt(t1) && isSamePtr(p1, p2) && n >= o+4) {
break
}
- v.reset(OpConst16)
+ v.reset(OpConst32)
v.AuxInt = 0
return true
}
- // match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d]))
- // cond: !uaddOvf(c,d)
- // result: (Lsh16x64 x (Const64 <t> [c+d]))
+ // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ // cond: is64BitInt(t1) && isSamePtr(p1, p2) && n >= o + 8
+ // result: (Const64 [0])
for {
- t := v.Type
+ t1 := v.Type
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpLsh16x64 {
- break
- }
- _ = v_0.Args[1]
- x := v_0.Args[0]
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ if v_0.Op != OpOffPtr {
break
}
- c := v_0_1.AuxInt
+ o := v_0.AuxInt
+ p1 := v_0.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpZero {
break
}
- d := v_1.AuxInt
- if !(!uaddOvf(c, d)) {
+ n := v_1.AuxInt
+ _ = v_1.Args[1]
+ p2 := v_1.Args[0]
+ if !(is64BitInt(t1) && isSamePtr(p1, p2) && n >= o+8) {
break
}
- v.reset(OpLsh16x64)
- v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = c + d
- v.AddArg(v0)
+ v.reset(OpConst64)
+ v.AuxInt = 0
return true
}
- // match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
- // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
- // result: (Lsh16x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
+ // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ // cond: is32BitFloat(t1) && isSamePtr(p1, p2) && n >= o + 4
+ // result: (Const32F [0])
for {
+ t1 := v.Type
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpRsh16Ux64 {
+ if v_0.Op != OpOffPtr {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpLsh16x64 {
+ o := v_0.AuxInt
+ p1 := v_0.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpZero {
break
}
- _ = v_0_0.Args[1]
- x := v_0_0.Args[0]
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ n := v_1.AuxInt
+ _ = v_1.Args[1]
+ p2 := v_1.Args[0]
+ if !(is32BitFloat(t1) && isSamePtr(p1, p2) && n >= o+4) {
break
}
- c1 := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ v.reset(OpConst32F)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
+ // cond: is64BitFloat(t1) && isSamePtr(p1, p2) && n >= o + 8
+ // result: (Const64F [0])
+ for {
+ t1 := v.Type
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpOffPtr {
break
}
- c2 := v_0_1.AuxInt
+ o := v_0.AuxInt
+ p1 := v_0.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpZero {
break
}
- c3 := v_1.AuxInt
- if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
+ n := v_1.AuxInt
+ _ = v_1.Args[1]
+ p2 := v_1.Args[0]
+ if !(is64BitFloat(t1) && isSamePtr(p1, p2) && n >= o+8) {
break
}
- v.reset(OpLsh16x64)
- v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v0.AuxInt = c1 - c2 + c3
- v.AddArg(v0)
+ v.reset(OpConst64F)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (Load <t> _ _)
+ // cond: t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t)
+ // result: (StructMake0)
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ if !(t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t)) {
+ break
+ }
+ v.reset(OpStructMake0)
return true
}
return false
}
-func rewriteValuegeneric_OpLsh16x8_0(v *Value) bool {
+func rewriteValuegeneric_OpLoad_20(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh16x8 <t> x (Const8 [c]))
- // cond:
- // result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))]))
+ fe := b.Func.fe
+ _ = fe
+ // match: (Load <t> ptr mem)
+ // cond: t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t)
+ // result: (StructMake1 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem))
for {
t := v.Type
_ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ ptr := v.Args[0]
+ mem := v.Args[1]
+ if !(t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t)) {
break
}
- c := v_1.AuxInt
- v.reset(OpLsh16x64)
- v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = int64(uint8(c))
+ v.reset(OpStructMake1)
+ v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
+ v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
+ v1.AuxInt = 0
+ v1.AddArg(ptr)
+ v0.AddArg(v1)
+ v0.AddArg(mem)
v.AddArg(v0)
return true
}
- // match: (Lsh16x8 (Const16 [0]) _)
- // cond:
- // result: (Const16 [0])
+ // match: (Load <t> ptr mem)
+ // cond: t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t)
+ // result: (StructMake2 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
for {
+ t := v.Type
_ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst16 {
+ ptr := v.Args[0]
+ mem := v.Args[1]
+ if !(t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t)) {
break
}
- if v_0.AuxInt != 0 {
+ v.reset(OpStructMake2)
+ v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
+ v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
+ v1.AuxInt = 0
+ v1.AddArg(ptr)
+ v0.AddArg(v1)
+ v0.AddArg(mem)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
+ v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
+ v3.AuxInt = t.FieldOff(1)
+ v3.AddArg(ptr)
+ v2.AddArg(v3)
+ v2.AddArg(mem)
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Load <t> ptr mem)
+ // cond: t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t)
+ // result: (StructMake3 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ ptr := v.Args[0]
+ mem := v.Args[1]
+ if !(t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t)) {
break
}
- v.reset(OpConst16)
- v.AuxInt = 0
+ v.reset(OpStructMake3)
+ v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
+ v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
+ v1.AuxInt = 0
+ v1.AddArg(ptr)
+ v0.AddArg(v1)
+ v0.AddArg(mem)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
+ v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
+ v3.AuxInt = t.FieldOff(1)
+ v3.AddArg(ptr)
+ v2.AddArg(v3)
+ v2.AddArg(mem)
+ v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
+ v5 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
+ v5.AuxInt = t.FieldOff(2)
+ v5.AddArg(ptr)
+ v4.AddArg(v5)
+ v4.AddArg(mem)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Load <t> ptr mem)
+ // cond: t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t)
+ // result: (StructMake4 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem) (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ ptr := v.Args[0]
+ mem := v.Args[1]
+ if !(t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t)) {
+ break
+ }
+ v.reset(OpStructMake4)
+ v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
+ v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
+ v1.AuxInt = 0
+ v1.AddArg(ptr)
+ v0.AddArg(v1)
+ v0.AddArg(mem)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
+ v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
+ v3.AuxInt = t.FieldOff(1)
+ v3.AddArg(ptr)
+ v2.AddArg(v3)
+ v2.AddArg(mem)
+ v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
+ v5 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
+ v5.AuxInt = t.FieldOff(2)
+ v5.AddArg(ptr)
+ v4.AddArg(v5)
+ v4.AddArg(mem)
+ v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpLoad, t.FieldType(3))
+ v7 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
+ v7.AuxInt = t.FieldOff(3)
+ v7.AddArg(ptr)
+ v6.AddArg(v7)
+ v6.AddArg(mem)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Load <t> _ _)
+ // cond: t.IsArray() && t.NumElem() == 0
+ // result: (ArrayMake0)
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ if !(t.IsArray() && t.NumElem() == 0) {
+ break
+ }
+ v.reset(OpArrayMake0)
+ return true
+ }
+ // match: (Load <t> ptr mem)
+ // cond: t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t)
+ // result: (ArrayMake1 (Load <t.Elem()> ptr mem))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ ptr := v.Args[0]
+ mem := v.Args[1]
+ if !(t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t)) {
+ break
+ }
+ v.reset(OpArrayMake1)
+ v0 := b.NewValue0(v.Pos, OpLoad, t.Elem())
+ v0.AddArg(ptr)
+ v0.AddArg(mem)
+ v.AddArg(v0)
return true
}
return false
}
-func rewriteValuegeneric_OpLsh32x16_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh16x16_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh32x16 <t> x (Const16 [c]))
+ // match: (Lsh16x16 <t> x (Const16 [c]))
// cond:
- // result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))]))
+ // result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))]))
for {
t := v.Type
_ = v.Args[1]
break
}
c := v_1.AuxInt
- v.reset(OpLsh32x64)
+ v.reset(OpLsh16x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = int64(uint16(c))
v.AddArg(v0)
return true
}
- // match: (Lsh32x16 (Const32 [0]) _)
+ // match: (Lsh16x16 (Const16 [0]) _)
// cond:
- // result: (Const32 [0])
+ // result: (Const16 [0])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ if v_0.Op != OpConst16 {
break
}
if v_0.AuxInt != 0 {
break
}
- v.reset(OpConst32)
+ v.reset(OpConst16)
v.AuxInt = 0
return true
}
return false
}
-func rewriteValuegeneric_OpLsh32x32_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh16x32_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh32x32 <t> x (Const32 [c]))
+ // match: (Lsh16x32 <t> x (Const32 [c]))
// cond:
- // result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))]))
+ // result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))]))
for {
t := v.Type
_ = v.Args[1]
break
}
c := v_1.AuxInt
- v.reset(OpLsh32x64)
+ v.reset(OpLsh16x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = int64(uint32(c))
v.AddArg(v0)
return true
}
- // match: (Lsh32x32 (Const32 [0]) _)
+ // match: (Lsh16x32 (Const16 [0]) _)
// cond:
- // result: (Const32 [0])
+ // result: (Const16 [0])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ if v_0.Op != OpConst16 {
break
}
if v_0.AuxInt != 0 {
break
}
- v.reset(OpConst32)
+ v.reset(OpConst16)
v.AuxInt = 0
return true
}
return false
}
-func rewriteValuegeneric_OpLsh32x64_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh16x64_0(v *Value) bool {
b := v.Block
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Lsh32x64 (Const32 [c]) (Const64 [d]))
+ // match: (Lsh16x64 (Const16 [c]) (Const64 [d]))
// cond:
- // result: (Const32 [int64(int32(c) << uint64(d))])
+ // result: (Const16 [int64(int16(c) << uint64(d))])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ if v_0.Op != OpConst16 {
break
}
c := v_0.AuxInt
break
}
d := v_1.AuxInt
- v.reset(OpConst32)
- v.AuxInt = int64(int32(c) << uint64(d))
+ v.reset(OpConst16)
+ v.AuxInt = int64(int16(c) << uint64(d))
return true
}
- // match: (Lsh32x64 x (Const64 [0]))
+ // match: (Lsh16x64 x (Const64 [0]))
// cond:
// result: x
for {
v.AddArg(x)
return true
}
- // match: (Lsh32x64 (Const32 [0]) _)
+ // match: (Lsh16x64 (Const16 [0]) _)
// cond:
- // result: (Const32 [0])
+ // result: (Const16 [0])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ if v_0.Op != OpConst16 {
break
}
if v_0.AuxInt != 0 {
break
}
- v.reset(OpConst32)
+ v.reset(OpConst16)
v.AuxInt = 0
return true
}
- // match: (Lsh32x64 _ (Const64 [c]))
- // cond: uint64(c) >= 32
- // result: (Const32 [0])
+ // match: (Lsh16x64 _ (Const64 [c]))
+ // cond: uint64(c) >= 16
+ // result: (Const16 [0])
for {
_ = v.Args[1]
v_1 := v.Args[1]
break
}
c := v_1.AuxInt
- if !(uint64(c) >= 32) {
+ if !(uint64(c) >= 16) {
break
}
- v.reset(OpConst32)
+ v.reset(OpConst16)
v.AuxInt = 0
return true
}
- // match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d]))
+ // match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d]))
// cond: !uaddOvf(c,d)
- // result: (Lsh32x64 x (Const64 <t> [c+d]))
+ // result: (Lsh16x64 x (Const64 <t> [c+d]))
for {
t := v.Type
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpLsh32x64 {
+ if v_0.Op != OpLsh16x64 {
break
}
_ = v_0.Args[1]
if !(!uaddOvf(c, d)) {
break
}
- v.reset(OpLsh32x64)
+ v.reset(OpLsh16x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = c + d
v.AddArg(v0)
return true
}
- // match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ // match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
- // result: (Lsh32x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
+ // result: (Lsh16x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpRsh32Ux64 {
+ if v_0.Op != OpRsh16Ux64 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpLsh32x64 {
+ if v_0_0.Op != OpLsh16x64 {
break
}
_ = v_0_0.Args[1]
if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
break
}
- v.reset(OpLsh32x64)
+ v.reset(OpLsh16x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v0.AuxInt = c1 - c2 + c3
}
return false
}
-func rewriteValuegeneric_OpLsh32x8_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh16x8_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh32x8 <t> x (Const8 [c]))
+ // match: (Lsh16x8 <t> x (Const8 [c]))
// cond:
- // result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))]))
+ // result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))]))
for {
t := v.Type
_ = v.Args[1]
break
}
c := v_1.AuxInt
- v.reset(OpLsh32x64)
+ v.reset(OpLsh16x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = int64(uint8(c))
v.AddArg(v0)
return true
}
- // match: (Lsh32x8 (Const32 [0]) _)
+ // match: (Lsh16x8 (Const16 [0]) _)
// cond:
- // result: (Const32 [0])
+ // result: (Const16 [0])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ if v_0.Op != OpConst16 {
break
}
if v_0.AuxInt != 0 {
break
}
- v.reset(OpConst32)
+ v.reset(OpConst16)
v.AuxInt = 0
return true
}
return false
}
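// An illustrative sketch, not part of the generated rewrites: the
// Lsh*x8/x16/x32 helpers normalize shift counts to 64 bits with a
// zero-extension, so later rules only ever match Const64 counts. With a
// hypothetical 8-bit count:
//
//	c := int8(-1)              // raw AuxInt of a (Const8 [c]) count
//	widened := int64(uint8(c)) // 255, never negative
//	_ = widened
//	// (Lsh16x8 x (Const8 [-1])) thus becomes (Lsh16x64 x (Const64 [255])),
//	// which the overshift rule then folds to zero.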
-func rewriteValuegeneric_OpLsh64x16_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh32x16_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh64x16 <t> x (Const16 [c]))
+ // match: (Lsh32x16 <t> x (Const16 [c]))
// cond:
- // result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))]))
+ // result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))]))
for {
t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
c := v_1.AuxInt
- v.reset(OpLsh64x64)
+ v.reset(OpLsh32x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = int64(uint16(c))
v.AddArg(v0)
return true
}
- // match: (Lsh64x16 (Const64 [0]) _)
+ // match: (Lsh32x16 (Const32 [0]) _)
// cond:
- // result: (Const64 [0])
+ // result: (Const32 [0])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst32 {
break
}
if v_0.AuxInt != 0 {
break
}
- v.reset(OpConst64)
+ v.reset(OpConst32)
v.AuxInt = 0
return true
}
return false
}
-func rewriteValuegeneric_OpLsh64x32_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh32x32_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh64x32 <t> x (Const32 [c]))
+ // match: (Lsh32x32 <t> x (Const32 [c]))
// cond:
- // result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))]))
+ // result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))]))
for {
t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
c := v_1.AuxInt
- v.reset(OpLsh64x64)
+ v.reset(OpLsh32x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = int64(uint32(c))
v.AddArg(v0)
return true
}
- // match: (Lsh64x32 (Const64 [0]) _)
+ // match: (Lsh32x32 (Const32 [0]) _)
// cond:
- // result: (Const64 [0])
+ // result: (Const32 [0])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst32 {
break
}
if v_0.AuxInt != 0 {
break
}
- v.reset(OpConst64)
+ v.reset(OpConst32)
v.AuxInt = 0
return true
}
return false
}
-func rewriteValuegeneric_OpLsh64x64_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh32x64_0(v *Value) bool {
b := v.Block
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Lsh64x64 (Const64 [c]) (Const64 [d]))
+ // match: (Lsh32x64 (Const32 [c]) (Const64 [d]))
// cond:
- // result: (Const64 [c << uint64(d)])
+ // result: (Const32 [int64(int32(c) << uint64(d))])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst32 {
break
}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
d := v_1.AuxInt
- v.reset(OpConst64)
- v.AuxInt = c << uint64(d)
+ v.reset(OpConst32)
+ v.AuxInt = int64(int32(c) << uint64(d))
return true
}
- // match: (Lsh64x64 x (Const64 [0]))
+ // match: (Lsh32x64 x (Const64 [0]))
// cond:
// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
return true
}
- // match: (Lsh64x64 (Const64 [0]) _)
+ // match: (Lsh32x64 (Const32 [0]) _)
// cond:
- // result: (Const64 [0])
+ // result: (Const32 [0])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst32 {
break
}
if v_0.AuxInt != 0 {
break
}
- v.reset(OpConst64)
+ v.reset(OpConst32)
v.AuxInt = 0
return true
}
- // match: (Lsh64x64 _ (Const64 [c]))
- // cond: uint64(c) >= 64
- // result: (Const64 [0])
+ // match: (Lsh32x64 _ (Const64 [c]))
+ // cond: uint64(c) >= 32
+ // result: (Const32 [0])
for {
_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
c := v_1.AuxInt
- if !(uint64(c) >= 64) {
+ if !(uint64(c) >= 32) {
break
}
- v.reset(OpConst64)
+ v.reset(OpConst32)
v.AuxInt = 0
return true
}
- // match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d]))
+ // match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d]))
// cond: !uaddOvf(c,d)
- // result: (Lsh64x64 x (Const64 <t> [c+d]))
+ // result: (Lsh32x64 x (Const64 <t> [c+d]))
for {
t := v.Type
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpLsh64x64 {
+ if v_0.Op != OpLsh32x64 {
break
}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
if !(!uaddOvf(c, d)) {
break
}
- v.reset(OpLsh64x64)
+ v.reset(OpLsh32x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = c + d
v.AddArg(v0)
return true
}
- // match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ // match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
- // result: (Lsh64x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
+ // result: (Lsh32x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpRsh64Ux64 {
+ if v_0.Op != OpRsh32Ux64 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpLsh64x64 {
+ if v_0_0.Op != OpLsh32x64 {
break
}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := v_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c3 := v_1.AuxInt
if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
break
}
- v.reset(OpLsh64x64)
+ v.reset(OpLsh32x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = c1 - c2 + c3
		v.AddArg(v0)
		return true
	}
return false
}
-func rewriteValuegeneric_OpLsh64x8_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh32x8_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh64x8 <t> x (Const8 [c]))
+ // match: (Lsh32x8 <t> x (Const8 [c]))
// cond:
- // result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))]))
+ // result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))]))
for {
t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
c := v_1.AuxInt
- v.reset(OpLsh64x64)
+ v.reset(OpLsh32x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = int64(uint8(c))
v.AddArg(v0)
return true
}
- // match: (Lsh64x8 (Const64 [0]) _)
+ // match: (Lsh32x8 (Const32 [0]) _)
// cond:
- // result: (Const64 [0])
+ // result: (Const32 [0])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst32 {
break
}
if v_0.AuxInt != 0 {
break
}
- v.reset(OpConst64)
+ v.reset(OpConst32)
v.AuxInt = 0
return true
}
return false
}
-func rewriteValuegeneric_OpLsh8x16_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh64x16_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh8x16 <t> x (Const16 [c]))
+ // match: (Lsh64x16 <t> x (Const16 [c]))
// cond:
- // result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))]))
+ // result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))]))
for {
t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
c := v_1.AuxInt
- v.reset(OpLsh8x64)
+ v.reset(OpLsh64x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = int64(uint16(c))
v.AddArg(v0)
return true
}
- // match: (Lsh8x16 (Const8 [0]) _)
+ // match: (Lsh64x16 (Const64 [0]) _)
// cond:
- // result: (Const8 [0])
+ // result: (Const64 [0])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ if v_0.Op != OpConst64 {
break
}
if v_0.AuxInt != 0 {
break
}
- v.reset(OpConst8)
+ v.reset(OpConst64)
v.AuxInt = 0
return true
}
return false
}
-func rewriteValuegeneric_OpLsh8x32_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh64x32_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh8x32 <t> x (Const32 [c]))
+ // match: (Lsh64x32 <t> x (Const32 [c]))
// cond:
- // result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))]))
+ // result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))]))
for {
t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
c := v_1.AuxInt
- v.reset(OpLsh8x64)
+ v.reset(OpLsh64x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = int64(uint32(c))
v.AddArg(v0)
return true
}
- // match: (Lsh8x32 (Const8 [0]) _)
+ // match: (Lsh64x32 (Const64 [0]) _)
// cond:
- // result: (Const8 [0])
+ // result: (Const64 [0])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ if v_0.Op != OpConst64 {
break
}
if v_0.AuxInt != 0 {
break
}
- v.reset(OpConst8)
+ v.reset(OpConst64)
v.AuxInt = 0
return true
}
return false
}
-func rewriteValuegeneric_OpLsh8x64_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh64x64_0(v *Value) bool {
b := v.Block
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Lsh8x64 (Const8 [c]) (Const64 [d]))
+ // match: (Lsh64x64 (Const64 [c]) (Const64 [d]))
// cond:
- // result: (Const8 [int64(int8(c) << uint64(d))])
+ // result: (Const64 [c << uint64(d)])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ if v_0.Op != OpConst64 {
break
}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
d := v_1.AuxInt
- v.reset(OpConst8)
- v.AuxInt = int64(int8(c) << uint64(d))
+ v.reset(OpConst64)
+ v.AuxInt = c << uint64(d)
return true
}
- // match: (Lsh8x64 x (Const64 [0]))
+ // match: (Lsh64x64 x (Const64 [0]))
// cond:
// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
return true
}
- // match: (Lsh8x64 (Const8 [0]) _)
+ // match: (Lsh64x64 (Const64 [0]) _)
// cond:
- // result: (Const8 [0])
+ // result: (Const64 [0])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ if v_0.Op != OpConst64 {
break
}
if v_0.AuxInt != 0 {
break
}
- v.reset(OpConst8)
+ v.reset(OpConst64)
v.AuxInt = 0
return true
}
- // match: (Lsh8x64 _ (Const64 [c]))
- // cond: uint64(c) >= 8
- // result: (Const8 [0])
+ // match: (Lsh64x64 _ (Const64 [c]))
+ // cond: uint64(c) >= 64
+ // result: (Const64 [0])
for {
_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
c := v_1.AuxInt
- if !(uint64(c) >= 8) {
+ if !(uint64(c) >= 64) {
break
}
- v.reset(OpConst8)
+ v.reset(OpConst64)
v.AuxInt = 0
return true
}
- // match: (Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d]))
+ // match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d]))
// cond: !uaddOvf(c,d)
- // result: (Lsh8x64 x (Const64 <t> [c+d]))
+ // result: (Lsh64x64 x (Const64 <t> [c+d]))
for {
t := v.Type
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpLsh8x64 {
+ if v_0.Op != OpLsh64x64 {
break
}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
if !(!uaddOvf(c, d)) {
break
}
- v.reset(OpLsh8x64)
+ v.reset(OpLsh64x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = c + d
v.AddArg(v0)
return true
}
- // match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ // match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
- // result: (Lsh8x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
+ // result: (Lsh64x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpRsh8Ux64 {
+ if v_0.Op != OpRsh64Ux64 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpLsh8x64 {
+ if v_0_0.Op != OpLsh64x64 {
break
}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := v_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c3 := v_1.AuxInt
if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
break
}
- v.reset(OpLsh8x64)
+ v.reset(OpLsh64x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = c1 - c2 + c3
		v.AddArg(v0)
		return true
	}
return false
}
-func rewriteValuegeneric_OpLsh8x8_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh64x8_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh8x8 <t> x (Const8 [c]))
+ // match: (Lsh64x8 <t> x (Const8 [c]))
// cond:
- // result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))]))
+ // result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))]))
for {
t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
c := v_1.AuxInt
- v.reset(OpLsh8x64)
+ v.reset(OpLsh64x64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = int64(uint8(c))
v.AddArg(v0)
return true
}
- // match: (Lsh8x8 (Const8 [0]) _)
+ // match: (Lsh64x8 (Const64 [0]) _)
// cond:
- // result: (Const8 [0])
+ // result: (Const64 [0])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ if v_0.Op != OpConst64 {
break
}
if v_0.AuxInt != 0 {
break
}
- v.reset(OpConst8)
+ v.reset(OpConst64)
v.AuxInt = 0
return true
}
return false
}
-func rewriteValuegeneric_OpMod16_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh8x16_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Mod16 (Const16 [c]) (Const16 [d]))
- // cond: d != 0
- // result: (Const16 [int64(int16(c % d))])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst16 {
- break
- }
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst16 {
- break
- }
- d := v_1.AuxInt
- if !(d != 0) {
- break
- }
- v.reset(OpConst16)
- v.AuxInt = int64(int16(c % d))
- return true
- }
- // match: (Mod16 <t> n (Const16 [c]))
- // cond: isNonNegative(n) && isPowerOfTwo(c&0xffff)
- // result: (And16 n (Const16 <t> [(c&0xffff)-1]))
+ // match: (Lsh8x16 <t> x (Const16 [c]))
+ // cond:
+ // result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))]))
for {
t := v.Type
_ = v.Args[1]
- n := v.Args[0]
+ x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpConst16 {
break
}
c := v_1.AuxInt
- if !(isNonNegative(n) && isPowerOfTwo(c&0xffff)) {
- break
- }
- v.reset(OpAnd16)
- v.AddArg(n)
- v0 := b.NewValue0(v.Pos, OpConst16, t)
- v0.AuxInt = (c & 0xffff) - 1
+ v.reset(OpLsh8x64)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = int64(uint16(c))
v.AddArg(v0)
return true
}
- // match: (Mod16 <t> n (Const16 [c]))
- // cond: c < 0 && c != -1<<15
- // result: (Mod16 <t> n (Const16 <t> [-c]))
+ // match: (Lsh8x16 (Const8 [0]) _)
+ // cond:
+ // result: (Const8 [0])
for {
- t := v.Type
_ = v.Args[1]
- n := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst16 {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
break
}
- c := v_1.AuxInt
- if !(c < 0 && c != -1<<15) {
+ if v_0.AuxInt != 0 {
break
}
- v.reset(OpMod16)
- v.Type = t
- v.AddArg(n)
- v0 := b.NewValue0(v.Pos, OpConst16, t)
- v0.AuxInt = -c
- v.AddArg(v0)
+ v.reset(OpConst8)
+ v.AuxInt = 0
return true
}
- // match: (Mod16 <t> x (Const16 [c]))
- // cond: x.Op != OpConst16 && (c > 0 || c == -1<<15)
- // result: (Sub16 x (Mul16 <t> (Div16 <t> x (Const16 <t> [c])) (Const16 <t> [c])))
+ return false
+}
+func rewriteValuegeneric_OpLsh8x32_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ // match: (Lsh8x32 <t> x (Const32 [c]))
+ // cond:
+ // result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))]))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst16 {
+ if v_1.Op != OpConst32 {
break
}
c := v_1.AuxInt
- if !(x.Op != OpConst16 && (c > 0 || c == -1<<15)) {
- break
- }
- v.reset(OpSub16)
+ v.reset(OpLsh8x64)
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpMul16, t)
- v1 := b.NewValue0(v.Pos, OpDiv16, t)
- v1.AddArg(x)
- v2 := b.NewValue0(v.Pos, OpConst16, t)
- v2.AuxInt = c
- v1.AddArg(v2)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst16, t)
- v3.AuxInt = c
- v0.AddArg(v3)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = int64(uint32(c))
v.AddArg(v0)
return true
}
+ // match: (Lsh8x32 (Const8 [0]) _)
+ // cond:
+ // result: (Const8 [0])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst8)
+ v.AuxInt = 0
+ return true
+ }
return false
}
-func rewriteValuegeneric_OpMod16u_0(v *Value) bool {
+func rewriteValuegeneric_OpLsh8x64_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Mod16u (Const16 [c]) (Const16 [d]))
- // cond: d != 0
- // result: (Const16 [int64(uint16(c) % uint16(d))])
+ typ := &b.Func.Config.Types
+ _ = typ
+ // match: (Lsh8x64 (Const8 [c]) (Const64 [d]))
+ // cond:
+ // result: (Const8 [int64(int8(c) << uint64(d))])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst16 {
+ if v_0.Op != OpConst8 {
break
}
c := v_0.AuxInt
v_1 := v.Args[1]
- if v_1.Op != OpConst16 {
+ if v_1.Op != OpConst64 {
break
}
d := v_1.AuxInt
- if !(d != 0) {
+ v.reset(OpConst8)
+ v.AuxInt = int64(int8(c) << uint64(d))
+ return true
+ }
+ // match: (Lsh8x64 x (Const64 [0]))
+ // cond:
+ // result: x
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
break
}
- v.reset(OpConst16)
- v.AuxInt = int64(uint16(c) % uint16(d))
+ if v_1.AuxInt != 0 {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
return true
}
- // match: (Mod16u <t> n (Const16 [c]))
- // cond: isPowerOfTwo(c&0xffff)
- // result: (And16 n (Const16 <t> [(c&0xffff)-1]))
+ // match: (Lsh8x64 (Const8 [0]) _)
+ // cond:
+ // result: (Const8 [0])
for {
- t := v.Type
_ = v.Args[1]
- n := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst16 {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
break
}
- c := v_1.AuxInt
- if !(isPowerOfTwo(c & 0xffff)) {
+ if v_0.AuxInt != 0 {
break
}
- v.reset(OpAnd16)
- v.AddArg(n)
- v0 := b.NewValue0(v.Pos, OpConst16, t)
- v0.AuxInt = (c & 0xffff) - 1
- v.AddArg(v0)
+ v.reset(OpConst8)
+ v.AuxInt = 0
return true
}
- // match: (Mod16u <t> x (Const16 [c]))
- // cond: x.Op != OpConst16 && c > 0 && umagicOK(16,c)
- // result: (Sub16 x (Mul16 <t> (Div16u <t> x (Const16 <t> [c])) (Const16 <t> [c])))
+ // match: (Lsh8x64 _ (Const64 [c]))
+ // cond: uint64(c) >= 8
+ // result: (Const8 [0])
for {
- t := v.Type
_ = v.Args[1]
- x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst16 {
+ if v_1.Op != OpConst64 {
break
}
c := v_1.AuxInt
- if !(x.Op != OpConst16 && c > 0 && umagicOK(16, c)) {
+ if !(uint64(c) >= 8) {
break
}
- v.reset(OpSub16)
- v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpMul16, t)
- v1 := b.NewValue0(v.Pos, OpDiv16u, t)
- v1.AddArg(x)
- v2 := b.NewValue0(v.Pos, OpConst16, t)
- v2.AuxInt = c
- v1.AddArg(v2)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst16, t)
- v3.AuxInt = c
- v0.AddArg(v3)
- v.AddArg(v0)
+ v.reset(OpConst8)
+ v.AuxInt = 0
return true
}
- return false
-}
-func rewriteValuegeneric_OpMod32_0(v *Value) bool {
- b := v.Block
- _ = b
- // match: (Mod32 (Const32 [c]) (Const32 [d]))
- // cond: d != 0
- // result: (Const32 [int64(int32(c % d))])
+ // match: (Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d]))
+ // cond: !uaddOvf(c,d)
+ // result: (Lsh8x64 x (Const64 <t> [c+d]))
for {
+ t := v.Type
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ if v_0.Op != OpLsh8x64 {
break
}
- c := v_0.AuxInt
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ if v_1.Op != OpConst64 {
break
}
d := v_1.AuxInt
- if !(d != 0) {
+ if !(!uaddOvf(c, d)) {
break
}
- v.reset(OpConst32)
- v.AuxInt = int64(int32(c % d))
+ v.reset(OpLsh8x64)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = c + d
+ v.AddArg(v0)
return true
}
- // match: (Mod32 <t> n (Const32 [c]))
- // cond: isNonNegative(n) && isPowerOfTwo(c&0xffffffff)
- // result: (And32 n (Const32 <t> [(c&0xffffffff)-1]))
+ // match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+ // result: (Lsh8x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
for {
- t := v.Type
_ = v.Args[1]
- n := v.Args[0]
+ v_0 := v.Args[0]
+ if v_0.Op != OpRsh8Ux64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpLsh8x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ x := v_0_0.Args[0]
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ c1 := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c2 := v_0_1.AuxInt
v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ if v_1.Op != OpConst64 {
break
}
- c := v_1.AuxInt
- if !(isNonNegative(n) && isPowerOfTwo(c&0xffffffff)) {
+ c3 := v_1.AuxInt
+ if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
break
}
- v.reset(OpAnd32)
- v.AddArg(n)
- v0 := b.NewValue0(v.Pos, OpConst32, t)
- v0.AuxInt = (c & 0xffffffff) - 1
+ v.reset(OpLsh8x64)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v0.AuxInt = c1 - c2 + c3
v.AddArg(v0)
return true
}
- // match: (Mod32 <t> n (Const32 [c]))
- // cond: c < 0 && c != -1<<31
- // result: (Mod32 <t> n (Const32 <t> [-c]))
+ return false
+}
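// An illustrative sketch, not part of the generated rewrites: the final
// Lsh8x64 rule collapses an unsigned right shift sandwiched between two
// left shifts. With hypothetical counts satisfying the side conditions
// (c1 >= c2, c3 >= c2, and c1-c2+c3 not overflowing):
//
//	var x uint8 = 0x11
//	c1, c2, c3 := uint64(3), uint64(2), uint64(4)
//	lhs := ((x << c1) >> c2) << c3 // Rsh8Ux64 is a logical shift
//	rhs := x << (c1 - c2 + c3)     // the rewritten form
//	_ = lhs == rhs                 // true: both clear the same low bits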
+func rewriteValuegeneric_OpLsh8x8_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ // match: (Lsh8x8 <t> x (Const8 [c]))
+ // cond:
+ // result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))]))
for {
t := v.Type
_ = v.Args[1]
- n := v.Args[0]
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ if v_1.Op != OpConst8 {
break
}
c := v_1.AuxInt
- if !(c < 0 && c != -1<<31) {
- break
- }
- v.reset(OpMod32)
- v.Type = t
- v.AddArg(n)
- v0 := b.NewValue0(v.Pos, OpConst32, t)
- v0.AuxInt = -c
+ v.reset(OpLsh8x64)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = int64(uint8(c))
v.AddArg(v0)
return true
}
- // match: (Mod32 <t> x (Const32 [c]))
- // cond: x.Op != OpConst32 && (c > 0 || c == -1<<31)
- // result: (Sub32 x (Mul32 <t> (Div32 <t> x (Const32 <t> [c])) (Const32 <t> [c])))
+ // match: (Lsh8x8 (Const8 [0]) _)
+ // cond:
+ // result: (Const8 [0])
for {
- t := v.Type
_ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
break
}
- c := v_1.AuxInt
- if !(x.Op != OpConst32 && (c > 0 || c == -1<<31)) {
+ if v_0.AuxInt != 0 {
break
}
- v.reset(OpSub32)
- v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpMul32, t)
- v1 := b.NewValue0(v.Pos, OpDiv32, t)
- v1.AddArg(x)
- v2 := b.NewValue0(v.Pos, OpConst32, t)
- v2.AuxInt = c
- v1.AddArg(v2)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, t)
- v3.AuxInt = c
- v0.AddArg(v3)
- v.AddArg(v0)
+ v.reset(OpConst8)
+ v.AuxInt = 0
return true
}
return false
}
-func rewriteValuegeneric_OpMod32u_0(v *Value) bool {
+func rewriteValuegeneric_OpMod16_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Mod32u (Const32 [c]) (Const32 [d]))
+ // match: (Mod16 (Const16 [c]) (Const16 [d]))
// cond: d != 0
- // result: (Const32 [int64(uint32(c) % uint32(d))])
+ // result: (Const16 [int64(int16(c % d))])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ if v_0.Op != OpConst16 {
break
}
c := v_0.AuxInt
v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ if v_1.Op != OpConst16 {
break
}
d := v_1.AuxInt
if !(d != 0) {
break
}
- v.reset(OpConst32)
- v.AuxInt = int64(uint32(c) % uint32(d))
+ v.reset(OpConst16)
+ v.AuxInt = int64(int16(c % d))
return true
}
- // match: (Mod32u <t> n (Const32 [c]))
- // cond: isPowerOfTwo(c&0xffffffff)
- // result: (And32 n (Const32 <t> [(c&0xffffffff)-1]))
+ // match: (Mod16 <t> n (Const16 [c]))
+ // cond: isNonNegative(n) && isPowerOfTwo(c&0xffff)
+ // result: (And16 n (Const16 <t> [(c&0xffff)-1]))
for {
t := v.Type
_ = v.Args[1]
n := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ if v_1.Op != OpConst16 {
break
}
c := v_1.AuxInt
- if !(isPowerOfTwo(c & 0xffffffff)) {
+ if !(isNonNegative(n) && isPowerOfTwo(c&0xffff)) {
break
}
- v.reset(OpAnd32)
+ v.reset(OpAnd16)
v.AddArg(n)
- v0 := b.NewValue0(v.Pos, OpConst32, t)
- v0.AuxInt = (c & 0xffffffff) - 1
+ v0 := b.NewValue0(v.Pos, OpConst16, t)
+ v0.AuxInt = (c & 0xffff) - 1
v.AddArg(v0)
return true
}
- // match: (Mod32u <t> x (Const32 [c]))
- // cond: x.Op != OpConst32 && c > 0 && umagicOK(32,c)
- // result: (Sub32 x (Mul32 <t> (Div32u <t> x (Const32 <t> [c])) (Const32 <t> [c])))
+ // match: (Mod16 <t> n (Const16 [c]))
+ // cond: c < 0 && c != -1<<15
+ // result: (Mod16 <t> n (Const16 <t> [-c]))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst16 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(c < 0 && c != -1<<15) {
+ break
+ }
+ v.reset(OpMod16)
+ v.Type = t
+ v.AddArg(n)
+ v0 := b.NewValue0(v.Pos, OpConst16, t)
+ v0.AuxInt = -c
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Mod16 <t> x (Const16 [c]))
+ // cond: x.Op != OpConst16 && (c > 0 || c == -1<<15)
+ // result: (Sub16 x (Mul16 <t> (Div16 <t> x (Const16 <t> [c])) (Const16 <t> [c])))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ if v_1.Op != OpConst16 {
break
}
c := v_1.AuxInt
- if !(x.Op != OpConst32 && c > 0 && umagicOK(32, c)) {
+ if !(x.Op != OpConst16 && (c > 0 || c == -1<<15)) {
break
}
- v.reset(OpSub32)
+ v.reset(OpSub16)
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpMul32, t)
- v1 := b.NewValue0(v.Pos, OpDiv32u, t)
+ v0 := b.NewValue0(v.Pos, OpMul16, t)
+ v1 := b.NewValue0(v.Pos, OpDiv16, t)
v1.AddArg(x)
- v2 := b.NewValue0(v.Pos, OpConst32, t)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
v2.AuxInt = c
v1.AddArg(v2)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, t)
+ v3 := b.NewValue0(v.Pos, OpConst16, t)
v3.AuxInt = c
v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
return false
}
-func rewriteValuegeneric_OpMod64_0(v *Value) bool {
+func rewriteValuegeneric_OpMod16u_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Mod64 (Const64 [c]) (Const64 [d]))
+ // match: (Mod16u (Const16 [c]) (Const16 [d]))
// cond: d != 0
- // result: (Const64 [c % d])
+ // result: (Const16 [int64(uint16(c) % uint16(d))])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst16 {
break
}
c := v_0.AuxInt
v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpConst16 {
break
}
d := v_1.AuxInt
if !(d != 0) {
break
}
- v.reset(OpConst64)
- v.AuxInt = c % d
- return true
- }
- // match: (Mod64 <t> n (Const64 [c]))
- // cond: isNonNegative(n) && isPowerOfTwo(c)
- // result: (And64 n (Const64 <t> [c-1]))
- for {
- t := v.Type
- _ = v.Args[1]
- n := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(isNonNegative(n) && isPowerOfTwo(c)) {
- break
- }
- v.reset(OpAnd64)
- v.AddArg(n)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = c - 1
- v.AddArg(v0)
- return true
- }
- // match: (Mod64 n (Const64 [-1<<63]))
- // cond: isNonNegative(n)
- // result: n
- for {
- _ = v.Args[1]
- n := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
- break
- }
- if v_1.AuxInt != -1<<63 {
- break
- }
- if !(isNonNegative(n)) {
- break
- }
- v.reset(OpCopy)
- v.Type = n.Type
- v.AddArg(n)
+ v.reset(OpConst16)
+ v.AuxInt = int64(uint16(c) % uint16(d))
return true
}
- // match: (Mod64 <t> n (Const64 [c]))
- // cond: c < 0 && c != -1<<63
- // result: (Mod64 <t> n (Const64 <t> [-c]))
+ // match: (Mod16u <t> n (Const16 [c]))
+ // cond: isPowerOfTwo(c&0xffff)
+ // result: (And16 n (Const16 <t> [(c&0xffff)-1]))
for {
t := v.Type
_ = v.Args[1]
n := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpConst16 {
break
}
c := v_1.AuxInt
- if !(c < 0 && c != -1<<63) {
+ if !(isPowerOfTwo(c & 0xffff)) {
break
}
- v.reset(OpMod64)
- v.Type = t
+ v.reset(OpAnd16)
v.AddArg(n)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = -c
+ v0 := b.NewValue0(v.Pos, OpConst16, t)
+ v0.AuxInt = (c & 0xffff) - 1
v.AddArg(v0)
return true
}
- // match: (Mod64 <t> x (Const64 [c]))
- // cond: x.Op != OpConst64 && (c > 0 || c == -1<<63)
- // result: (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c])))
+ // match: (Mod16u <t> x (Const16 [c]))
+ // cond: x.Op != OpConst16 && c > 0 && umagicOK(16,c)
+ // result: (Sub16 x (Mul16 <t> (Div16u <t> x (Const16 <t> [c])) (Const16 <t> [c])))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpConst16 {
break
}
c := v_1.AuxInt
- if !(x.Op != OpConst64 && (c > 0 || c == -1<<63)) {
+ if !(x.Op != OpConst16 && c > 0 && umagicOK(16, c)) {
break
}
- v.reset(OpSub64)
+ v.reset(OpSub16)
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpMul64, t)
- v1 := b.NewValue0(v.Pos, OpDiv64, t)
+ v0 := b.NewValue0(v.Pos, OpMul16, t)
+ v1 := b.NewValue0(v.Pos, OpDiv16u, t)
v1.AddArg(x)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
v2.AuxInt = c
v1.AddArg(v2)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, t)
+ v3 := b.NewValue0(v.Pos, OpConst16, t)
v3.AuxInt = c
v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
return false
}
-func rewriteValuegeneric_OpMod64u_0(v *Value) bool {
+func rewriteValuegeneric_OpMod32_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Mod64u (Const64 [c]) (Const64 [d]))
+ // match: (Mod32 (Const32 [c]) (Const32 [d]))
// cond: d != 0
- // result: (Const64 [int64(uint64(c) % uint64(d))])
+ // result: (Const32 [int64(int32(c % d))])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst32 {
break
}
c := v_0.AuxInt
v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpConst32 {
break
}
d := v_1.AuxInt
if !(d != 0) {
break
}
- v.reset(OpConst64)
- v.AuxInt = int64(uint64(c) % uint64(d))
+ v.reset(OpConst32)
+ v.AuxInt = int64(int32(c % d))
return true
}
- // match: (Mod64u <t> n (Const64 [c]))
- // cond: isPowerOfTwo(c)
- // result: (And64 n (Const64 <t> [c-1]))
+ // match: (Mod32 <t> n (Const32 [c]))
+ // cond: isNonNegative(n) && isPowerOfTwo(c&0xffffffff)
+ // result: (And32 n (Const32 <t> [(c&0xffffffff)-1]))
for {
t := v.Type
_ = v.Args[1]
n := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpConst32 {
break
}
c := v_1.AuxInt
- if !(isPowerOfTwo(c)) {
+ if !(isNonNegative(n) && isPowerOfTwo(c&0xffffffff)) {
break
}
- v.reset(OpAnd64)
+ v.reset(OpAnd32)
v.AddArg(n)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = c - 1
+ v0 := b.NewValue0(v.Pos, OpConst32, t)
+ v0.AuxInt = (c & 0xffffffff) - 1
v.AddArg(v0)
return true
}
- // match: (Mod64u <t> n (Const64 [-1<<63]))
- // cond:
- // result: (And64 n (Const64 <t> [1<<63-1]))
+ // match: (Mod32 <t> n (Const32 [c]))
+ // cond: c < 0 && c != -1<<31
+ // result: (Mod32 <t> n (Const32 <t> [-c]))
for {
t := v.Type
_ = v.Args[1]
n := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpConst32 {
break
}
- if v_1.AuxInt != -1<<63 {
+ c := v_1.AuxInt
+ if !(c < 0 && c != -1<<31) {
break
}
- v.reset(OpAnd64)
+ v.reset(OpMod32)
+ v.Type = t
v.AddArg(n)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = 1<<63 - 1
+ v0 := b.NewValue0(v.Pos, OpConst32, t)
+ v0.AuxInt = -c
v.AddArg(v0)
return true
}
- // match: (Mod64u <t> x (Const64 [c]))
- // cond: x.Op != OpConst64 && c > 0 && umagicOK(64,c)
- // result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c])))
+ // match: (Mod32 <t> x (Const32 [c]))
+ // cond: x.Op != OpConst32 && (c > 0 || c == -1<<31)
+ // result: (Sub32 x (Mul32 <t> (Div32 <t> x (Const32 <t> [c])) (Const32 <t> [c])))
for {
t := v.Type
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpConst32 {
break
}
c := v_1.AuxInt
- if !(x.Op != OpConst64 && c > 0 && umagicOK(64, c)) {
+ if !(x.Op != OpConst32 && (c > 0 || c == -1<<31)) {
break
}
- v.reset(OpSub64)
+ v.reset(OpSub32)
v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpMul64, t)
- v1 := b.NewValue0(v.Pos, OpDiv64u, t)
+ v0 := b.NewValue0(v.Pos, OpMul32, t)
+ v1 := b.NewValue0(v.Pos, OpDiv32, t)
v1.AddArg(x)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpConst32, t)
v2.AuxInt = c
v1.AddArg(v2)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, t)
+ v3 := b.NewValue0(v.Pos, OpConst32, t)
v3.AuxInt = c
v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
return false
}
-func rewriteValuegeneric_OpMod8_0(v *Value) bool {
+func rewriteValuegeneric_OpMod32u_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Mod8 (Const8 [c]) (Const8 [d]))
+ // match: (Mod32u (Const32 [c]) (Const32 [d]))
// cond: d != 0
- // result: (Const8 [int64(int8(c % d))])
+ // result: (Const32 [int64(uint32(c) % uint32(d))])
for {
_ = v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ if v_0.Op != OpConst32 {
break
}
c := v_0.AuxInt
v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ if v_1.Op != OpConst32 {
break
}
d := v_1.AuxInt
if !(d != 0) {
break
}
- v.reset(OpConst8)
- v.AuxInt = int64(int8(c % d))
+ v.reset(OpConst32)
+ v.AuxInt = int64(uint32(c) % uint32(d))
return true
}
- // match: (Mod8 <t> n (Const8 [c]))
- // cond: isNonNegative(n) && isPowerOfTwo(c&0xff)
- // result: (And8 n (Const8 <t> [(c&0xff)-1]))
+ // match: (Mod32u <t> n (Const32 [c]))
+ // cond: isPowerOfTwo(c&0xffffffff)
+ // result: (And32 n (Const32 <t> [(c&0xffffffff)-1]))
for {
t := v.Type
_ = v.Args[1]
n := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ if v_1.Op != OpConst32 {
break
}
c := v_1.AuxInt
- if !(isNonNegative(n) && isPowerOfTwo(c&0xff)) {
+ if !(isPowerOfTwo(c & 0xffffffff)) {
break
}
- v.reset(OpAnd8)
+ v.reset(OpAnd32)
v.AddArg(n)
- v0 := b.NewValue0(v.Pos, OpConst8, t)
- v0.AuxInt = (c & 0xff) - 1
+ v0 := b.NewValue0(v.Pos, OpConst32, t)
+ v0.AuxInt = (c & 0xffffffff) - 1
v.AddArg(v0)
return true
}
- // match: (Mod8 <t> n (Const8 [c]))
- // cond: c < 0 && c != -1<<7
- // result: (Mod8 <t> n (Const8 <t> [-c]))
+ // match: (Mod32u <t> x (Const32 [c]))
+ // cond: x.Op != OpConst32 && c > 0 && umagicOK(32,c)
+ // result: (Sub32 x (Mul32 <t> (Div32u <t> x (Const32 <t> [c])) (Const32 <t> [c])))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst32 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(x.Op != OpConst32 && c > 0 && umagicOK(32, c)) {
+ break
+ }
+ v.reset(OpSub32)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpMul32, t)
+ v1 := b.NewValue0(v.Pos, OpDiv32u, t)
+ v1.AddArg(x)
+ v2 := b.NewValue0(v.Pos, OpConst32, t)
+ v2.AuxInt = c
+ v1.AddArg(v2)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, t)
+ v3.AuxInt = c
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpMod64_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ // match: (Mod64 (Const64 [c]) (Const64 [d]))
+ // cond: d != 0
+ // result: (Const64 [c % d])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ d := v_1.AuxInt
+ if !(d != 0) {
+ break
+ }
+ v.reset(OpConst64)
+ v.AuxInt = c % d
+ return true
+ }
+ // match: (Mod64 <t> n (Const64 [c]))
+ // cond: isNonNegative(n) && isPowerOfTwo(c)
+ // result: (And64 n (Const64 <t> [c-1]))
for {
t := v.Type
_ = v.Args[1]
n := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(isNonNegative(n) && isPowerOfTwo(c)) {
+ break
+ }
+ v.reset(OpAnd64)
+ v.AddArg(n)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = c - 1
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Mod64 n (Const64 [-1<<63]))
+ // cond: isNonNegative(n)
+ // result: n
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ if v_1.AuxInt != -1<<63 {
+ break
+ }
+ if !(isNonNegative(n)) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = n.Type
+ v.AddArg(n)
+ return true
+ }
+ // match: (Mod64 <t> n (Const64 [c]))
+ // cond: c < 0 && c != -1<<63
+ // result: (Mod64 <t> n (Const64 <t> [-c]))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(c < 0 && c != -1<<63) {
+ break
+ }
+ v.reset(OpMod64)
+ v.Type = t
+ v.AddArg(n)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = -c
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Mod64 <t> x (Const64 [c]))
+ // cond: x.Op != OpConst64 && (c > 0 || c == -1<<63)
+ // result: (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c])))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(x.Op != OpConst64 && (c > 0 || c == -1<<63)) {
+ break
+ }
+ v.reset(OpSub64)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpMul64, t)
+ v1 := b.NewValue0(v.Pos, OpDiv64, t)
+ v1.AddArg(x)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = c
+ v1.AddArg(v2)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, t)
+ v3.AuxInt = c
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ return true
+ }
+ return false
+}
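// An illustrative sketch, not part of the generated rewrites: two Mod64
// rules above exist only to dodge the int64 minimum. Negating -1<<63
// overflows, so the sign-normalization rule excludes it, and a dedicated
// rule uses the fact that any non-negative n is already its own
// remainder by -1<<63:
//
//	n := int64(42)
//	_ = n % (-1 << 63) // == n whenever 0 <= n < 1<<63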
+func rewriteValuegeneric_OpMod64u_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ // match: (Mod64u (Const64 [c]) (Const64 [d]))
+ // cond: d != 0
+ // result: (Const64 [int64(uint64(c) % uint64(d))])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ d := v_1.AuxInt
+ if !(d != 0) {
+ break
+ }
+ v.reset(OpConst64)
+ v.AuxInt = int64(uint64(c) % uint64(d))
+ return true
+ }
+ // match: (Mod64u <t> n (Const64 [c]))
+ // cond: isPowerOfTwo(c)
+ // result: (And64 n (Const64 <t> [c-1]))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(isPowerOfTwo(c)) {
+ break
+ }
+ v.reset(OpAnd64)
+ v.AddArg(n)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = c - 1
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Mod64u <t> n (Const64 [-1<<63]))
+ // cond:
+ // result: (And64 n (Const64 <t> [1<<63-1]))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ if v_1.AuxInt != -1<<63 {
+ break
+ }
+ v.reset(OpAnd64)
+ v.AddArg(n)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = 1<<63 - 1
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Mod64u <t> x (Const64 [c]))
+ // cond: x.Op != OpConst64 && c > 0 && umagicOK(64,c)
+ // result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c])))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(x.Op != OpConst64 && c > 0 && umagicOK(64, c)) {
+ break
+ }
+ v.reset(OpSub64)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpMul64, t)
+ v1 := b.NewValue0(v.Pos, OpDiv64u, t)
+ v1.AddArg(x)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = c
+ v1.AddArg(v2)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, t)
+ v3.AuxInt = c
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpMod8_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ // match: (Mod8 (Const8 [c]) (Const8 [d]))
+ // cond: d != 0
+ // result: (Const8 [int64(int8(c % d))])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst8 {
+ break
+ }
+ d := v_1.AuxInt
+ if !(d != 0) {
+ break
+ }
+ v.reset(OpConst8)
+ v.AuxInt = int64(int8(c % d))
+ return true
+ }
+ // match: (Mod8 <t> n (Const8 [c]))
+ // cond: isNonNegative(n) && isPowerOfTwo(c&0xff)
+ // result: (And8 n (Const8 <t> [(c&0xff)-1]))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst8 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(isNonNegative(n) && isPowerOfTwo(c&0xff)) {
+ break
+ }
+ v.reset(OpAnd8)
+ v.AddArg(n)
+ v0 := b.NewValue0(v.Pos, OpConst8, t)
+ v0.AuxInt = (c & 0xff) - 1
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Mod8 <t> n (Const8 [c]))
+ // cond: c < 0 && c != -1<<7
+ // result: (Mod8 <t> n (Const8 <t> [-c]))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst8 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(c < 0 && c != -1<<7) {
+ break
+ }
+ v.reset(OpMod8)
+ v.Type = t
+ v.AddArg(n)
+ v0 := b.NewValue0(v.Pos, OpConst8, t)
+ v0.AuxInt = -c
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Mod8 <t> x (Const8 [c]))
+ // cond: x.Op != OpConst8 && (c > 0 || c == -1<<7)
+ // result: (Sub8 x (Mul8 <t> (Div8 <t> x (Const8 <t> [c])) (Const8 <t> [c])))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst8 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(x.Op != OpConst8 && (c > 0 || c == -1<<7)) {
+ break
+ }
+ v.reset(OpSub8)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpMul8, t)
+ v1 := b.NewValue0(v.Pos, OpDiv8, t)
+ v1.AddArg(x)
+ v2 := b.NewValue0(v.Pos, OpConst8, t)
+ v2.AuxInt = c
+ v1.AddArg(v2)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, t)
+ v3.AuxInt = c
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ return true
+ }
+ return false
+}
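// An illustrative sketch, not part of the generated rewrites: the last
// Mod8 rule expands a remainder into sub/mul/div so later passes can
// strength-reduce the division. It leans on the Go definition of %,
// shown here with hypothetical operands:
//
//	x, c := int8(-29), int8(8)
//	_ = x%c == x-(x/c)*c // always true for truncated division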
+func rewriteValuegeneric_OpMod8u_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ // match: (Mod8u (Const8 [c]) (Const8 [d]))
+ // cond: d != 0
+ // result: (Const8 [int64(uint8(c) % uint8(d))])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst8 {
+ break
+ }
+ d := v_1.AuxInt
+ if !(d != 0) {
+ break
+ }
+ v.reset(OpConst8)
+ v.AuxInt = int64(uint8(c) % uint8(d))
+ return true
+ }
+ // match: (Mod8u <t> n (Const8 [c]))
+ // cond: isPowerOfTwo(c&0xff)
+ // result: (And8 n (Const8 <t> [(c&0xff)-1]))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst8 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(isPowerOfTwo(c & 0xff)) {
+ break
+ }
+ v.reset(OpAnd8)
+ v.AddArg(n)
+ v0 := b.NewValue0(v.Pos, OpConst8, t)
+ v0.AuxInt = (c & 0xff) - 1
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Mod8u <t> x (Const8 [c]))
+	// cond: x.Op != OpConst8 && c > 0 && umagicOK(8, c)
+ // result: (Sub8 x (Mul8 <t> (Div8u <t> x (Const8 <t> [c])) (Const8 <t> [c])))
+ for {
+ t := v.Type
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst8 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(x.Op != OpConst8 && c > 0 && umagicOK(8, c)) {
+ break
+ }
+ v.reset(OpSub8)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpMul8, t)
+ v1 := b.NewValue0(v.Pos, OpDiv8u, t)
+ v1.AddArg(x)
+ v2 := b.NewValue0(v.Pos, OpConst8, t)
+ v2.AuxInt = c
+ v1.AddArg(v2)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, t)
+ v3.AuxInt = c
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ return true
+ }
+ return false
+}
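// An illustrative sketch, not part of the generated rewrites: for an
// unsigned remainder by a power of two the And rule above needs no
// non-negativity check, since there is no sign bit to leak in:
//
//	x := uint8(0xAB)
//	_ = x%8 == x&7 // the (And8 n (Const8 <t> [(c&0xff)-1])) form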
+func rewriteValuegeneric_OpMove_0(v *Value) bool {
+ b := v.Block
+ _ = b
+ // match: (Move {t} [n] dst1 src mem:(Zero {t} [n] dst2 _))
+ // cond: isSamePtr(src, dst2)
+ // result: (Zero {t} [n] dst1 mem)
+ for {
+ n := v.AuxInt
+ t := v.Aux
+ _ = v.Args[2]
+ dst1 := v.Args[0]
+ src := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpZero {
+ break
+ }
+ if mem.AuxInt != n {
+ break
+ }
+ if mem.Aux != t {
+ break
+ }
+ _ = mem.Args[1]
+ dst2 := mem.Args[0]
+ if !(isSamePtr(src, dst2)) {
+ break
+ }
+ v.reset(OpZero)
+ v.AuxInt = n
+ v.Aux = t
+ v.AddArg(dst1)
+ v.AddArg(mem)
+ return true
+ }
+ // match: (Move {t} [n] dst1 src mem:(VarDef (Zero {t} [n] dst0 _)))
+ // cond: isSamePtr(src, dst0)
+ // result: (Zero {t} [n] dst1 mem)
+ for {
+ n := v.AuxInt
+ t := v.Aux
+ _ = v.Args[2]
+ dst1 := v.Args[0]
+ src := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpVarDef {
+ break
+ }
+ mem_0 := mem.Args[0]
+ if mem_0.Op != OpZero {
+ break
+ }
+ if mem_0.AuxInt != n {
+ break
+ }
+ if mem_0.Aux != t {
+ break
+ }
+ _ = mem_0.Args[1]
+ dst0 := mem_0.Args[0]
+ if !(isSamePtr(src, dst0)) {
+ break
+ }
+ v.reset(OpZero)
+ v.AuxInt = n
+ v.Aux = t
+ v.AddArg(dst1)
+ v.AddArg(mem)
+ return true
+ }
+ // match: (Move {t1} [n] dst1 src1 store:(Store {t2} op:(OffPtr [o2] dst2) _ mem))
+ // cond: isSamePtr(dst1, dst2) && store.Uses == 1 && n >= o2 + sizeof(t2) && disjoint(src1, n, op, sizeof(t2)) && clobber(store)
+ // result: (Move {t1} [n] dst1 src1 mem)
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst1 := v.Args[0]
+ src1 := v.Args[1]
+ store := v.Args[2]
+ if store.Op != OpStore {
+ break
+ }
+ t2 := store.Aux
+ _ = store.Args[2]
+ op := store.Args[0]
+ if op.Op != OpOffPtr {
+ break
+ }
+ o2 := op.AuxInt
+ dst2 := op.Args[0]
+ mem := store.Args[2]
+ if !(isSamePtr(dst1, dst2) && store.Uses == 1 && n >= o2+sizeof(t2) && disjoint(src1, n, op, sizeof(t2)) && clobber(store)) {
+ break
+ }
+ v.reset(OpMove)
+ v.AuxInt = n
+ v.Aux = t1
+ v.AddArg(dst1)
+ v.AddArg(src1)
+ v.AddArg(mem)
+ return true
+ }
+ // match: (Move {t} [n] dst1 src1 move:(Move {t} [n] dst2 _ mem))
+ // cond: move.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move)
+ // result: (Move {t} [n] dst1 src1 mem)
+ for {
+ n := v.AuxInt
+ t := v.Aux
+ _ = v.Args[2]
+ dst1 := v.Args[0]
+ src1 := v.Args[1]
+ move := v.Args[2]
+ if move.Op != OpMove {
+ break
+ }
+ if move.AuxInt != n {
+ break
+ }
+ if move.Aux != t {
+ break
+ }
+ _ = move.Args[2]
+ dst2 := move.Args[0]
+ mem := move.Args[2]
+ if !(move.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move)) {
+ break
+ }
+ v.reset(OpMove)
+ v.AuxInt = n
+ v.Aux = t
+ v.AddArg(dst1)
+ v.AddArg(src1)
+ v.AddArg(mem)
+ return true
+ }
+ // match: (Move {t} [n] dst1 src1 vardef:(VarDef {x} move:(Move {t} [n] dst2 _ mem)))
+ // cond: move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move) && clobber(vardef)
+ // result: (Move {t} [n] dst1 src1 (VarDef {x} mem))
+ for {
+ n := v.AuxInt
+ t := v.Aux
+ _ = v.Args[2]
+ dst1 := v.Args[0]
+ src1 := v.Args[1]
+ vardef := v.Args[2]
+ if vardef.Op != OpVarDef {
+ break
+ }
+ x := vardef.Aux
+ move := vardef.Args[0]
+ if move.Op != OpMove {
+ break
+ }
+ if move.AuxInt != n {
+ break
+ }
+ if move.Aux != t {
+ break
+ }
+ _ = move.Args[2]
+ dst2 := move.Args[0]
+ mem := move.Args[2]
+ if !(move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move) && clobber(vardef)) {
+ break
+ }
+ v.reset(OpMove)
+ v.AuxInt = n
+ v.Aux = t
+ v.AddArg(dst1)
+ v.AddArg(src1)
+ v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
+ v0.Aux = x
+ v0.AddArg(mem)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Move {t} [n] dst1 src1 zero:(Zero {t} [n] dst2 mem))
+ // cond: zero.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero)
+ // result: (Move {t} [n] dst1 src1 mem)
+ for {
+ n := v.AuxInt
+ t := v.Aux
+ _ = v.Args[2]
+ dst1 := v.Args[0]
+ src1 := v.Args[1]
+ zero := v.Args[2]
+ if zero.Op != OpZero {
+ break
+ }
+ if zero.AuxInt != n {
+ break
+ }
+ if zero.Aux != t {
+ break
+ }
+ _ = zero.Args[1]
+ dst2 := zero.Args[0]
+ mem := zero.Args[1]
+ if !(zero.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero)) {
+ break
+ }
+ v.reset(OpMove)
+ v.AuxInt = n
+ v.Aux = t
+ v.AddArg(dst1)
+ v.AddArg(src1)
+ v.AddArg(mem)
+ return true
+ }
+ // match: (Move {t} [n] dst1 src1 vardef:(VarDef {x} zero:(Zero {t} [n] dst2 mem)))
+ // cond: zero.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero) && clobber(vardef)
+ // result: (Move {t} [n] dst1 src1 (VarDef {x} mem))
+ for {
+ n := v.AuxInt
+ t := v.Aux
+ _ = v.Args[2]
+ dst1 := v.Args[0]
+ src1 := v.Args[1]
+ vardef := v.Args[2]
+ if vardef.Op != OpVarDef {
+ break
+ }
+ x := vardef.Aux
+ zero := vardef.Args[0]
+ if zero.Op != OpZero {
+ break
+ }
+ if zero.AuxInt != n {
+ break
+ }
+ if zero.Aux != t {
+ break
+ }
+ _ = zero.Args[1]
+ dst2 := zero.Args[0]
+ mem := zero.Args[1]
+ if !(zero.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero) && clobber(vardef)) {
+ break
+ }
+ v.reset(OpMove)
+ v.AuxInt = n
+ v.Aux = t
+ v.AddArg(dst1)
+ v.AddArg(src1)
+ v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
+ v0.Aux = x
+ v0.AddArg(mem)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr [o2] p2) d1 (Store {t3} op3:(OffPtr [0] p3) d2 _)))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && o2 == sizeof(t3) && n == sizeof(t2) + sizeof(t3)
+ // result: (Store {t2} (OffPtr <t2.(*types.Type)> [o2] dst) d1 (Store {t3} (OffPtr <t3.(*types.Type)> [0] dst) d2 mem))
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpStore {
+ break
+ }
+ t2 := mem.Aux
+ _ = mem.Args[2]
+ op2 := mem.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ o2 := op2.AuxInt
+ p2 := op2.Args[0]
+ d1 := mem.Args[1]
+ mem_2 := mem.Args[2]
+ if mem_2.Op != OpStore {
+ break
+ }
+ t3 := mem_2.Aux
+ _ = mem_2.Args[2]
+ op3 := mem_2.Args[0]
+ if op3.Op != OpOffPtr {
+ break
+ }
+ if op3.AuxInt != 0 {
+ break
+ }
+ p3 := op3.Args[0]
+ d2 := mem_2.Args[1]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && o2 == sizeof(t3) && n == sizeof(t2)+sizeof(t3)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, t2.(*types.Type))
+ v0.AuxInt = o2
+ v0.AddArg(dst)
+ v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v2 := b.NewValue0(v.Pos, OpOffPtr, t3.(*types.Type))
+ v2.AuxInt = 0
+ v2.AddArg(dst)
+ v1.AddArg(v2)
+ v1.AddArg(d2)
+ v1.AddArg(mem)
+ v.AddArg(v1)
+ return true
+ }
+ // match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr [o2] p2) d1 (Store {t3} op3:(OffPtr [o3] p3) d2 (Store {t4} op4:(OffPtr [0] p4) d3 _))))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == sizeof(t4) && o2-o3 == sizeof(t3) && n == sizeof(t2) + sizeof(t3) + sizeof(t4)
+ // result: (Store {t2} (OffPtr <t2.(*types.Type)> [o2] dst) d1 (Store {t3} (OffPtr <t3.(*types.Type)> [o3] dst) d2 (Store {t4} (OffPtr <t4.(*types.Type)> [0] dst) d3 mem)))
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpStore {
+ break
+ }
+ t2 := mem.Aux
+ _ = mem.Args[2]
+ op2 := mem.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ o2 := op2.AuxInt
+ p2 := op2.Args[0]
+ d1 := mem.Args[1]
+ mem_2 := mem.Args[2]
+ if mem_2.Op != OpStore {
+ break
+ }
+ t3 := mem_2.Aux
+ _ = mem_2.Args[2]
+ op3 := mem_2.Args[0]
+ if op3.Op != OpOffPtr {
+ break
+ }
+ o3 := op3.AuxInt
+ p3 := op3.Args[0]
+ d2 := mem_2.Args[1]
+ mem_2_2 := mem_2.Args[2]
+ if mem_2_2.Op != OpStore {
+ break
+ }
+ t4 := mem_2_2.Aux
+ _ = mem_2_2.Args[2]
+ op4 := mem_2_2.Args[0]
+ if op4.Op != OpOffPtr {
+ break
+ }
+ if op4.AuxInt != 0 {
+ break
+ }
+ p4 := op4.Args[0]
+ d3 := mem_2_2.Args[1]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == sizeof(t4) && o2-o3 == sizeof(t3) && n == sizeof(t2)+sizeof(t3)+sizeof(t4)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, t2.(*types.Type))
+ v0.AuxInt = o2
+ v0.AddArg(dst)
+ v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v2 := b.NewValue0(v.Pos, OpOffPtr, t3.(*types.Type))
+ v2.AuxInt = o3
+ v2.AddArg(dst)
+ v1.AddArg(v2)
+ v1.AddArg(d2)
+ v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v3.Aux = t4
+ v4 := b.NewValue0(v.Pos, OpOffPtr, t4.(*types.Type))
+ v4.AuxInt = 0
+ v4.AddArg(dst)
+ v3.AddArg(v4)
+ v3.AddArg(d3)
+ v3.AddArg(mem)
+ v1.AddArg(v3)
+ v.AddArg(v1)
+ return true
+ }
+ // match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr [o2] p2) d1 (Store {t3} op3:(OffPtr [o3] p3) d2 (Store {t4} op4:(OffPtr [o4] p4) d3 (Store {t5} op5:(OffPtr [0] p5) d4 _)))))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && alignof(t5) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == sizeof(t5) && o3-o4 == sizeof(t4) && o2-o3 == sizeof(t3) && n == sizeof(t2) + sizeof(t3) + sizeof(t4) + sizeof(t5)
+ // result: (Store {t2} (OffPtr <t2.(*types.Type)> [o2] dst) d1 (Store {t3} (OffPtr <t3.(*types.Type)> [o3] dst) d2 (Store {t4} (OffPtr <t4.(*types.Type)> [o4] dst) d3 (Store {t5} (OffPtr <t5.(*types.Type)> [0] dst) d4 mem))))
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpStore {
+ break
+ }
+ t2 := mem.Aux
+ _ = mem.Args[2]
+ op2 := mem.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ o2 := op2.AuxInt
+ p2 := op2.Args[0]
+ d1 := mem.Args[1]
+ mem_2 := mem.Args[2]
+ if mem_2.Op != OpStore {
+ break
+ }
+ t3 := mem_2.Aux
+ _ = mem_2.Args[2]
+ op3 := mem_2.Args[0]
+ if op3.Op != OpOffPtr {
+ break
+ }
+ o3 := op3.AuxInt
+ p3 := op3.Args[0]
+ d2 := mem_2.Args[1]
+ mem_2_2 := mem_2.Args[2]
+ if mem_2_2.Op != OpStore {
+ break
+ }
+ t4 := mem_2_2.Aux
+ _ = mem_2_2.Args[2]
+ op4 := mem_2_2.Args[0]
+ if op4.Op != OpOffPtr {
+ break
+ }
+ o4 := op4.AuxInt
+ p4 := op4.Args[0]
+ d3 := mem_2_2.Args[1]
+ mem_2_2_2 := mem_2_2.Args[2]
+ if mem_2_2_2.Op != OpStore {
+ break
+ }
+ t5 := mem_2_2_2.Aux
+ _ = mem_2_2_2.Args[2]
+ op5 := mem_2_2_2.Args[0]
+ if op5.Op != OpOffPtr {
+ break
+ }
+ if op5.AuxInt != 0 {
+ break
+ }
+ p5 := op5.Args[0]
+ d4 := mem_2_2_2.Args[1]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && alignof(t5) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == sizeof(t5) && o3-o4 == sizeof(t4) && o2-o3 == sizeof(t3) && n == sizeof(t2)+sizeof(t3)+sizeof(t4)+sizeof(t5)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, t2.(*types.Type))
+ v0.AuxInt = o2
+ v0.AddArg(dst)
+ v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v2 := b.NewValue0(v.Pos, OpOffPtr, t3.(*types.Type))
+ v2.AuxInt = o3
+ v2.AddArg(dst)
+ v1.AddArg(v2)
+ v1.AddArg(d2)
+ v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v3.Aux = t4
+ v4 := b.NewValue0(v.Pos, OpOffPtr, t4.(*types.Type))
+ v4.AuxInt = o4
+ v4.AddArg(dst)
+ v3.AddArg(v4)
+ v3.AddArg(d3)
+ v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v5.Aux = t5
+ v6 := b.NewValue0(v.Pos, OpOffPtr, t5.(*types.Type))
+ v6.AuxInt = 0
+ v6.AddArg(dst)
+ v5.AddArg(v6)
+ v5.AddArg(d4)
+ v5.AddArg(mem)
+ v3.AddArg(v5)
+ v1.AddArg(v3)
+ v.AddArg(v1)
+ return true
+ }
+ return false
+}
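// An illustrative sketch, not part of the generated rewrites: the first
// Move rules above fold a copy whose source was just zeroed into a
// direct zeroing of the destination. In source terms, with T a
// hypothetical struct type, the transformation is roughly:
//
//	*src = T{}  // the Zero; isSamePtr(src, dst2) checks this really is the copy's source
//	*dst = *src // the Move of known zeros...
//	// ...rewrites to:
//	*dst = T{}  // (Zero {t} [n] dst1 mem)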
+func rewriteValuegeneric_OpMove_10(v *Value) bool {
+ b := v.Block
+ _ = b
+ // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr [o2] p2) d1 (Store {t3} op3:(OffPtr [0] p3) d2 _))))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && o2 == sizeof(t3) && n == sizeof(t2) + sizeof(t3)
+ // result: (Store {t2} (OffPtr <t2.(*types.Type)> [o2] dst) d1 (Store {t3} (OffPtr <t3.(*types.Type)> [0] dst) d2 mem))
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpVarDef {
+ break
+ }
+ mem_0 := mem.Args[0]
+ if mem_0.Op != OpStore {
+ break
+ }
+ t2 := mem_0.Aux
+ _ = mem_0.Args[2]
+ op2 := mem_0.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ o2 := op2.AuxInt
+ p2 := op2.Args[0]
+ d1 := mem_0.Args[1]
+ mem_0_2 := mem_0.Args[2]
+ if mem_0_2.Op != OpStore {
+ break
+ }
+ t3 := mem_0_2.Aux
+ _ = mem_0_2.Args[2]
+ op3 := mem_0_2.Args[0]
+ if op3.Op != OpOffPtr {
+ break
+ }
+ if op3.AuxInt != 0 {
+ break
+ }
+ p3 := op3.Args[0]
+ d2 := mem_0_2.Args[1]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && o2 == sizeof(t3) && n == sizeof(t2)+sizeof(t3)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, t2.(*types.Type))
+ v0.AuxInt = o2
+ v0.AddArg(dst)
+ v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v2 := b.NewValue0(v.Pos, OpOffPtr, t3.(*types.Type))
+ v2.AuxInt = 0
+ v2.AddArg(dst)
+ v1.AddArg(v2)
+ v1.AddArg(d2)
+ v1.AddArg(mem)
+ v.AddArg(v1)
+ return true
+ }
+ // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr [o2] p2) d1 (Store {t3} op3:(OffPtr [o3] p3) d2 (Store {t4} op4:(OffPtr [0] p4) d3 _)))))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == sizeof(t4) && o2-o3 == sizeof(t3) && n == sizeof(t2) + sizeof(t3) + sizeof(t4)
+ // result: (Store {t2} (OffPtr <t2.(*types.Type)> [o2] dst) d1 (Store {t3} (OffPtr <t3.(*types.Type)> [o3] dst) d2 (Store {t4} (OffPtr <t4.(*types.Type)> [0] dst) d3 mem)))
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpVarDef {
+ break
+ }
+ mem_0 := mem.Args[0]
+ if mem_0.Op != OpStore {
+ break
+ }
+ t2 := mem_0.Aux
+ _ = mem_0.Args[2]
+ op2 := mem_0.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ o2 := op2.AuxInt
+ p2 := op2.Args[0]
+ d1 := mem_0.Args[1]
+ mem_0_2 := mem_0.Args[2]
+ if mem_0_2.Op != OpStore {
+ break
+ }
+ t3 := mem_0_2.Aux
+ _ = mem_0_2.Args[2]
+ op3 := mem_0_2.Args[0]
+ if op3.Op != OpOffPtr {
+ break
+ }
+ o3 := op3.AuxInt
+ p3 := op3.Args[0]
+ d2 := mem_0_2.Args[1]
+ mem_0_2_2 := mem_0_2.Args[2]
+ if mem_0_2_2.Op != OpStore {
+ break
+ }
+ t4 := mem_0_2_2.Aux
+ _ = mem_0_2_2.Args[2]
+ op4 := mem_0_2_2.Args[0]
+ if op4.Op != OpOffPtr {
+ break
+ }
+ if op4.AuxInt != 0 {
+ break
+ }
+ p4 := op4.Args[0]
+ d3 := mem_0_2_2.Args[1]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == sizeof(t4) && o2-o3 == sizeof(t3) && n == sizeof(t2)+sizeof(t3)+sizeof(t4)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, t2.(*types.Type))
+ v0.AuxInt = o2
+ v0.AddArg(dst)
+ v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v2 := b.NewValue0(v.Pos, OpOffPtr, t3.(*types.Type))
+ v2.AuxInt = o3
+ v2.AddArg(dst)
+ v1.AddArg(v2)
+ v1.AddArg(d2)
+ v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v3.Aux = t4
+ v4 := b.NewValue0(v.Pos, OpOffPtr, t4.(*types.Type))
+ v4.AuxInt = 0
+ v4.AddArg(dst)
+ v3.AddArg(v4)
+ v3.AddArg(d3)
+ v3.AddArg(mem)
+ v1.AddArg(v3)
+ v.AddArg(v1)
+ return true
+ }
+ // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr [o2] p2) d1 (Store {t3} op3:(OffPtr [o3] p3) d2 (Store {t4} op4:(OffPtr [o4] p4) d3 (Store {t5} op5:(OffPtr [0] p5) d4 _))))))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && alignof(t5) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == sizeof(t5) && o3-o4 == sizeof(t4) && o2-o3 == sizeof(t3) && n == sizeof(t2) + sizeof(t3) + sizeof(t4) + sizeof(t5)
+ // result: (Store {t2} (OffPtr <t2.(*types.Type)> [o2] dst) d1 (Store {t3} (OffPtr <t3.(*types.Type)> [o3] dst) d2 (Store {t4} (OffPtr <t4.(*types.Type)> [o4] dst) d3 (Store {t5} (OffPtr <t5.(*types.Type)> [0] dst) d4 mem))))
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpVarDef {
+ break
+ }
+ mem_0 := mem.Args[0]
+ if mem_0.Op != OpStore {
+ break
+ }
+ t2 := mem_0.Aux
+ _ = mem_0.Args[2]
+ op2 := mem_0.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ o2 := op2.AuxInt
+ p2 := op2.Args[0]
+ d1 := mem_0.Args[1]
+ mem_0_2 := mem_0.Args[2]
+ if mem_0_2.Op != OpStore {
+ break
+ }
+ t3 := mem_0_2.Aux
+ _ = mem_0_2.Args[2]
+ op3 := mem_0_2.Args[0]
+ if op3.Op != OpOffPtr {
+ break
+ }
+ o3 := op3.AuxInt
+ p3 := op3.Args[0]
+ d2 := mem_0_2.Args[1]
+ mem_0_2_2 := mem_0_2.Args[2]
+ if mem_0_2_2.Op != OpStore {
+ break
+ }
+ t4 := mem_0_2_2.Aux
+ _ = mem_0_2_2.Args[2]
+ op4 := mem_0_2_2.Args[0]
+ if op4.Op != OpOffPtr {
+ break
+ }
+ o4 := op4.AuxInt
+ p4 := op4.Args[0]
+ d3 := mem_0_2_2.Args[1]
+ mem_0_2_2_2 := mem_0_2_2.Args[2]
+ if mem_0_2_2_2.Op != OpStore {
+ break
+ }
+ t5 := mem_0_2_2_2.Aux
+ _ = mem_0_2_2_2.Args[2]
+ op5 := mem_0_2_2_2.Args[0]
+ if op5.Op != OpOffPtr {
+ break
+ }
+ if op5.AuxInt != 0 {
+ break
+ }
+ p5 := op5.Args[0]
+ d4 := mem_0_2_2_2.Args[1]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && alignof(t5) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == sizeof(t5) && o3-o4 == sizeof(t4) && o2-o3 == sizeof(t3) && n == sizeof(t2)+sizeof(t3)+sizeof(t4)+sizeof(t5)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, t2.(*types.Type))
+ v0.AuxInt = o2
+ v0.AddArg(dst)
+ v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v2 := b.NewValue0(v.Pos, OpOffPtr, t3.(*types.Type))
+ v2.AuxInt = o3
+ v2.AddArg(dst)
+ v1.AddArg(v2)
+ v1.AddArg(d2)
+ v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v3.Aux = t4
+ v4 := b.NewValue0(v.Pos, OpOffPtr, t4.(*types.Type))
+ v4.AuxInt = o4
+ v4.AddArg(dst)
+ v3.AddArg(v4)
+ v3.AddArg(d3)
+ v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v5.Aux = t5
+ v6 := b.NewValue0(v.Pos, OpOffPtr, t5.(*types.Type))
+ v6.AuxInt = 0
+ v6.AddArg(dst)
+ v5.AddArg(v6)
+ v5.AddArg(d4)
+ v5.AddArg(mem)
+ v3.AddArg(v5)
+ v1.AddArg(v3)
+ v.AddArg(v1)
+ return true
+ }
+ // match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Zero {t3} [n] p3 _)))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && registerizable(b, t2) && n >= o2 + sizeof(t2)
+ // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Zero {t1} [n] dst mem))
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpStore {
+ break
+ }
+ t2 := mem.Aux
+ _ = mem.Args[2]
+ op2 := mem.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ tt2 := op2.Type
+ o2 := op2.AuxInt
+ p2 := op2.Args[0]
+ d1 := mem.Args[1]
+ mem_2 := mem.Args[2]
+ if mem_2.Op != OpZero {
+ break
+ }
+ if mem_2.AuxInt != n {
+ break
+ }
+ t3 := mem_2.Aux
+ _ = mem_2.Args[1]
+ p3 := mem_2.Args[0]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && registerizable(b, t2) && n >= o2+sizeof(t2)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
+ v0.AuxInt = o2
+ v0.AddArg(dst)
+ v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
+ v1.AuxInt = n
+ v1.Aux = t1
+ v1.AddArg(dst)
+ v1.AddArg(mem)
+ v.AddArg(v1)
+ return true
+ }
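+	// Here the Move source was zeroed and then partially overwritten by a
+	// Store; the rewrite replays the Store on dst on top of a Zero of
+	// dst, so the temporary is bypassed. The Store must land inside the
+	// zeroed region (n >= o2+sizeof(t2)); the old Stores to the temporary
+	// stay behind as the new Zero's incoming memory and are left for
+	// dead-store elimination. The rules below extend this to chains of
+	// up to four Stores over the Zero.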
+ // match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Zero {t4} [n] p4 _))))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && n >= o2 + sizeof(t2) && n >= o3 + sizeof(t3)
+ // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Zero {t1} [n] dst mem)))
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpStore {
+ break
+ }
+ t2 := mem.Aux
+ _ = mem.Args[2]
+ mem_0 := mem.Args[0]
+ if mem_0.Op != OpOffPtr {
+ break
+ }
+ tt2 := mem_0.Type
+ o2 := mem_0.AuxInt
+ p2 := mem_0.Args[0]
+ d1 := mem.Args[1]
+ mem_2 := mem.Args[2]
+ if mem_2.Op != OpStore {
+ break
+ }
+ t3 := mem_2.Aux
+ _ = mem_2.Args[2]
+ mem_2_0 := mem_2.Args[0]
+ if mem_2_0.Op != OpOffPtr {
+ break
+ }
+ tt3 := mem_2_0.Type
+ o3 := mem_2_0.AuxInt
+ p3 := mem_2_0.Args[0]
+ d2 := mem_2.Args[1]
+ mem_2_2 := mem_2.Args[2]
+ if mem_2_2.Op != OpZero {
+ break
+ }
+ if mem_2_2.AuxInt != n {
+ break
+ }
+ t4 := mem_2_2.Aux
+ _ = mem_2_2.Args[1]
+ p4 := mem_2_2.Args[0]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && n >= o2+sizeof(t2) && n >= o3+sizeof(t3)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
+ v0.AuxInt = o2
+ v0.AddArg(dst)
+ v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
+ v2.AuxInt = o3
+ v2.AddArg(dst)
+ v1.AddArg(v2)
+ v1.AddArg(d2)
+ v3 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
+ v3.AuxInt = n
+ v3.Aux = t1
+ v3.AddArg(dst)
+ v3.AddArg(mem)
+ v1.AddArg(v3)
+ v.AddArg(v1)
+ return true
+ }
+ // match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Zero {t5} [n] p5 _)))))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && alignof(t5) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2 + sizeof(t2) && n >= o3 + sizeof(t3) && n >= o4 + sizeof(t4)
+ // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Zero {t1} [n] dst mem))))
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpStore {
+ break
+ }
+ t2 := mem.Aux
+ _ = mem.Args[2]
+ mem_0 := mem.Args[0]
+ if mem_0.Op != OpOffPtr {
+ break
+ }
+ tt2 := mem_0.Type
+ o2 := mem_0.AuxInt
+ p2 := mem_0.Args[0]
+ d1 := mem.Args[1]
+ mem_2 := mem.Args[2]
+ if mem_2.Op != OpStore {
+ break
+ }
+ t3 := mem_2.Aux
+ _ = mem_2.Args[2]
+ mem_2_0 := mem_2.Args[0]
+ if mem_2_0.Op != OpOffPtr {
+ break
+ }
+ tt3 := mem_2_0.Type
+ o3 := mem_2_0.AuxInt
+ p3 := mem_2_0.Args[0]
+ d2 := mem_2.Args[1]
+ mem_2_2 := mem_2.Args[2]
+ if mem_2_2.Op != OpStore {
+ break
+ }
+ t4 := mem_2_2.Aux
+ _ = mem_2_2.Args[2]
+ mem_2_2_0 := mem_2_2.Args[0]
+ if mem_2_2_0.Op != OpOffPtr {
+ break
+ }
+ tt4 := mem_2_2_0.Type
+ o4 := mem_2_2_0.AuxInt
+ p4 := mem_2_2_0.Args[0]
+ d3 := mem_2_2.Args[1]
+ mem_2_2_2 := mem_2_2.Args[2]
+ if mem_2_2_2.Op != OpZero {
+ break
+ }
+ if mem_2_2_2.AuxInt != n {
+ break
+ }
+ t5 := mem_2_2_2.Aux
+ _ = mem_2_2_2.Args[1]
+ p5 := mem_2_2_2.Args[0]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && alignof(t5) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2+sizeof(t2) && n >= o3+sizeof(t3) && n >= o4+sizeof(t4)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
+ v0.AuxInt = o2
+ v0.AddArg(dst)
+ v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
+ v2.AuxInt = o3
+ v2.AddArg(dst)
+ v1.AddArg(v2)
+ v1.AddArg(d2)
+ v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v3.Aux = t4
+ v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
+ v4.AuxInt = o4
+ v4.AddArg(dst)
+ v3.AddArg(v4)
+ v3.AddArg(d3)
+ v5 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
+ v5.AuxInt = n
+ v5.Aux = t1
+ v5.AddArg(dst)
+ v5.AddArg(mem)
+ v3.AddArg(v5)
+ v1.AddArg(v3)
+ v.AddArg(v1)
+ return true
+ }
+ // match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Store {t5} (OffPtr <tt5> [o5] p5) d4 (Zero {t6} [n] p6 _))))))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && alignof(t5) <= alignof(t1) && alignof(t6) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2 + sizeof(t2) && n >= o3 + sizeof(t3) && n >= o4 + sizeof(t4) && n >= o5 + sizeof(t5)
+ // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [o5] dst) d4 (Zero {t1} [n] dst mem)))))
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpStore {
+ break
+ }
+ t2 := mem.Aux
+ _ = mem.Args[2]
+ mem_0 := mem.Args[0]
+ if mem_0.Op != OpOffPtr {
+ break
+ }
+ tt2 := mem_0.Type
+ o2 := mem_0.AuxInt
+ p2 := mem_0.Args[0]
+ d1 := mem.Args[1]
+ mem_2 := mem.Args[2]
+ if mem_2.Op != OpStore {
+ break
+ }
+ t3 := mem_2.Aux
+ _ = mem_2.Args[2]
+ mem_2_0 := mem_2.Args[0]
+ if mem_2_0.Op != OpOffPtr {
+ break
+ }
+ tt3 := mem_2_0.Type
+ o3 := mem_2_0.AuxInt
+ p3 := mem_2_0.Args[0]
+ d2 := mem_2.Args[1]
+ mem_2_2 := mem_2.Args[2]
+ if mem_2_2.Op != OpStore {
+ break
+ }
+ t4 := mem_2_2.Aux
+ _ = mem_2_2.Args[2]
+ mem_2_2_0 := mem_2_2.Args[0]
+ if mem_2_2_0.Op != OpOffPtr {
+ break
+ }
+ tt4 := mem_2_2_0.Type
+ o4 := mem_2_2_0.AuxInt
+ p4 := mem_2_2_0.Args[0]
+ d3 := mem_2_2.Args[1]
+ mem_2_2_2 := mem_2_2.Args[2]
+ if mem_2_2_2.Op != OpStore {
+ break
+ }
+ t5 := mem_2_2_2.Aux
+ _ = mem_2_2_2.Args[2]
+ mem_2_2_2_0 := mem_2_2_2.Args[0]
+ if mem_2_2_2_0.Op != OpOffPtr {
+ break
+ }
+ tt5 := mem_2_2_2_0.Type
+ o5 := mem_2_2_2_0.AuxInt
+ p5 := mem_2_2_2_0.Args[0]
+ d4 := mem_2_2_2.Args[1]
+ mem_2_2_2_2 := mem_2_2_2.Args[2]
+ if mem_2_2_2_2.Op != OpZero {
+ break
+ }
+ if mem_2_2_2_2.AuxInt != n {
+ break
+ }
+ t6 := mem_2_2_2_2.Aux
+ _ = mem_2_2_2_2.Args[1]
+ p6 := mem_2_2_2_2.Args[0]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && alignof(t5) <= alignof(t1) && alignof(t6) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2+sizeof(t2) && n >= o3+sizeof(t3) && n >= o4+sizeof(t4) && n >= o5+sizeof(t5)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
+ v0.AuxInt = o2
+ v0.AddArg(dst)
+ v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
+ v2.AuxInt = o3
+ v2.AddArg(dst)
+ v1.AddArg(v2)
+ v1.AddArg(d2)
+ v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v3.Aux = t4
+ v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
+ v4.AuxInt = o4
+ v4.AddArg(dst)
+ v3.AddArg(v4)
+ v3.AddArg(d3)
+ v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v5.Aux = t5
+ v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
+ v6.AuxInt = o5
+ v6.AddArg(dst)
+ v5.AddArg(v6)
+ v5.AddArg(d4)
+ v7 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
+ v7.AuxInt = n
+ v7.Aux = t1
+ v7.AddArg(dst)
+ v7.AddArg(mem)
+ v5.AddArg(v7)
+ v3.AddArg(v5)
+ v1.AddArg(v3)
+ v.AddArg(v1)
+ return true
+ }
+ // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Zero {t3} [n] p3 _))))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && registerizable(b, t2) && n >= o2 + sizeof(t2)
+ // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Zero {t1} [n] dst mem))
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpVarDef {
+ break
+ }
+ mem_0 := mem.Args[0]
+ if mem_0.Op != OpStore {
+ break
+ }
+ t2 := mem_0.Aux
+ _ = mem_0.Args[2]
+ op2 := mem_0.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ tt2 := op2.Type
+ o2 := op2.AuxInt
+ p2 := op2.Args[0]
+ d1 := mem_0.Args[1]
+ mem_0_2 := mem_0.Args[2]
+ if mem_0_2.Op != OpZero {
+ break
+ }
+ if mem_0_2.AuxInt != n {
+ break
+ }
+ t3 := mem_0_2.Aux
+ _ = mem_0_2.Args[1]
+ p3 := mem_0_2.Args[0]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && registerizable(b, t2) && n >= o2+sizeof(t2)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
+ v0.AuxInt = o2
+ v0.AddArg(dst)
+ v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
+ v1.AuxInt = n
+ v1.Aux = t1
+ v1.AddArg(dst)
+ v1.AddArg(mem)
+ v.AddArg(v1)
+ return true
+ }
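+	// Same rewrite as the non-VarDef forms above. The VarDef is not
+	// dropped: mem here is the VarDef itself, and the result threads it
+	// through as the Zero's memory argument, so the stack slot's
+	// liveness marker is preserved.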
+ // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Zero {t4} [n] p4 _)))))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && n >= o2 + sizeof(t2) && n >= o3 + sizeof(t3)
+ // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Zero {t1} [n] dst mem)))
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpVarDef {
+ break
+ }
+ mem_0 := mem.Args[0]
+ if mem_0.Op != OpStore {
+ break
+ }
+ t2 := mem_0.Aux
+ _ = mem_0.Args[2]
+ mem_0_0 := mem_0.Args[0]
+ if mem_0_0.Op != OpOffPtr {
+ break
+ }
+ tt2 := mem_0_0.Type
+ o2 := mem_0_0.AuxInt
+ p2 := mem_0_0.Args[0]
+ d1 := mem_0.Args[1]
+ mem_0_2 := mem_0.Args[2]
+ if mem_0_2.Op != OpStore {
+ break
+ }
+ t3 := mem_0_2.Aux
+ _ = mem_0_2.Args[2]
+ mem_0_2_0 := mem_0_2.Args[0]
+ if mem_0_2_0.Op != OpOffPtr {
+ break
+ }
+ tt3 := mem_0_2_0.Type
+ o3 := mem_0_2_0.AuxInt
+ p3 := mem_0_2_0.Args[0]
+ d2 := mem_0_2.Args[1]
+ mem_0_2_2 := mem_0_2.Args[2]
+ if mem_0_2_2.Op != OpZero {
+ break
+ }
+ if mem_0_2_2.AuxInt != n {
+ break
+ }
+ t4 := mem_0_2_2.Aux
+ _ = mem_0_2_2.Args[1]
+ p4 := mem_0_2_2.Args[0]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && n >= o2+sizeof(t2) && n >= o3+sizeof(t3)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
+ v0.AuxInt = o2
+ v0.AddArg(dst)
+ v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
+ v2.AuxInt = o3
+ v2.AddArg(dst)
+ v1.AddArg(v2)
+ v1.AddArg(d2)
+ v3 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
+ v3.AuxInt = n
+ v3.Aux = t1
+ v3.AddArg(dst)
+ v3.AddArg(mem)
+ v1.AddArg(v3)
+ v.AddArg(v1)
+ return true
+ }
+ // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Zero {t5} [n] p5 _))))))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && alignof(t5) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2 + sizeof(t2) && n >= o3 + sizeof(t3) && n >= o4 + sizeof(t4)
+ // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Zero {t1} [n] dst mem))))
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpVarDef {
+ break
+ }
+ mem_0 := mem.Args[0]
+ if mem_0.Op != OpStore {
+ break
+ }
+ t2 := mem_0.Aux
+ _ = mem_0.Args[2]
+ mem_0_0 := mem_0.Args[0]
+ if mem_0_0.Op != OpOffPtr {
+ break
+ }
+ tt2 := mem_0_0.Type
+ o2 := mem_0_0.AuxInt
+ p2 := mem_0_0.Args[0]
+ d1 := mem_0.Args[1]
+ mem_0_2 := mem_0.Args[2]
+ if mem_0_2.Op != OpStore {
+ break
+ }
+ t3 := mem_0_2.Aux
+ _ = mem_0_2.Args[2]
+ mem_0_2_0 := mem_0_2.Args[0]
+ if mem_0_2_0.Op != OpOffPtr {
+ break
+ }
+ tt3 := mem_0_2_0.Type
+ o3 := mem_0_2_0.AuxInt
+ p3 := mem_0_2_0.Args[0]
+ d2 := mem_0_2.Args[1]
+ mem_0_2_2 := mem_0_2.Args[2]
+ if mem_0_2_2.Op != OpStore {
+ break
+ }
+ t4 := mem_0_2_2.Aux
+ _ = mem_0_2_2.Args[2]
+ mem_0_2_2_0 := mem_0_2_2.Args[0]
+ if mem_0_2_2_0.Op != OpOffPtr {
break
}
- c := v_1.AuxInt
- if !(c < 0 && c != -1<<7) {
+ tt4 := mem_0_2_2_0.Type
+ o4 := mem_0_2_2_0.AuxInt
+ p4 := mem_0_2_2_0.Args[0]
+ d3 := mem_0_2_2.Args[1]
+ mem_0_2_2_2 := mem_0_2_2.Args[2]
+ if mem_0_2_2_2.Op != OpZero {
break
}
- v.reset(OpMod8)
- v.Type = t
- v.AddArg(n)
- v0 := b.NewValue0(v.Pos, OpConst8, t)
- v0.AuxInt = -c
- v.AddArg(v0)
- return true
- }
- // match: (Mod8 <t> x (Const8 [c]))
- // cond: x.Op != OpConst8 && (c > 0 || c == -1<<7)
- // result: (Sub8 x (Mul8 <t> (Div8 <t> x (Const8 <t> [c])) (Const8 <t> [c])))
- for {
- t := v.Type
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ if mem_0_2_2_2.AuxInt != n {
break
}
- c := v_1.AuxInt
- if !(x.Op != OpConst8 && (c > 0 || c == -1<<7)) {
+ t5 := mem_0_2_2_2.Aux
+ _ = mem_0_2_2_2.Args[1]
+ p5 := mem_0_2_2_2.Args[0]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && alignof(t5) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2+sizeof(t2) && n >= o3+sizeof(t3) && n >= o4+sizeof(t4)) {
break
}
- v.reset(OpSub8)
- v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpMul8, t)
- v1 := b.NewValue0(v.Pos, OpDiv8, t)
- v1.AddArg(x)
- v2 := b.NewValue0(v.Pos, OpConst8, t)
- v2.AuxInt = c
- v1.AddArg(v2)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst8, t)
- v3.AuxInt = c
- v0.AddArg(v3)
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
+ v0.AuxInt = o2
+ v0.AddArg(dst)
v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
+ v2.AuxInt = o3
+ v2.AddArg(dst)
+ v1.AddArg(v2)
+ v1.AddArg(d2)
+ v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v3.Aux = t4
+ v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
+ v4.AuxInt = o4
+ v4.AddArg(dst)
+ v3.AddArg(v4)
+ v3.AddArg(d3)
+ v5 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
+ v5.AuxInt = n
+ v5.Aux = t1
+ v5.AddArg(dst)
+ v5.AddArg(mem)
+ v3.AddArg(v5)
+ v1.AddArg(v3)
+ v.AddArg(v1)
return true
}
return false
}
-func rewriteValuegeneric_OpMod8u_0(v *Value) bool {
+func rewriteValuegeneric_OpMove_20(v *Value) bool {
b := v.Block
_ = b
- // match: (Mod8u (Const8 [c]) (Const8 [d]))
- // cond: d != 0
- // result: (Const8 [int64(uint8(c) % uint8(d))])
+ // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Store {t5} (OffPtr <tt5> [o5] p5) d4 (Zero {t6} [n] p6 _)))))))
+ // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && alignof(t5) <= alignof(t1) && alignof(t6) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2 + sizeof(t2) && n >= o3 + sizeof(t3) && n >= o4 + sizeof(t4) && n >= o5 + sizeof(t5)
+ // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [o5] dst) d4 (Zero {t1} [n] dst mem)))))
for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[2]
+ dst := v.Args[0]
+ p1 := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpVarDef {
break
}
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ mem_0 := mem.Args[0]
+ if mem_0.Op != OpStore {
break
}
- d := v_1.AuxInt
- if !(d != 0) {
+ t2 := mem_0.Aux
+ _ = mem_0.Args[2]
+ mem_0_0 := mem_0.Args[0]
+ if mem_0_0.Op != OpOffPtr {
break
}
- v.reset(OpConst8)
- v.AuxInt = int64(uint8(c) % uint8(d))
- return true
- }
- // match: (Mod8u <t> n (Const8 [c]))
- // cond: isPowerOfTwo(c&0xff)
- // result: (And8 n (Const8 <t> [(c&0xff)-1]))
- for {
- t := v.Type
- _ = v.Args[1]
- n := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ tt2 := mem_0_0.Type
+ o2 := mem_0_0.AuxInt
+ p2 := mem_0_0.Args[0]
+ d1 := mem_0.Args[1]
+ mem_0_2 := mem_0.Args[2]
+ if mem_0_2.Op != OpStore {
break
}
- c := v_1.AuxInt
- if !(isPowerOfTwo(c & 0xff)) {
+ t3 := mem_0_2.Aux
+ _ = mem_0_2.Args[2]
+ mem_0_2_0 := mem_0_2.Args[0]
+ if mem_0_2_0.Op != OpOffPtr {
break
}
- v.reset(OpAnd8)
- v.AddArg(n)
- v0 := b.NewValue0(v.Pos, OpConst8, t)
- v0.AuxInt = (c & 0xff) - 1
- v.AddArg(v0)
- return true
- }
- // match: (Mod8u <t> x (Const8 [c]))
- // cond: x.Op != OpConst8 && c > 0 && umagicOK(8 ,c)
- // result: (Sub8 x (Mul8 <t> (Div8u <t> x (Const8 <t> [c])) (Const8 <t> [c])))
- for {
- t := v.Type
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ tt3 := mem_0_2_0.Type
+ o3 := mem_0_2_0.AuxInt
+ p3 := mem_0_2_0.Args[0]
+ d2 := mem_0_2.Args[1]
+ mem_0_2_2 := mem_0_2.Args[2]
+ if mem_0_2_2.Op != OpStore {
break
}
- c := v_1.AuxInt
- if !(x.Op != OpConst8 && c > 0 && umagicOK(8, c)) {
+ t4 := mem_0_2_2.Aux
+ _ = mem_0_2_2.Args[2]
+ mem_0_2_2_0 := mem_0_2_2.Args[0]
+ if mem_0_2_2_0.Op != OpOffPtr {
break
}
- v.reset(OpSub8)
- v.AddArg(x)
- v0 := b.NewValue0(v.Pos, OpMul8, t)
- v1 := b.NewValue0(v.Pos, OpDiv8u, t)
- v1.AddArg(x)
- v2 := b.NewValue0(v.Pos, OpConst8, t)
- v2.AuxInt = c
- v1.AddArg(v2)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst8, t)
- v3.AuxInt = c
- v0.AddArg(v3)
+ tt4 := mem_0_2_2_0.Type
+ o4 := mem_0_2_2_0.AuxInt
+ p4 := mem_0_2_2_0.Args[0]
+ d3 := mem_0_2_2.Args[1]
+ mem_0_2_2_2 := mem_0_2_2.Args[2]
+ if mem_0_2_2_2.Op != OpStore {
+ break
+ }
+ t5 := mem_0_2_2_2.Aux
+ _ = mem_0_2_2_2.Args[2]
+ mem_0_2_2_2_0 := mem_0_2_2_2.Args[0]
+ if mem_0_2_2_2_0.Op != OpOffPtr {
+ break
+ }
+ tt5 := mem_0_2_2_2_0.Type
+ o5 := mem_0_2_2_2_0.AuxInt
+ p5 := mem_0_2_2_2_0.Args[0]
+ d4 := mem_0_2_2_2.Args[1]
+ mem_0_2_2_2_2 := mem_0_2_2_2.Args[2]
+ if mem_0_2_2_2_2.Op != OpZero {
+ break
+ }
+ if mem_0_2_2_2_2.AuxInt != n {
+ break
+ }
+ t6 := mem_0_2_2_2_2.Aux
+ _ = mem_0_2_2_2_2.Args[1]
+ p6 := mem_0_2_2_2_2.Args[0]
+ if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && alignof(t5) <= alignof(t1) && alignof(t6) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2+sizeof(t2) && n >= o3+sizeof(t3) && n >= o4+sizeof(t4) && n >= o5+sizeof(t5)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t2
+ v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
+ v0.AuxInt = o2
+ v0.AddArg(dst)
v.AddArg(v0)
+ v.AddArg(d1)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
+ v2.AuxInt = o3
+ v2.AddArg(dst)
+ v1.AddArg(v2)
+ v1.AddArg(d2)
+ v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v3.Aux = t4
+ v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
+ v4.AuxInt = o4
+ v4.AddArg(dst)
+ v3.AddArg(v4)
+ v3.AddArg(d3)
+ v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v5.Aux = t5
+ v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
+ v6.AuxInt = o5
+ v6.AddArg(dst)
+ v5.AddArg(v6)
+ v5.AddArg(d4)
+ v7 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
+ v7.AuxInt = n
+ v7.Aux = t1
+ v7.AddArg(dst)
+ v7.AddArg(mem)
+ v5.AddArg(v7)
+ v3.AddArg(v5)
+ v1.AddArg(v3)
+ v.AddArg(v1)
return true
}
return false
func rewriteValuegeneric_OpStore_0(v *Value) bool {
b := v.Block
_ = b
- fe := b.Func.fe
- _ = fe
// match: (Store {t1} p1 (Load <t2> p2 mem) mem)
- // cond: isSamePtr(p1, p2) && t2.Size() == t1.(*types.Type).Size()
+ // cond: isSamePtr(p1, p2) && t2.Size() == sizeof(t1)
// result: mem
for {
t1 := v.Aux
		_ = v.Args[2]
		p1 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpLoad {
			break
		}
		t2 := v_1.Type
		_ = v_1.Args[1]
		p2 := v_1.Args[0]
		mem := v_1.Args[1]
if mem != v.Args[2] {
break
}
- if !(isSamePtr(p1, p2) && t2.Size() == t1.(*types.Type).Size()) {
+ if !(isSamePtr(p1, p2) && t2.Size() == sizeof(t1)) {
break
}
v.reset(OpCopy)
v.AddArg(mem)
return true
}
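+	// A Store that writes back a value just Loaded from the same address
+	// through the same memory is a no-op and reduces to the incoming
+	// memory. The variants below tolerate up to three intervening Stores
+	// when each is provably disjoint from the written region, e.g.
+	// (illustrative source shape only):
+	//
+	//	x := *p
+	//	*q = 1 // disjoint from *p
+	//	*p = x // dead; becomes the memory after *q = 1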
- // match: (Store {t1} (OffPtr [o1] p1) (Load <t2> (OffPtr [o1] p2) oldmem) mem:(Store {t3} (OffPtr [o3] p3) _ oldmem))
- // cond: isSamePtr(p1, p2) && isSamePtr(p1, p3) && t2.Size() == t1.(*types.Type).Size() && !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size())
+ // match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ oldmem))
+ // cond: isSamePtr(p1, p2) && t2.Size() == sizeof(t1) && disjoint(p1, sizeof(t1), p3, sizeof(t3))
// result: mem
for {
t1 := v.Aux
_ = v.Args[2]
- v_0 := v.Args[0]
- if v_0.Op != OpOffPtr {
- break
- }
- o1 := v_0.AuxInt
- p1 := v_0.Args[0]
+ p1 := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpLoad {
break
}
t2 := v_1.Type
_ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpOffPtr {
+ p2 := v_1.Args[0]
+ oldmem := v_1.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpStore {
+ break
+ }
+ t3 := mem.Aux
+ _ = mem.Args[2]
+ p3 := mem.Args[0]
+ if oldmem != mem.Args[2] {
break
}
- if v_1_0.AuxInt != o1 {
+ if !(isSamePtr(p1, p2) && t2.Size() == sizeof(t1) && disjoint(p1, sizeof(t1), p3, sizeof(t3))) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = mem.Type
+ v.AddArg(mem)
+ return true
+ }
+ // match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ (Store {t4} p4 _ oldmem)))
+ // cond: isSamePtr(p1, p2) && t2.Size() == sizeof(t1) && disjoint(p1, sizeof(t1), p3, sizeof(t3)) && disjoint(p1, sizeof(t1), p4, sizeof(t4))
+ // result: mem
+ for {
+ t1 := v.Aux
+ _ = v.Args[2]
+ p1 := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLoad {
break
}
- p2 := v_1_0.Args[0]
+ t2 := v_1.Type
+ _ = v_1.Args[1]
+ p2 := v_1.Args[0]
oldmem := v_1.Args[1]
mem := v.Args[2]
if mem.Op != OpStore {
			break
}
t3 := mem.Aux
_ = mem.Args[2]
- mem_0 := mem.Args[0]
- if mem_0.Op != OpOffPtr {
+ p3 := mem.Args[0]
+ mem_2 := mem.Args[2]
+ if mem_2.Op != OpStore {
break
}
- o3 := mem_0.AuxInt
- p3 := mem_0.Args[0]
- if oldmem != mem.Args[2] {
+ t4 := mem_2.Aux
+ _ = mem_2.Args[2]
+ p4 := mem_2.Args[0]
+ if oldmem != mem_2.Args[2] {
break
}
- if !(isSamePtr(p1, p2) && isSamePtr(p1, p3) && t2.Size() == t1.(*types.Type).Size() && !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size())) {
+ if !(isSamePtr(p1, p2) && t2.Size() == sizeof(t1) && disjoint(p1, sizeof(t1), p3, sizeof(t3)) && disjoint(p1, sizeof(t1), p4, sizeof(t4))) {
break
}
v.reset(OpCopy)
v.AddArg(mem)
return true
}
- // match: (Store {t1} (OffPtr [o1] p1) (Load <t2> (OffPtr [o1] p2) oldmem) mem:(Store {t3} (OffPtr [o3] p3) _ (Store {t4} (OffPtr [o4] p4) _ oldmem)))
- // cond: isSamePtr(p1, p2) && isSamePtr(p1, p3) && isSamePtr(p1, p4) && t2.Size() == t1.(*types.Type).Size() && !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size()) && !overlap(o1, t2.Size(), o4, t4.(*types.Type).Size())
+ // match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 _ oldmem))))
+ // cond: isSamePtr(p1, p2) && t2.Size() == sizeof(t1) && disjoint(p1, sizeof(t1), p3, sizeof(t3)) && disjoint(p1, sizeof(t1), p4, sizeof(t4)) && disjoint(p1, sizeof(t1), p5, sizeof(t5))
// result: mem
for {
t1 := v.Aux
_ = v.Args[2]
- v_0 := v.Args[0]
- if v_0.Op != OpOffPtr {
- break
- }
- o1 := v_0.AuxInt
- p1 := v_0.Args[0]
+ p1 := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpLoad {
break
}
t2 := v_1.Type
_ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpOffPtr {
- break
- }
- if v_1_0.AuxInt != o1 {
- break
- }
- p2 := v_1_0.Args[0]
+ p2 := v_1.Args[0]
oldmem := v_1.Args[1]
mem := v.Args[2]
if mem.Op != OpStore {
			break
}
t3 := mem.Aux
_ = mem.Args[2]
- mem_0 := mem.Args[0]
- if mem_0.Op != OpOffPtr {
- break
- }
- o3 := mem_0.AuxInt
- p3 := mem_0.Args[0]
+ p3 := mem.Args[0]
mem_2 := mem.Args[2]
if mem_2.Op != OpStore {
break
}
t4 := mem_2.Aux
_ = mem_2.Args[2]
- mem_2_0 := mem_2.Args[0]
- if mem_2_0.Op != OpOffPtr {
+ p4 := mem_2.Args[0]
+ mem_2_2 := mem_2.Args[2]
+ if mem_2_2.Op != OpStore {
break
}
- o4 := mem_2_0.AuxInt
- p4 := mem_2_0.Args[0]
- if oldmem != mem_2.Args[2] {
+ t5 := mem_2_2.Aux
+ _ = mem_2_2.Args[2]
+ p5 := mem_2_2.Args[0]
+ if oldmem != mem_2_2.Args[2] {
break
}
- if !(isSamePtr(p1, p2) && isSamePtr(p1, p3) && isSamePtr(p1, p4) && t2.Size() == t1.(*types.Type).Size() && !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size()) && !overlap(o1, t2.Size(), o4, t4.(*types.Type).Size())) {
+ if !(isSamePtr(p1, p2) && t2.Size() == sizeof(t1) && disjoint(p1, sizeof(t1), p3, sizeof(t3)) && disjoint(p1, sizeof(t1), p4, sizeof(t4)) && disjoint(p1, sizeof(t1), p5, sizeof(t5))) {
break
}
v.reset(OpCopy)
v.AddArg(mem)
return true
}
- // match: (Store {t1} (OffPtr [o1] p1) (Load <t2> (OffPtr [o1] p2) oldmem) mem:(Store {t3} (OffPtr [o3] p3) _ (Store {t4} (OffPtr [o4] p4) _ (Store {t5} (OffPtr [o5] p5) _ oldmem))))
- // cond: isSamePtr(p1, p2) && isSamePtr(p1, p3) && isSamePtr(p1, p4) && isSamePtr(p1, p5) && t2.Size() == t1.(*types.Type).Size() && !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size()) && !overlap(o1, t2.Size(), o4, t4.(*types.Type).Size()) && !overlap(o1, t2.Size(), o5, t5.(*types.Type).Size())
+ // match: (Store {t} (OffPtr [o] p1) x mem:(Zero [n] p2 _))
+ // cond: isConstZero(x) && o >= 0 && sizeof(t) + o <= n && isSamePtr(p1, p2)
// result: mem
for {
- t1 := v.Aux
+ t := v.Aux
_ = v.Args[2]
v_0 := v.Args[0]
if v_0.Op != OpOffPtr {
break
}
- o1 := v_0.AuxInt
+ o := v_0.AuxInt
p1 := v_0.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpLoad {
+ x := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpZero {
+ break
+ }
+ n := mem.AuxInt
+ _ = mem.Args[1]
+ p2 := mem.Args[0]
+ if !(isConstZero(x) && o >= 0 && sizeof(t)+o <= n && isSamePtr(p1, p2)) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = mem.Type
+ v.AddArg(mem)
+ return true
+ }
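+	// Storing a constant zero into memory that a dominating Zero already
+	// cleared is redundant, provided the write lands inside the zeroed
+	// region (o >= 0 && sizeof(t)+o <= n). The rules below skip over up
+	// to three intervening Stores that are disjoint from the written
+	// region.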
+ // match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Zero [n] p3 _)))
+ // cond: isConstZero(x) && o1 >= 0 && sizeof(t1) + o1 <= n && isSamePtr(p1, p3) && disjoint(op, sizeof(t1), p2, sizeof(t2))
+ // result: mem
+ for {
+ t1 := v.Aux
+ _ = v.Args[2]
+ op := v.Args[0]
+ if op.Op != OpOffPtr {
+ break
+ }
+ o1 := op.AuxInt
+ p1 := op.Args[0]
+ x := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpStore {
+ break
+ }
+ t2 := mem.Aux
+ _ = mem.Args[2]
+ p2 := mem.Args[0]
+ mem_2 := mem.Args[2]
+ if mem_2.Op != OpZero {
+ break
+ }
+ n := mem_2.AuxInt
+ _ = mem_2.Args[1]
+ p3 := mem_2.Args[0]
+ if !(isConstZero(x) && o1 >= 0 && sizeof(t1)+o1 <= n && isSamePtr(p1, p3) && disjoint(op, sizeof(t1), p2, sizeof(t2))) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = mem.Type
+ v.AddArg(mem)
+ return true
+ }
+ // match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Store {t3} p3 _ (Zero [n] p4 _))))
+ // cond: isConstZero(x) && o1 >= 0 && sizeof(t1) + o1 <= n && isSamePtr(p1, p4) && disjoint(op, sizeof(t1), p2, sizeof(t2)) && disjoint(op, sizeof(t1), p3, sizeof(t3))
+ // result: mem
+ for {
+ t1 := v.Aux
+ _ = v.Args[2]
+ op := v.Args[0]
+ if op.Op != OpOffPtr {
+ break
+ }
+ o1 := op.AuxInt
+ p1 := op.Args[0]
+ x := v.Args[1]
+ mem := v.Args[2]
+ if mem.Op != OpStore {
+ break
+ }
+ t2 := mem.Aux
+ _ = mem.Args[2]
+ p2 := mem.Args[0]
+ mem_2 := mem.Args[2]
+ if mem_2.Op != OpStore {
+ break
+ }
+ t3 := mem_2.Aux
+ _ = mem_2.Args[2]
+ p3 := mem_2.Args[0]
+ mem_2_2 := mem_2.Args[2]
+ if mem_2_2.Op != OpZero {
break
}
- t2 := v_1.Type
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpOffPtr {
+ n := mem_2_2.AuxInt
+ _ = mem_2_2.Args[1]
+ p4 := mem_2_2.Args[0]
+ if !(isConstZero(x) && o1 >= 0 && sizeof(t1)+o1 <= n && isSamePtr(p1, p4) && disjoint(op, sizeof(t1), p2, sizeof(t2)) && disjoint(op, sizeof(t1), p3, sizeof(t3))) {
break
}
- if v_1_0.AuxInt != o1 {
+ v.reset(OpCopy)
+ v.Type = mem.Type
+ v.AddArg(mem)
+ return true
+ }
+ // match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Zero [n] p5 _)))))
+ // cond: isConstZero(x) && o1 >= 0 && sizeof(t1) + o1 <= n && isSamePtr(p1, p5) && disjoint(op, sizeof(t1), p2, sizeof(t2)) && disjoint(op, sizeof(t1), p3, sizeof(t3)) && disjoint(op, sizeof(t1), p4, sizeof(t4))
+ // result: mem
+ for {
+ t1 := v.Aux
+ _ = v.Args[2]
+ op := v.Args[0]
+ if op.Op != OpOffPtr {
break
}
- p2 := v_1_0.Args[0]
- oldmem := v_1.Args[1]
+ o1 := op.AuxInt
+ p1 := op.Args[0]
+ x := v.Args[1]
mem := v.Args[2]
if mem.Op != OpStore {
break
}
- t3 := mem.Aux
+ t2 := mem.Aux
_ = mem.Args[2]
- mem_0 := mem.Args[0]
- if mem_0.Op != OpOffPtr {
- break
- }
- o3 := mem_0.AuxInt
- p3 := mem_0.Args[0]
+ p2 := mem.Args[0]
mem_2 := mem.Args[2]
if mem_2.Op != OpStore {
break
}
- t4 := mem_2.Aux
+ t3 := mem_2.Aux
_ = mem_2.Args[2]
- mem_2_0 := mem_2.Args[0]
- if mem_2_0.Op != OpOffPtr {
- break
- }
- o4 := mem_2_0.AuxInt
- p4 := mem_2_0.Args[0]
+ p3 := mem_2.Args[0]
mem_2_2 := mem_2.Args[2]
if mem_2_2.Op != OpStore {
break
}
- t5 := mem_2_2.Aux
+ t4 := mem_2_2.Aux
_ = mem_2_2.Args[2]
- mem_2_2_0 := mem_2_2.Args[0]
- if mem_2_2_0.Op != OpOffPtr {
- break
- }
- o5 := mem_2_2_0.AuxInt
- p5 := mem_2_2_0.Args[0]
- if oldmem != mem_2_2.Args[2] {
+ p4 := mem_2_2.Args[0]
+ mem_2_2_2 := mem_2_2.Args[2]
+ if mem_2_2_2.Op != OpZero {
break
}
- if !(isSamePtr(p1, p2) && isSamePtr(p1, p3) && isSamePtr(p1, p4) && isSamePtr(p1, p5) && t2.Size() == t1.(*types.Type).Size() && !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size()) && !overlap(o1, t2.Size(), o4, t4.(*types.Type).Size()) && !overlap(o1, t2.Size(), o5, t5.(*types.Type).Size())) {
+ n := mem_2_2_2.AuxInt
+ _ = mem_2_2_2.Args[1]
+ p5 := mem_2_2_2.Args[0]
+ if !(isConstZero(x) && o1 >= 0 && sizeof(t1)+o1 <= n && isSamePtr(p1, p5) && disjoint(op, sizeof(t1), p2, sizeof(t2)) && disjoint(op, sizeof(t1), p3, sizeof(t3)) && disjoint(op, sizeof(t1), p4, sizeof(t4))) {
break
}
v.reset(OpCopy)
v.AddArg(mem)
return true
}
+ return false
+}
+func rewriteValuegeneric_OpStore_10(v *Value) bool {
+ b := v.Block
+ _ = b
+ config := b.Func.Config
+ _ = config
+ fe := b.Func.fe
+ _ = fe
// match: (Store dst (StructMake2 <t> f0 f1) mem)
// cond:
// result: (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))
}
// match: (Store {t} dst (Load src mem) mem)
// cond: !fe.CanSSA(t.(*types.Type))
- // result: (Move {t} [t.(*types.Type).Size()] dst src mem)
+ // result: (Move {t} [sizeof(t)] dst src mem)
for {
t := v.Aux
_ = v.Args[2]
break
}
v.reset(OpMove)
- v.AuxInt = t.(*types.Type).Size()
+ v.AuxInt = sizeof(t)
v.Aux = t
v.AddArg(dst)
v.AddArg(src)
v.AddArg(mem)
return true
}
- return false
-}
-func rewriteValuegeneric_OpStore_10(v *Value) bool {
- b := v.Block
- _ = b
- config := b.Func.Config
- _ = config
- fe := b.Func.fe
- _ = fe
// match: (Store {t} dst (Load src mem) (VarDef {x} mem))
// cond: !fe.CanSSA(t.(*types.Type))
- // result: (Move {t} [t.(*types.Type).Size()] dst src (VarDef {x} mem))
+ // result: (Move {t} [sizeof(t)] dst src (VarDef {x} mem))
for {
t := v.Aux
_ = v.Args[2]
break
}
v.reset(OpMove)
- v.AuxInt = t.(*types.Type).Size()
+ v.AuxInt = sizeof(t)
v.Aux = t
v.AddArg(dst)
v.AddArg(src)
v.AddArg(mem)
return true
}
+ // match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [0] p2) d2 m3:(Move [n] p3 _ mem)))
+ // cond: m2.Uses == 1 && m3.Uses == 1 && o1 == sizeof(t2) && n == sizeof(t2) + sizeof(t1) && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2) && clobber(m3)
+ // result: (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
+ for {
+ t1 := v.Aux
+ _ = v.Args[2]
+ op1 := v.Args[0]
+ if op1.Op != OpOffPtr {
+ break
+ }
+ o1 := op1.AuxInt
+ p1 := op1.Args[0]
+ d1 := v.Args[1]
+ m2 := v.Args[2]
+ if m2.Op != OpStore {
+ break
+ }
+ t2 := m2.Aux
+ _ = m2.Args[2]
+ op2 := m2.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ if op2.AuxInt != 0 {
+ break
+ }
+ p2 := op2.Args[0]
+ d2 := m2.Args[1]
+ m3 := m2.Args[2]
+ if m3.Op != OpMove {
+ break
+ }
+ n := m3.AuxInt
+ _ = m3.Args[2]
+ p3 := m3.Args[0]
+ mem := m3.Args[2]
+ if !(m2.Uses == 1 && m3.Uses == 1 && o1 == sizeof(t2) && n == sizeof(t2)+sizeof(t1) && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2) && clobber(m3)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t1
+ v.AddArg(op1)
+ v.AddArg(d1)
+ v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v0.Aux = t2
+ v0.AddArg(op2)
+ v0.AddArg(d2)
+ v0.AddArg(mem)
+ v.AddArg(v0)
+ return true
+ }
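+	// When single-use Stores exactly tile the destination of a Move
+	// (offsets step down to 0 and the sizes sum to n), the Move writes
+	// nothing that survives, so it is removed and the Stores are rebuilt
+	// on its incoming memory. rewriteValuegeneric_OpStore_20 below
+	// extends this to longer Store chains and to covering a Zero instead
+	// of a Move.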
+ return false
+}
+func rewriteValuegeneric_OpStore_20(v *Value) bool {
+ b := v.Block
+ _ = b
+ // match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [0] p3) d3 m4:(Move [n] p4 _ mem))))
+ // cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == sizeof(t3) && o1-o2 == sizeof(t2) && n == sizeof(t3) + sizeof(t2) + sizeof(t1) && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2) && clobber(m3) && clobber(m4)
+ // result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 mem)))
+ for {
+ t1 := v.Aux
+ _ = v.Args[2]
+ op1 := v.Args[0]
+ if op1.Op != OpOffPtr {
+ break
+ }
+ o1 := op1.AuxInt
+ p1 := op1.Args[0]
+ d1 := v.Args[1]
+ m2 := v.Args[2]
+ if m2.Op != OpStore {
+ break
+ }
+ t2 := m2.Aux
+ _ = m2.Args[2]
+ op2 := m2.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ o2 := op2.AuxInt
+ p2 := op2.Args[0]
+ d2 := m2.Args[1]
+ m3 := m2.Args[2]
+ if m3.Op != OpStore {
+ break
+ }
+ t3 := m3.Aux
+ _ = m3.Args[2]
+ op3 := m3.Args[0]
+ if op3.Op != OpOffPtr {
+ break
+ }
+ if op3.AuxInt != 0 {
+ break
+ }
+ p3 := op3.Args[0]
+ d3 := m3.Args[1]
+ m4 := m3.Args[2]
+ if m4.Op != OpMove {
+ break
+ }
+ n := m4.AuxInt
+ _ = m4.Args[2]
+ p4 := m4.Args[0]
+ mem := m4.Args[2]
+ if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == sizeof(t3) && o1-o2 == sizeof(t2) && n == sizeof(t3)+sizeof(t2)+sizeof(t1) && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2) && clobber(m3) && clobber(m4)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t1
+ v.AddArg(op1)
+ v.AddArg(d1)
+ v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v0.Aux = t2
+ v0.AddArg(op2)
+ v0.AddArg(d2)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v1.AddArg(op3)
+ v1.AddArg(d3)
+ v1.AddArg(mem)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [o3] p3) d3 m4:(Store {t4} op4:(OffPtr [0] p4) d4 m5:(Move [n] p5 _ mem)))))
+ // cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == sizeof(t4) && o2-o3 == sizeof(t3) && o1-o2 == sizeof(t2) && n == sizeof(t4) + sizeof(t3) + sizeof(t2) + sizeof(t1) && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2) && clobber(m3) && clobber(m4) && clobber(m5)
+ // result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 (Store {t4} op4 d4 mem))))
+ for {
+ t1 := v.Aux
+ _ = v.Args[2]
+ op1 := v.Args[0]
+ if op1.Op != OpOffPtr {
+ break
+ }
+ o1 := op1.AuxInt
+ p1 := op1.Args[0]
+ d1 := v.Args[1]
+ m2 := v.Args[2]
+ if m2.Op != OpStore {
+ break
+ }
+ t2 := m2.Aux
+ _ = m2.Args[2]
+ op2 := m2.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ o2 := op2.AuxInt
+ p2 := op2.Args[0]
+ d2 := m2.Args[1]
+ m3 := m2.Args[2]
+ if m3.Op != OpStore {
+ break
+ }
+ t3 := m3.Aux
+ _ = m3.Args[2]
+ op3 := m3.Args[0]
+ if op3.Op != OpOffPtr {
+ break
+ }
+ o3 := op3.AuxInt
+ p3 := op3.Args[0]
+ d3 := m3.Args[1]
+ m4 := m3.Args[2]
+ if m4.Op != OpStore {
+ break
+ }
+ t4 := m4.Aux
+ _ = m4.Args[2]
+ op4 := m4.Args[0]
+ if op4.Op != OpOffPtr {
+ break
+ }
+ if op4.AuxInt != 0 {
+ break
+ }
+ p4 := op4.Args[0]
+ d4 := m4.Args[1]
+ m5 := m4.Args[2]
+ if m5.Op != OpMove {
+ break
+ }
+ n := m5.AuxInt
+ _ = m5.Args[2]
+ p5 := m5.Args[0]
+ mem := m5.Args[2]
+ if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == sizeof(t4) && o2-o3 == sizeof(t3) && o1-o2 == sizeof(t2) && n == sizeof(t4)+sizeof(t3)+sizeof(t2)+sizeof(t1) && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2) && clobber(m3) && clobber(m4) && clobber(m5)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t1
+ v.AddArg(op1)
+ v.AddArg(d1)
+ v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v0.Aux = t2
+ v0.AddArg(op2)
+ v0.AddArg(d2)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v1.AddArg(op3)
+ v1.AddArg(d3)
+ v2 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v2.Aux = t4
+ v2.AddArg(op4)
+ v2.AddArg(d4)
+ v2.AddArg(mem)
+ v1.AddArg(v2)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [0] p2) d2 m3:(Zero [n] p3 mem)))
+ // cond: m2.Uses == 1 && m3.Uses == 1 && o1 == sizeof(t2) && n == sizeof(t2) + sizeof(t1) && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2) && clobber(m3)
+ // result: (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
+ for {
+ t1 := v.Aux
+ _ = v.Args[2]
+ op1 := v.Args[0]
+ if op1.Op != OpOffPtr {
+ break
+ }
+ o1 := op1.AuxInt
+ p1 := op1.Args[0]
+ d1 := v.Args[1]
+ m2 := v.Args[2]
+ if m2.Op != OpStore {
+ break
+ }
+ t2 := m2.Aux
+ _ = m2.Args[2]
+ op2 := m2.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ if op2.AuxInt != 0 {
+ break
+ }
+ p2 := op2.Args[0]
+ d2 := m2.Args[1]
+ m3 := m2.Args[2]
+ if m3.Op != OpZero {
+ break
+ }
+ n := m3.AuxInt
+ _ = m3.Args[1]
+ p3 := m3.Args[0]
+ mem := m3.Args[1]
+ if !(m2.Uses == 1 && m3.Uses == 1 && o1 == sizeof(t2) && n == sizeof(t2)+sizeof(t1) && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2) && clobber(m3)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t1
+ v.AddArg(op1)
+ v.AddArg(d1)
+ v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v0.Aux = t2
+ v0.AddArg(op2)
+ v0.AddArg(d2)
+ v0.AddArg(mem)
+ v.AddArg(v0)
+ return true
+ }
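+	// The Zero analogue of the Move-covering rule above: single-use
+	// Stores that exactly cover the zeroed region make the Zero
+	// redundant, so the Stores reattach to its incoming memory.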
+ // match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [0] p3) d3 m4:(Zero [n] p4 mem))))
+ // cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == sizeof(t3) && o1-o2 == sizeof(t2) && n == sizeof(t3) + sizeof(t2) + sizeof(t1) && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2) && clobber(m3) && clobber(m4)
+ // result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 mem)))
+ for {
+ t1 := v.Aux
+ _ = v.Args[2]
+ op1 := v.Args[0]
+ if op1.Op != OpOffPtr {
+ break
+ }
+ o1 := op1.AuxInt
+ p1 := op1.Args[0]
+ d1 := v.Args[1]
+ m2 := v.Args[2]
+ if m2.Op != OpStore {
+ break
+ }
+ t2 := m2.Aux
+ _ = m2.Args[2]
+ op2 := m2.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ o2 := op2.AuxInt
+ p2 := op2.Args[0]
+ d2 := m2.Args[1]
+ m3 := m2.Args[2]
+ if m3.Op != OpStore {
+ break
+ }
+ t3 := m3.Aux
+ _ = m3.Args[2]
+ op3 := m3.Args[0]
+ if op3.Op != OpOffPtr {
+ break
+ }
+ if op3.AuxInt != 0 {
+ break
+ }
+ p3 := op3.Args[0]
+ d3 := m3.Args[1]
+ m4 := m3.Args[2]
+ if m4.Op != OpZero {
+ break
+ }
+ n := m4.AuxInt
+ _ = m4.Args[1]
+ p4 := m4.Args[0]
+ mem := m4.Args[1]
+ if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == sizeof(t3) && o1-o2 == sizeof(t2) && n == sizeof(t3)+sizeof(t2)+sizeof(t1) && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2) && clobber(m3) && clobber(m4)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t1
+ v.AddArg(op1)
+ v.AddArg(d1)
+ v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v0.Aux = t2
+ v0.AddArg(op2)
+ v0.AddArg(d2)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v1.AddArg(op3)
+ v1.AddArg(d3)
+ v1.AddArg(mem)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [o3] p3) d3 m4:(Store {t4} op4:(OffPtr [0] p4) d4 m5:(Zero [n] p5 mem)))))
+ // cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == sizeof(t4) && o2-o3 == sizeof(t3) && o1-o2 == sizeof(t2) && n == sizeof(t4) + sizeof(t3) + sizeof(t2) + sizeof(t1) && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2) && clobber(m3) && clobber(m4) && clobber(m5)
+ // result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 (Store {t4} op4 d4 mem))))
+ for {
+ t1 := v.Aux
+ _ = v.Args[2]
+ op1 := v.Args[0]
+ if op1.Op != OpOffPtr {
+ break
+ }
+ o1 := op1.AuxInt
+ p1 := op1.Args[0]
+ d1 := v.Args[1]
+ m2 := v.Args[2]
+ if m2.Op != OpStore {
+ break
+ }
+ t2 := m2.Aux
+ _ = m2.Args[2]
+ op2 := m2.Args[0]
+ if op2.Op != OpOffPtr {
+ break
+ }
+ o2 := op2.AuxInt
+ p2 := op2.Args[0]
+ d2 := m2.Args[1]
+ m3 := m2.Args[2]
+ if m3.Op != OpStore {
+ break
+ }
+ t3 := m3.Aux
+ _ = m3.Args[2]
+ op3 := m3.Args[0]
+ if op3.Op != OpOffPtr {
+ break
+ }
+ o3 := op3.AuxInt
+ p3 := op3.Args[0]
+ d3 := m3.Args[1]
+ m4 := m3.Args[2]
+ if m4.Op != OpStore {
+ break
+ }
+ t4 := m4.Aux
+ _ = m4.Args[2]
+ op4 := m4.Args[0]
+ if op4.Op != OpOffPtr {
+ break
+ }
+ if op4.AuxInt != 0 {
+ break
+ }
+ p4 := op4.Args[0]
+ d4 := m4.Args[1]
+ m5 := m4.Args[2]
+ if m5.Op != OpZero {
+ break
+ }
+ n := m5.AuxInt
+ _ = m5.Args[1]
+ p5 := m5.Args[0]
+ mem := m5.Args[1]
+ if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == sizeof(t4) && o2-o3 == sizeof(t3) && o1-o2 == sizeof(t2) && n == sizeof(t4)+sizeof(t3)+sizeof(t2)+sizeof(t1) && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2) && clobber(m3) && clobber(m4) && clobber(m5)) {
+ break
+ }
+ v.reset(OpStore)
+ v.Aux = t1
+ v.AddArg(op1)
+ v.AddArg(d1)
+ v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v0.Aux = t2
+ v0.AddArg(op2)
+ v0.AddArg(d2)
+ v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v1.Aux = t3
+ v1.AddArg(op3)
+ v1.AddArg(d3)
+ v2 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
+ v2.Aux = t4
+ v2.AddArg(op4)
+ v2.AddArg(d4)
+ v2.AddArg(mem)
+ v1.AddArg(v2)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ return true
+ }
return false
}
func rewriteValuegeneric_OpStringLen_0(v *Value) bool {
v.AddArg(mem)
return true
}
+ // match: (Zero {t1} [n] p1 store:(Store {t2} (OffPtr [o2] p2) _ mem))
+ // cond: isSamePtr(p1, p2) && store.Uses == 1 && n >= o2 + sizeof(t2) && clobber(store)
+ // result: (Zero {t1} [n] p1 mem)
+ for {
+ n := v.AuxInt
+ t1 := v.Aux
+ _ = v.Args[1]
+ p1 := v.Args[0]
+ store := v.Args[1]
+ if store.Op != OpStore {
+ break
+ }
+ t2 := store.Aux
+ _ = store.Args[2]
+ store_0 := store.Args[0]
+ if store_0.Op != OpOffPtr {
+ break
+ }
+ o2 := store_0.AuxInt
+ p2 := store_0.Args[0]
+ mem := store.Args[2]
+ if !(isSamePtr(p1, p2) && store.Uses == 1 && n >= o2+sizeof(t2) && clobber(store)) {
+ break
+ }
+ v.reset(OpZero)
+ v.AuxInt = n
+ v.Aux = t1
+ v.AddArg(p1)
+ v.AddArg(mem)
+ return true
+ }
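+	// A single-use Store that lands entirely inside a following Zero of
+	// the same pointer (n >= o2+sizeof(t2)) is dead, so the Zero takes
+	// over the Store's incoming memory.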
+ // match: (Zero {t} [n] dst1 move:(Move {t} [n] dst2 _ mem))
+ // cond: move.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move)
+ // result: (Zero {t} [n] dst1 mem)
+ for {
+ n := v.AuxInt
+ t := v.Aux
+ _ = v.Args[1]
+ dst1 := v.Args[0]
+ move := v.Args[1]
+ if move.Op != OpMove {
+ break
+ }
+ if move.AuxInt != n {
+ break
+ }
+ if move.Aux != t {
+ break
+ }
+ _ = move.Args[2]
+ dst2 := move.Args[0]
+ mem := move.Args[2]
+ if !(move.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move)) {
+ break
+ }
+ v.reset(OpZero)
+ v.AuxInt = n
+ v.Aux = t
+ v.AddArg(dst1)
+ v.AddArg(mem)
+ return true
+ }
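+	// A Move whose memory result is only consumed by a same-size Zero of
+	// the same destination is dead: everything it copies is immediately
+	// overwritten. The next rule handles the same shape with a VarDef
+	// between the Zero and the Move, re-creating the VarDef on the
+	// surviving memory.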
+ // match: (Zero {t} [n] dst1 vardef:(VarDef {x} move:(Move {t} [n] dst2 _ mem)))
+ // cond: move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move) && clobber(vardef)
+ // result: (Zero {t} [n] dst1 (VarDef {x} mem))
+ for {
+ n := v.AuxInt
+ t := v.Aux
+ _ = v.Args[1]
+ dst1 := v.Args[0]
+ vardef := v.Args[1]
+ if vardef.Op != OpVarDef {
+ break
+ }
+ x := vardef.Aux
+ move := vardef.Args[0]
+ if move.Op != OpMove {
+ break
+ }
+ if move.AuxInt != n {
+ break
+ }
+ if move.Aux != t {
+ break
+ }
+ _ = move.Args[2]
+ dst2 := move.Args[0]
+ mem := move.Args[2]
+ if !(move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move) && clobber(vardef)) {
+ break
+ }
+ v.reset(OpZero)
+ v.AuxInt = n
+ v.Aux = t
+ v.AddArg(dst1)
+ v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
+ v0.Aux = x
+ v0.AddArg(mem)
+ v.AddArg(v0)
+ return true
+ }
return false
}
func rewriteValuegeneric_OpZeroExt16to32_0(v *Value) bool {