(Move [2] dst src mem) => (MOVWstore dst (MOVWload src mem) mem)
(Move [4] dst src mem) => (MOVLstore dst (MOVLload src mem) mem)
(Move [8] dst src mem) => (MOVQstore dst (MOVQload src mem) mem)
-(Move [16] dst src mem) && config.useSSE => (MOVOstore dst (MOVOload src mem) mem)
-(Move [16] dst src mem) && !config.useSSE =>
- (MOVQstore [8] dst (MOVQload [8] src mem)
- (MOVQstore dst (MOVQload src mem) mem))
+(Move [16] dst src mem) => (MOVOstore dst (MOVOload src mem) mem)
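Note: with the useSSE guard gone, a 16-byte SSA Move now lowers straight to a MOVOload/MOVOstore pair. A minimal Go-level sketch of code that should produce such a Move (the type and function names are hypothetical):

	// copyPair is a hypothetical example: assigning one 16-byte struct to
	// another is an SSA Move [16], which the rule above turns into a single
	// 16-byte load and store.
	type pair struct{ lo, hi uint64 } // 16 bytes on amd64

	func copyPair(dst, src *pair) {
		*dst = *src
	}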
(Move [32] dst src mem) =>
(Move [16]
(OffPtr <dst.Type> dst [16])
(OffPtr <src.Type> src [16])
(Move [16] dst src mem))
-(Move [48] dst src mem) && config.useSSE =>
+(Move [48] dst src mem) =>
(Move [32]
(OffPtr <dst.Type> dst [16])
(OffPtr <src.Type> src [16])
(Move [16] dst src mem))
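For reference, the 48-byte case is handled by recursion: the inner Move [16] copies the first 16 bytes, and the outer Move [32] (itself split by the Move [32] rule above) covers the rest. A sketch of the resulting chunks, offsets only:

	// Move [48] dst src  (illustrative decomposition)
	//   bytes [ 0,16): Move [16] dst     src      (innermost, stored first)
	//   bytes [16,48): Move [32] dst+16  src+16   (split again into two 16-byte chunks)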
-(Move [64] dst src mem) && config.useSSE =>
+(Move [64] dst src mem) =>
(Move [32]
(OffPtr <dst.Type> dst [32])
(OffPtr <src.Type> src [32])
(Move [32] dst src mem))
(Move [s] dst src mem)
&& s > 16 && s%16 != 0 && s%16 <= 8 =>
(Move [s-s%16]
(OffPtr <dst.Type> dst [s%16])
(OffPtr <src.Type> src [s%16])
(MOVQstore dst (MOVQload src mem) mem))
(Move [s] dst src mem)
- && s > 16 && s%16 != 0 && s%16 > 8 && config.useSSE =>
+ && s > 16 && s%16 != 0 && s%16 > 8 =>
(Move [s-s%16]
(OffPtr <dst.Type> dst [s%16])
(OffPtr <src.Type> src [s%16])
(MOVOstore dst (MOVOload src mem) mem))
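A worked example of the remainder handling, taking s = 27 (so s%16 = 11, which is > 8): the MOVOstore/MOVOload pair copies the first 16 bytes, and the recursive Move [s-s%16] = Move [16] at offset 11 copies the rest, overlapping the first chunk by 5 bytes:

	// s = 27, s%16 = 11
	//   MOVOstore/MOVOload       copies bytes [ 0,16)
	//   Move [16] at offset 11   copies bytes [11,27)
	// The overlap is harmless and avoids a byte-by-byte tail.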
-(Move [s] dst src mem)
- && s > 16 && s%16 != 0 && s%16 > 8 && !config.useSSE =>
- (Move [s-s%16]
- (OffPtr <dst.Type> dst [s%16])
- (OffPtr <src.Type> src [s%16])
- (MOVQstore [8] dst (MOVQload [8] src mem)
- (MOVQstore dst (MOVQload src mem) mem)))
// Medium copying uses a duff device.
(Move [s] dst src mem)
&& s > 64 && s <= 16*64 && s%16 == 0
&& !config.noDuffDevice && logLargeCopy(v, s) =>
(DUFFCOPY [s] dst src mem)
(Zero [7] destptr mem) =>
(MOVLstoreconst [makeValAndOff(0,3)] destptr
(MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
-// Strip off any fractional word zeroing.
-(Zero [s] destptr mem) && s%8 != 0 && s > 8 && !config.useSSE =>
- (Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8])
- (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
-
// Zero small numbers of words directly.
-(Zero [16] destptr mem) && !config.useSSE =>
- (MOVQstoreconst [makeValAndOff(0,8)] destptr
- (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
-(Zero [24] destptr mem) && !config.useSSE =>
- (MOVQstoreconst [makeValAndOff(0,16)] destptr
- (MOVQstoreconst [makeValAndOff(0,8)] destptr
- (MOVQstoreconst [makeValAndOff(0,0)] destptr mem)))
-(Zero [32] destptr mem) && !config.useSSE =>
- (MOVQstoreconst [makeValAndOff(0,24)] destptr
- (MOVQstoreconst [makeValAndOff(0,16)] destptr
- (MOVQstoreconst [makeValAndOff(0,8)] destptr
- (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))))
-
-(Zero [9] destptr mem) && config.useSSE =>
+(Zero [9] destptr mem) =>
(MOVBstoreconst [makeValAndOff(0,8)] destptr
(MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
-(Zero [10] destptr mem) && config.useSSE =>
+(Zero [10] destptr mem) =>
(MOVWstoreconst [makeValAndOff(0,8)] destptr
(MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
-(Zero [11] destptr mem) && config.useSSE =>
+(Zero [11] destptr mem) =>
(MOVLstoreconst [makeValAndOff(0,7)] destptr
(MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
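The offset 7 in the Zero [11] rule is a deliberate overlap: the 8-byte store clears bytes [0,8) and the 4-byte store at offset 7 clears bytes [7,11), so two stores cover all 11 bytes:

	// Zero [11] destptr
	//   MOVQstoreconst [off 0]   zeroes bytes [0,8)
	//   MOVLstoreconst [off 7]   zeroes bytes [7,11)   (1 byte of overlap)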
-(Zero [12] destptr mem) && config.useSSE =>
+(Zero [12] destptr mem) =>
(MOVLstoreconst [makeValAndOff(0,8)] destptr
(MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
-(Zero [s] destptr mem) && s > 12 && s < 16 && config.useSSE =>
+(Zero [s] destptr mem) && s > 12 && s < 16 =>
(MOVQstoreconst [makeValAndOff(0,int32(s-8))] destptr
(MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
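Similarly, for 12 < s < 16 two overlapping 8-byte stores cover the whole range; e.g. s = 13:

	// Zero [13] destptr
	//   MOVQstoreconst [off 0]   zeroes bytes [0,8)
	//   MOVQstoreconst [off 5]   zeroes bytes [5,13)   (s-8 = 5)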
// Adjust zeros to be a multiple of 16 bytes.
-(Zero [s] destptr mem) && s%16 != 0 && s > 16 && config.useSSE =>
+(Zero [s] destptr mem) && s%16 != 0 && s > 16 =>
(Zero [s-s%16] (OffPtr <destptr.Type> destptr [s%16])
(MOVOstoreconst [makeValAndOff(0,0)] destptr mem))
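A worked example of the multiple-of-16 adjustment, taking s = 40 (s%16 = 8): the MOVOstoreconst clears the first 16 bytes, and the recursive Zero [32] at offset 8 clears the rest, again with overlap:

	// Zero [40] destptr
	//   MOVOstoreconst [off 0]   zeroes bytes [0,16)
	//   Zero [32] at offset 8    zeroes bytes [8,40)   (handled by the Zero [32] rule below)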
-(Zero [16] destptr mem) && config.useSSE =>
+(Zero [16] destptr mem) =>
(MOVOstoreconst [makeValAndOff(0,0)] destptr mem)
-(Zero [32] destptr mem) && config.useSSE =>
+(Zero [32] destptr mem) =>
(MOVOstoreconst [makeValAndOff(0,16)] destptr
(MOVOstoreconst [makeValAndOff(0,0)] destptr mem))
-(Zero [48] destptr mem) && config.useSSE =>
+(Zero [48] destptr mem) =>
(MOVOstoreconst [makeValAndOff(0,32)] destptr
(MOVOstoreconst [makeValAndOff(0,16)] destptr
(MOVOstoreconst [makeValAndOff(0,0)] destptr mem)))
-(Zero [64] destptr mem) && config.useSSE =>
+(Zero [64] destptr mem) =>
(MOVOstoreconst [makeValAndOff(0,48)] destptr
(MOVOstoreconst [makeValAndOff(0,32)] destptr
(MOVOstoreconst [makeValAndOff(0,16)] destptr
(MOVOstoreconst [makeValAndOff(0,0)] destptr mem))))
// Large zeroing uses REP STOSQ.
(Zero [s] destptr mem)
- && (s > 1024 || (config.noDuffDevice && s > 64 || !config.useSSE && s > 32))
+ && (s > 1024 || (config.noDuffDevice && s > 64))
&& s%8 == 0 =>
(REPSTOSQ destptr (MOVQconst [s/8]) (MOVQconst [0]) mem)
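A hedged Go-level sketch of something that should reach the REP STOSQ path after this change (the function name is hypothetical, and this assumes the front end emits an SSA Zero for the assignment rather than a runtime clear):

	// clear4k zeroes a 4096-byte buffer. 4096 > 1024 and 4096%8 == 0, so the
	// rule above should lower the Zero [4096] to REPSTOSQ.
	func clear4k(buf *[4096]byte) {
		*buf = [4096]byte{}
	}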
(LEAQ1 [0] x y) && v.Aux == nil => (ADDQ x y)
(MOVQstoreconst [c] {s} p1 x:(MOVQstoreconst [a] {s} p0 mem))
- && config.useSSE
&& x.Uses == 1
&& sequentialAddresses(p0, p1, int64(a.Off()+8-c.Off()))
&& a.Val() == 0
&& c.Val() == 0
&& setPos(v, x.Pos)
&& clobber(x)
=> (MOVOstoreconst [makeValAndOff(0,a.Off())] {s} p0 mem)
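Worked offsets for the merge, assuming sequentialAddresses(p0, p1, n) reports that p1 addresses exactly n bytes past p0: with a = makeValAndOff(0,0) on p0 and c = makeValAndOff(0,0) on p1 = p0+8, n = a.Off()+8-c.Off() = 8, and the two adjacent 8-byte zero stores collapse into one 16-byte store:

	// before: MOVQstoreconst [makeValAndOff(0,0)] p0   zeroes bytes [0,8)
	//         MOVQstoreconst [makeValAndOff(0,0)] p1   zeroes bytes [8,16)   (p1 = p0+8)
	// after:  MOVOstoreconst [makeValAndOff(0,0)] p0   zeroes bytes [0,16)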
(MOVQstoreconst [a] {s} p0 x:(MOVQstoreconst [c] {s} p1 mem))
- && config.useSSE
&& x.Uses == 1
&& sequentialAddresses(p0, p1, int64(a.Off()+8-c.Off()))
&& a.Val() == 0
&& c.Val() == 0
&& setPos(v, x.Pos)
&& clobber(x)
=> (MOVOstoreconst [makeValAndOff(0,a.Off())] {s} p0 mem)
func rewriteValueAMD64_OpAMD64MOVQstoreconst(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
- b := v.Block
- config := b.Func.Config
// match: (MOVQstoreconst [sc] {s} (ADDQconst [off] ptr) mem)
// cond: ValAndOff(sc).canAdd32(off)
// result: (MOVQstoreconst [ValAndOff(sc).addOffset32(off)] {s} ptr mem)
return true
}
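Each of the match blocks below follows the same generated shape; a rough sketch (not actual generated output):

	for {
		// decode v.AuxInt / v.Aux and the argument values
		// if the pattern's structure does not match: break
		// if the rule's boolean condition is false: break
		// otherwise: v.reset(<new op>), set AuxInt/Aux, add args, return true
	}

Dropping config.useSSE simply removes one conjunct from the boolean-condition check in these blocks.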
// match: (MOVQstoreconst [c] {s} p1 x:(MOVQstoreconst [a] {s} p0 mem))
- // cond: config.useSSE && x.Uses == 1 && sequentialAddresses(p0, p1, int64(a.Off()+8-c.Off())) && a.Val() == 0 && c.Val() == 0 && setPos(v, x.Pos) && clobber(x)
+ // cond: x.Uses == 1 && sequentialAddresses(p0, p1, int64(a.Off()+8-c.Off())) && a.Val() == 0 && c.Val() == 0 && setPos(v, x.Pos) && clobber(x)
// result: (MOVOstoreconst [makeValAndOff(0,a.Off())] {s} p0 mem)
for {
c := auxIntToValAndOff(v.AuxInt)
}
mem := x.Args[1]
p0 := x.Args[0]
- if !(config.useSSE && x.Uses == 1 && sequentialAddresses(p0, p1, int64(a.Off()+8-c.Off())) && a.Val() == 0 && c.Val() == 0 && setPos(v, x.Pos) && clobber(x)) {
+ if !(x.Uses == 1 && sequentialAddresses(p0, p1, int64(a.Off()+8-c.Off())) && a.Val() == 0 && c.Val() == 0 && setPos(v, x.Pos) && clobber(x)) {
break
}
v.reset(OpAMD64MOVOstoreconst)
return true
}
// match: (MOVQstoreconst [a] {s} p0 x:(MOVQstoreconst [c] {s} p1 mem))
- // cond: config.useSSE && x.Uses == 1 && sequentialAddresses(p0, p1, int64(a.Off()+8-c.Off())) && a.Val() == 0 && c.Val() == 0 && setPos(v, x.Pos) && clobber(x)
+ // cond: x.Uses == 1 && sequentialAddresses(p0, p1, int64(a.Off()+8-c.Off())) && a.Val() == 0 && c.Val() == 0 && setPos(v, x.Pos) && clobber(x)
// result: (MOVOstoreconst [makeValAndOff(0,a.Off())] {s} p0 mem)
for {
a := auxIntToValAndOff(v.AuxInt)
}
mem := x.Args[1]
p1 := x.Args[0]
- if !(config.useSSE && x.Uses == 1 && sequentialAddresses(p0, p1, int64(a.Off()+8-c.Off())) && a.Val() == 0 && c.Val() == 0 && setPos(v, x.Pos) && clobber(x)) {
+ if !(x.Uses == 1 && sequentialAddresses(p0, p1, int64(a.Off()+8-c.Off())) && a.Val() == 0 && c.Val() == 0 && setPos(v, x.Pos) && clobber(x)) {
break
}
v.reset(OpAMD64MOVOstoreconst)
return true
}
// match: (Move [16] dst src mem)
- // cond: config.useSSE
// result: (MOVOstore dst (MOVOload src mem) mem)
for {
if auxIntToInt64(v.AuxInt) != 16 {
break
}
dst := v_0
src := v_1
mem := v_2
- if !(config.useSSE) {
- break
- }
v.reset(OpAMD64MOVOstore)
v0 := b.NewValue0(v.Pos, OpAMD64MOVOload, types.TypeInt128)
v0.AddArg2(src, mem)
v.AddArg3(dst, v0, mem)
return true
}
- // match: (Move [16] dst src mem)
- // cond: !config.useSSE
- // result: (MOVQstore [8] dst (MOVQload [8] src mem) (MOVQstore dst (MOVQload src mem) mem))
- for {
- if auxIntToInt64(v.AuxInt) != 16 {
- break
- }
- dst := v_0
- src := v_1
- mem := v_2
- if !(!config.useSSE) {
- break
- }
- v.reset(OpAMD64MOVQstore)
- v.AuxInt = int32ToAuxInt(8)
- v0 := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
- v0.AuxInt = int32ToAuxInt(8)
- v0.AddArg2(src, mem)
- v1 := b.NewValue0(v.Pos, OpAMD64MOVQstore, types.TypeMem)
- v2 := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
- v2.AddArg2(src, mem)
- v1.AddArg3(dst, v2, mem)
- v.AddArg3(dst, v0, v1)
- return true
- }
// match: (Move [32] dst src mem)
// result: (Move [16] (OffPtr <dst.Type> dst [16]) (OffPtr <src.Type> src [16]) (Move [16] dst src mem))
for {
return true
}
// match: (Move [48] dst src mem)
- // cond: config.useSSE
// result: (Move [32] (OffPtr <dst.Type> dst [16]) (OffPtr <src.Type> src [16]) (Move [16] dst src mem))
for {
if auxIntToInt64(v.AuxInt) != 48 {
break
}
dst := v_0
src := v_1
mem := v_2
- if !(config.useSSE) {
- break
- }
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(32)
v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
return true
}
// match: (Move [64] dst src mem)
- // cond: config.useSSE
// result: (Move [32] (OffPtr <dst.Type> dst [32]) (OffPtr <src.Type> src [32]) (Move [32] dst src mem))
for {
if auxIntToInt64(v.AuxInt) != 64 {
break
}
dst := v_0
src := v_1
mem := v_2
- if !(config.useSSE) {
- break
- }
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(32)
v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
return true
}
// match: (Move [s] dst src mem)
- // cond: s > 16 && s%16 != 0 && s%16 > 8 && config.useSSE
+ // cond: s > 16 && s%16 != 0 && s%16 > 8
// result: (Move [s-s%16] (OffPtr <dst.Type> dst [s%16]) (OffPtr <src.Type> src [s%16]) (MOVOstore dst (MOVOload src mem) mem))
for {
s := auxIntToInt64(v.AuxInt)
dst := v_0
src := v_1
mem := v_2
- if !(s > 16 && s%16 != 0 && s%16 > 8 && config.useSSE) {
+ if !(s > 16 && s%16 != 0 && s%16 > 8) {
break
}
v.reset(OpMove)
return true
}
// match: (Move [s] dst src mem)
- // cond: s > 16 && s%16 != 0 && s%16 > 8 && !config.useSSE
- // result: (Move [s-s%16] (OffPtr <dst.Type> dst [s%16]) (OffPtr <src.Type> src [s%16]) (MOVQstore [8] dst (MOVQload [8] src mem) (MOVQstore dst (MOVQload src mem) mem)))
- for {
- s := auxIntToInt64(v.AuxInt)
- dst := v_0
- src := v_1
- mem := v_2
- if !(s > 16 && s%16 != 0 && s%16 > 8 && !config.useSSE) {
- break
- }
- v.reset(OpMove)
- v.AuxInt = int64ToAuxInt(s - s%16)
- v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
- v0.AuxInt = int64ToAuxInt(s % 16)
- v0.AddArg(dst)
- v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
- v1.AuxInt = int64ToAuxInt(s % 16)
- v1.AddArg(src)
- v2 := b.NewValue0(v.Pos, OpAMD64MOVQstore, types.TypeMem)
- v2.AuxInt = int32ToAuxInt(8)
- v3 := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
- v3.AuxInt = int32ToAuxInt(8)
- v3.AddArg2(src, mem)
- v4 := b.NewValue0(v.Pos, OpAMD64MOVQstore, types.TypeMem)
- v5 := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
- v5.AddArg2(src, mem)
- v4.AddArg3(dst, v5, mem)
- v2.AddArg3(dst, v3, v4)
- v.AddArg3(v0, v1, v2)
- return true
- }
- // match: (Move [s] dst src mem)
// cond: s > 64 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
// result: (DUFFCOPY [s] dst src mem)
for {
v.AddArg2(destptr, v0)
return true
}
- // match: (Zero [s] destptr mem)
- // cond: s%8 != 0 && s > 8 && !config.useSSE
- // result: (Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8]) (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
- for {
- s := auxIntToInt64(v.AuxInt)
- destptr := v_0
- mem := v_1
- if !(s%8 != 0 && s > 8 && !config.useSSE) {
- break
- }
- v.reset(OpZero)
- v.AuxInt = int64ToAuxInt(s - s%8)
- v0 := b.NewValue0(v.Pos, OpOffPtr, destptr.Type)
- v0.AuxInt = int64ToAuxInt(s % 8)
- v0.AddArg(destptr)
- v1 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v1.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
- v1.AddArg2(destptr, mem)
- v.AddArg2(v0, v1)
- return true
- }
- // match: (Zero [16] destptr mem)
- // cond: !config.useSSE
- // result: (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
- for {
- if auxIntToInt64(v.AuxInt) != 16 {
- break
- }
- destptr := v_0
- mem := v_1
- if !(!config.useSSE) {
- break
- }
- v.reset(OpAMD64MOVQstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 8))
- v0 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
- v0.AddArg2(destptr, mem)
- v.AddArg2(destptr, v0)
- return true
- }
- // match: (Zero [24] destptr mem)
- // cond: !config.useSSE
- // result: (MOVQstoreconst [makeValAndOff(0,16)] destptr (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [makeValAndOff(0,0)] destptr mem)))
- for {
- if auxIntToInt64(v.AuxInt) != 24 {
- break
- }
- destptr := v_0
- mem := v_1
- if !(!config.useSSE) {
- break
- }
- v.reset(OpAMD64MOVQstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 16))
- v0 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 8))
- v1 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v1.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
- v1.AddArg2(destptr, mem)
- v0.AddArg2(destptr, v1)
- v.AddArg2(destptr, v0)
- return true
- }
- // match: (Zero [32] destptr mem)
- // cond: !config.useSSE
- // result: (MOVQstoreconst [makeValAndOff(0,24)] destptr (MOVQstoreconst [makeValAndOff(0,16)] destptr (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))))
- for {
- if auxIntToInt64(v.AuxInt) != 32 {
- break
- }
- destptr := v_0
- mem := v_1
- if !(!config.useSSE) {
- break
- }
- v.reset(OpAMD64MOVQstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 24))
- v0 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 16))
- v1 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v1.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 8))
- v2 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v2.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
- v2.AddArg2(destptr, mem)
- v1.AddArg2(destptr, v2)
- v0.AddArg2(destptr, v1)
- v.AddArg2(destptr, v0)
- return true
- }
// match: (Zero [9] destptr mem)
- // cond: config.useSSE
// result: (MOVBstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 9 {
break
}
destptr := v_0
mem := v_1
- if !(config.useSSE) {
- break
- }
v.reset(OpAMD64MOVBstoreconst)
v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 8))
v0 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
return true
}
// match: (Zero [10] destptr mem)
- // cond: config.useSSE
// result: (MOVWstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 10 {
break
}
destptr := v_0
mem := v_1
- if !(config.useSSE) {
- break
- }
v.reset(OpAMD64MOVWstoreconst)
v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 8))
v0 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
return true
}
// match: (Zero [11] destptr mem)
- // cond: config.useSSE
// result: (MOVLstoreconst [makeValAndOff(0,7)] destptr (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 11 {
break
}
destptr := v_0
mem := v_1
- if !(config.useSSE) {
- break
- }
v.reset(OpAMD64MOVLstoreconst)
v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 7))
v0 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
return true
}
// match: (Zero [12] destptr mem)
- // cond: config.useSSE
// result: (MOVLstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 12 {
break
}
destptr := v_0
mem := v_1
- if !(config.useSSE) {
- break
- }
v.reset(OpAMD64MOVLstoreconst)
v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 8))
v0 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
return true
}
// match: (Zero [s] destptr mem)
- // cond: s > 12 && s < 16 && config.useSSE
+ // cond: s > 12 && s < 16
// result: (MOVQstoreconst [makeValAndOff(0,int32(s-8))] destptr (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
for {
s := auxIntToInt64(v.AuxInt)
destptr := v_0
mem := v_1
- if !(s > 12 && s < 16 && config.useSSE) {
+ if !(s > 12 && s < 16) {
break
}
v.reset(OpAMD64MOVQstoreconst)
return true
}
// match: (Zero [s] destptr mem)
- // cond: s%16 != 0 && s > 16 && config.useSSE
+ // cond: s%16 != 0 && s > 16
// result: (Zero [s-s%16] (OffPtr <destptr.Type> destptr [s%16]) (MOVOstoreconst [makeValAndOff(0,0)] destptr mem))
for {
s := auxIntToInt64(v.AuxInt)
destptr := v_0
mem := v_1
- if !(s%16 != 0 && s > 16 && config.useSSE) {
+ if !(s%16 != 0 && s > 16) {
break
}
v.reset(OpZero)
return true
}
// match: (Zero [16] destptr mem)
- // cond: config.useSSE
// result: (MOVOstoreconst [makeValAndOff(0,0)] destptr mem)
for {
if auxIntToInt64(v.AuxInt) != 16 {
break
}
destptr := v_0
mem := v_1
- if !(config.useSSE) {
- break
- }
v.reset(OpAMD64MOVOstoreconst)
v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v.AddArg2(destptr, mem)
return true
}
// match: (Zero [32] destptr mem)
- // cond: config.useSSE
// result: (MOVOstoreconst [makeValAndOff(0,16)] destptr (MOVOstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 32 {
break
}
destptr := v_0
mem := v_1
- if !(config.useSSE) {
- break
- }
v.reset(OpAMD64MOVOstoreconst)
v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 16))
v0 := b.NewValue0(v.Pos, OpAMD64MOVOstoreconst, types.TypeMem)
return true
}
// match: (Zero [48] destptr mem)
- // cond: config.useSSE
// result: (MOVOstoreconst [makeValAndOff(0,32)] destptr (MOVOstoreconst [makeValAndOff(0,16)] destptr (MOVOstoreconst [makeValAndOff(0,0)] destptr mem)))
for {
if auxIntToInt64(v.AuxInt) != 48 {
break
}
destptr := v_0
mem := v_1
- if !(config.useSSE) {
- break
- }
v.reset(OpAMD64MOVOstoreconst)
v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 32))
v0 := b.NewValue0(v.Pos, OpAMD64MOVOstoreconst, types.TypeMem)
return true
}
// match: (Zero [64] destptr mem)
- // cond: config.useSSE
// result: (MOVOstoreconst [makeValAndOff(0,48)] destptr (MOVOstoreconst [makeValAndOff(0,32)] destptr (MOVOstoreconst [makeValAndOff(0,16)] destptr (MOVOstoreconst [makeValAndOff(0,0)] destptr mem))))
for {
if auxIntToInt64(v.AuxInt) != 64 {
break
}
destptr := v_0
mem := v_1
- if !(config.useSSE) {
- break
- }
v.reset(OpAMD64MOVOstoreconst)
v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 48))
v0 := b.NewValue0(v.Pos, OpAMD64MOVOstoreconst, types.TypeMem)
return true
}
// match: (Zero [s] destptr mem)
- // cond: (s > 1024 || (config.noDuffDevice && s > 64 || !config.useSSE && s > 32)) && s%8 == 0
+ // cond: (s > 1024 || (config.noDuffDevice && s > 64)) && s%8 == 0
// result: (REPSTOSQ destptr (MOVQconst [s/8]) (MOVQconst [0]) mem)
for {
s := auxIntToInt64(v.AuxInt)
destptr := v_0
mem := v_1
- if !((s > 1024 || (config.noDuffDevice && s > 64 || !config.useSSE && s > 32)) && s%8 == 0) {
+ if !((s > 1024 || (config.noDuffDevice && s > 64)) && s%8 == 0) {
break
}
v.reset(OpAMD64REPSTOSQ)