return false
}
-func isInlinableMemclr(c *Config) bool {
+// isInlinableMemclr reports whether the target arch should inline a
+// runtime.memclrNoHeapPointers call of sz bytes as a Zero op.
+func isInlinableMemclr(c *Config, sz int64) bool {
+	// A negative size is invalid: letting it through would rewrite the
+	// call into a Zero op with a negative AuxInt. Refusing keeps the
+	// (never actually executed) runtime call instead.
+	if sz < 0 {
+		return false
+	}
	// TODO: expand this check to allow other architectures
	// see CL 454255 and issue 56997
-	return c.arch == "amd64" || c.arch == "arm64"
+	switch c.arch {
+	case "amd64", "arm64":
+		return true
+	case "ppc64le", "ppc64":
+		// ppc64x only profits from inlining below 512 bytes; larger
+		// clears stay with the runtime call.
+		return sz < 512
+	}
+	return false
}
// isInlinableMemmove reports whether the given arch performs a Move of the given size
return true
}
// match: (SelectN [0] call:(StaticCall {sym} sptr (Const64 [c]) mem))
- // cond: isInlinableMemclr(config) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
+ // cond: isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
// result: (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
}
c := auxIntToInt64(call_1.AuxInt)
- if !(isInlinableMemclr(config) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
+ if !(isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
break
}
v.reset(OpZero)
return true
}
// match: (SelectN [0] call:(StaticCall {sym} sptr (Const32 [c]) mem))
- // cond: isInlinableMemclr(config) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
+ // cond: isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
// result: (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
}
c := auxIntToInt32(call_1.AuxInt)
- if !(isInlinableMemclr(config) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
+ if !(isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
break
}
v.reset(OpZero)
// amd64:-`.*runtime\.makeslice`
// amd64:-`.*runtime\.panicmakeslicelen`
// amd64:"MOVUPS\tX15"
- // ppc64x:`.*runtime\.memclrNoHeapPointers`
+ // ppc64x:-`.*runtime\.memclrNoHeapPointers`
// ppc64x:-`.*runtime\.makeslice`
// ppc64x:-`.*runtime\.panicmakeslicelen`
return append(s, make([]int, 1<<2)...)
// amd64:-`.*runtime\.makeslice`
// amd64:-`.*runtime\.panicmakeslicelen`
// amd64:"MOVUPS\tX15"
- // ppc64x:`.*runtime\.memclrNoHeapPointers`
+ // ppc64x:-`.*runtime\.memclrNoHeapPointers`
// ppc64x:-`.*runtime\.makeslice`
// ppc64x:-`.*runtime\.panicmakeslicelen`
return append(s, make([]int, int64(1<<2))...)
// amd64:-`.*runtime\.makeslice`
// amd64:-`.*runtime\.panicmakeslicelen`
// amd64:"MOVUPS\tX15"
- // ppc64x:`.*runtime\.memclrNoHeapPointers`
+ // ppc64x:-`.*runtime\.memclrNoHeapPointers`
// ppc64x:-`.*runtime\.makeslice`
// ppc64x:-`.*runtime\.panicmakeslicelen`
return append(s, make([]int, uint64(1<<2))...)
// amd64:-`.*runtime\.makeslice`
// amd64:-`.*runtime\.panicmakeslicelen`
// amd64:"MOVUPS\tX15"
- // ppc64x:`.*runtime\.memclrNoHeapPointers`
+ // ppc64x:-`.*runtime\.memclrNoHeapPointers`
// ppc64x:-`.*runtime\.makeslice`
// ppc64x:-`.*runtime\.panicmakeslicelen`
return append(s, make([]int, uint(1<<2))...)
}
+// On ppc64x continue to use memclrNoHeapPointers
+// for sizes >= 512.
+//
+// The threshold is measured in bytes (the rewrite emits a byte-typed
+// Zero op), so 1<<9 ints = 4096 bytes is comfortably above the
+// 512-byte inlining cap and the runtime call must remain on ppc64x,
+// while amd64 inlines clears of any size.
+func SliceExtensionConst512(s []int) []int {
+	// amd64:-`.*runtime\.memclrNoHeapPointers`
+	// ppc64x:`.*runtime\.memclrNoHeapPointers`
+	return append(s, make([]int, 1<<9)...)
+}
+
func SliceExtensionPointer(s []*int, l int) []*int {
// amd64:`.*runtime\.memclrHasPointers`
// amd64:-`.*runtime\.makeslice`