Cypherpunks repositories - gostls13.git/commitdiff
runtime: support runtime.freegc in size-specialized mallocs for noscan objects
author thepudds <thepudds1460@gmail.com>
Sun, 9 Nov 2025 14:24:22 +0000 (09:24 -0500)
committer thepudds <thepudds1460@gmail.com>
Fri, 14 Nov 2025 22:23:16 +0000 (14:23 -0800)
This CL is part of a set of CLs that attempt to reduce how much work the
GC must do. See the design in https://go.dev/design/74299-runtime-freegc

This CL updates the smallNoScanStub stub in malloc_stubs.go to reuse
heap objects that have been freed by runtime.freegc calls, and generates
the corresponding size-specialized code in malloc_generated.go.

This CL only adds support in the specialized mallocs for noscan
heap objects (objects without pointers). A later CL handles objects
with pointers.

While we are here, we leave a couple of breadcrumbs in mkmalloc.go on
how to do the generation.

Updates #74299

Change-Id: I2657622601a27211554ee862fce057e101767a70
Reviewed-on: https://go-review.googlesource.com/c/go/+/715761
Reviewed-by: Junyang Shao <shaojunyang@google.com>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
Reviewed-by: Michael Knyszek <mknyszek@google.com>
src/runtime/_mkmalloc/mkmalloc.go
src/runtime/malloc.go
src/runtime/malloc_generated.go
src/runtime/malloc_stubs.go
src/runtime/malloc_test.go

index 986b0aa9f85d6845a8d99787a37769dff96b8302..1f040c88610dda29a91eb36e658000fd1e2ae427 100644 (file)
@@ -254,7 +254,8 @@ func inline(config generatorConfig) []byte {
        }
 
        // Write out the package and import declarations.
-       out.WriteString("// Code generated by mkmalloc.go; DO NOT EDIT.\n\n")
+       out.WriteString("// Code generated by mkmalloc.go; DO NOT EDIT.\n")
+       out.WriteString("// See overview in malloc_stubs.go.\n\n")
        out.WriteString("package " + f.Name.Name + "\n\n")
        for _, importDecl := range importDecls {
                out.Write(mustFormatNode(fset, importDecl))
index 13f5fc30814bcb7545f873ab7ac5ae4f51fc7660..d49dacaf68604bda02dee9daad933df5dc98b746 100644 (file)
@@ -1094,6 +1094,8 @@ const sizeSpecializedMallocEnabled = goexperiment.SizeSpecializedMalloc && GOOS
 // implementation and the corresponding allocation-related changes: the experiment must be
 // enabled, and none of the memory sanitizers should be enabled. We allow the race detector,
 // in contrast to sizeSpecializedMallocEnabled.
+// TODO(thepudds): it would be nice to check Valgrind integration, though there are some hints
+// there might not be any canned tests in tree for Go's integration with Valgrind.
 const runtimeFreegcEnabled = goexperiment.RuntimeFreegc && !asanenabled && !msanenabled && !valgrindenabled
 
 // Allocate an object of size bytes.
@@ -1966,10 +1968,15 @@ const (
 // or roughly when the liveness analysis of the compiler
 // would otherwise have determined ptr's object is reclaimable by the GC.
 func freegc(ptr unsafe.Pointer, size uintptr, noscan bool) bool {
-       if !runtimeFreegcEnabled || sizeSpecializedMallocEnabled || !reusableSize(size) {
-               // TODO(thepudds): temporarily disable freegc with SizeSpecializedMalloc until we finish integrating.
+       if !runtimeFreegcEnabled || !reusableSize(size) {
                return false
        }
+       if sizeSpecializedMallocEnabled && !noscan {
+               // TODO(thepudds): temporarily disable freegc with SizeSpecializedMalloc for pointer types
+               // until we finish integrating.
+               return false
+       }
+
        if ptr == nil {
                throw("freegc nil")
        }
index 2215dbaddb2e1cea224c55e68d4f2a8586489438..5abb61257a415128a79ba82b8ef7216d9f4c7bd0 100644 (file)
@@ -1,4 +1,5 @@
 // Code generated by mkmalloc.go; DO NOT EDIT.
+// See overview in malloc_stubs.go.
 
 package runtime
 
@@ -6400,6 +6401,32 @@ func mallocgcSmallNoScanSC2(size uintptr, typ *_type, needzero bool) unsafe.Poin
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -6497,6 +6524,32 @@ func mallocgcSmallNoScanSC3(size uintptr, typ *_type, needzero bool) unsafe.Poin
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -6594,6 +6647,32 @@ func mallocgcSmallNoScanSC4(size uintptr, typ *_type, needzero bool) unsafe.Poin
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -6691,6 +6770,32 @@ func mallocgcSmallNoScanSC5(size uintptr, typ *_type, needzero bool) unsafe.Poin
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -6788,6 +6893,32 @@ func mallocgcSmallNoScanSC6(size uintptr, typ *_type, needzero bool) unsafe.Poin
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -6885,6 +7016,32 @@ func mallocgcSmallNoScanSC7(size uintptr, typ *_type, needzero bool) unsafe.Poin
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -6982,6 +7139,32 @@ func mallocgcSmallNoScanSC8(size uintptr, typ *_type, needzero bool) unsafe.Poin
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -7079,6 +7262,32 @@ func mallocgcSmallNoScanSC9(size uintptr, typ *_type, needzero bool) unsafe.Poin
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -7176,6 +7385,32 @@ func mallocgcSmallNoScanSC10(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -7273,6 +7508,32 @@ func mallocgcSmallNoScanSC11(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -7370,6 +7631,32 @@ func mallocgcSmallNoScanSC12(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -7467,6 +7754,32 @@ func mallocgcSmallNoScanSC13(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -7564,6 +7877,32 @@ func mallocgcSmallNoScanSC14(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -7661,6 +8000,32 @@ func mallocgcSmallNoScanSC15(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -7758,6 +8123,32 @@ func mallocgcSmallNoScanSC16(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -7855,6 +8246,32 @@ func mallocgcSmallNoScanSC17(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -7952,6 +8369,32 @@ func mallocgcSmallNoScanSC18(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -8049,6 +8492,32 @@ func mallocgcSmallNoScanSC19(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -8146,6 +8615,32 @@ func mallocgcSmallNoScanSC20(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -8243,6 +8738,32 @@ func mallocgcSmallNoScanSC21(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -8340,6 +8861,32 @@ func mallocgcSmallNoScanSC22(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -8437,6 +8984,32 @@ func mallocgcSmallNoScanSC23(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -8534,6 +9107,32 @@ func mallocgcSmallNoScanSC24(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -8631,6 +9230,32 @@ func mallocgcSmallNoScanSC25(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
@@ -8728,6 +9353,32 @@ func mallocgcSmallNoScanSC26(size uintptr, typ *_type, needzero bool) unsafe.Poi
        const spc = spanClass(sizeclass<<1) | spanClass(1)
        span := c.alloc[spc]
 
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+               x := v
+               {
+
+                       if valgrindenabled {
+                               valgrindMalloc(x, size)
+                       }
+
+                       if gcBlackenEnabled != 0 && elemsize != 0 {
+                               if assistG := getg().m.curg; assistG != nil {
+                                       assistG.gcAssistBytes -= int64(elemsize - size)
+                               }
+                       }
+
+                       if debug.malloc {
+                               postMallocgcDebug(x, elemsize, typ)
+                       }
+                       return x
+               }
+
+       }
+
        var nextFreeFastResult gclinkptr
        if span.allocCache != 0 {
                theBit := sys.TrailingZeros64(span.allocCache)
index 224746f3d411247b75b89fe5e64adac20eb9dd6a..e9752956b82e0d745de41f20fc1ef34d6ef7e441 100644 (file)
@@ -7,6 +7,8 @@
 // to produce a full mallocgc function that's specialized for a span class
 // or specific size in the case of the tiny allocator.
 //
+// To generate the specialized mallocgc functions, do 'go run .' inside runtime/_mkmalloc.
+//
 // To assemble a mallocgc function, the mallocStub function is cloned, and the call to
 // inlinedMalloc is replaced with the inlined body of smallScanNoHeaderStub,
 // smallNoScanStub or tinyStub, depending on the parameters being specialized.
@@ -71,7 +73,8 @@ func mallocStub(size uintptr, typ *_type, needzero bool) unsafe.Pointer {
                }
        }
 
-       // Assist the GC if needed.
+       // Assist the GC if needed. (On the reuse path, we currently compensate for this;
+       // changes here might require changes there.)
        if gcBlackenEnabled != 0 {
                deductAssistCredit(size)
        }
@@ -242,6 +245,23 @@ func smallNoScanStub(size uintptr, typ *_type, needzero bool) (unsafe.Pointer, u
        c := getMCache(mp)
        const spc = spanClass(sizeclass<<1) | spanClass(noscanint_)
        span := c.alloc[spc]
+
+       // First, check for a reusable object.
+       if runtimeFreegcEnabled && c.hasReusableNoscan(spc) {
+               // We have a reusable object, use it.
+               v := mallocgcSmallNoscanReuse(c, span, spc, elemsize, needzero)
+               mp.mallocing = 0
+               releasem(mp)
+
+               // TODO(thepudds): note that the generated return path is essentially duplicated
+               // by the generator. For example, see the two postMallocgcDebug calls and
+               // related duplicated code on the return path currently in the generated
+               // mallocgcSmallNoScanSC2 function. One set of those corresponds to this
+               // return here. We might be able to de-duplicate the generated return path
+               // by updating the generator, perhaps by jumping to a shared return or similar.
+               return v, elemsize
+       }
+
        v := nextFreeFastStub(span)
        if v == 0 {
                v, span, checkGCTrigger = c.nextFree(spc)
index 10c20e6c2323a38a4d773537033aa1115c691e00..97cf0eed54c77ceac805b00ee942f13389e69a1f 100644 (file)
@@ -349,8 +349,10 @@ func testFreegc[T comparable](noscan bool) func(*testing.T) {
                t.Run("allocs-with-free", func(t *testing.T) {
                        // Same allocations, but now using explicit free so that
                        // no allocs get reported. (Again, not the desired long-term behavior).
-                       if SizeSpecializedMallocEnabled {
-                               t.Skip("temporarily skipping alloc tests for GOEXPERIMENT=sizespecializedmalloc")
+                       if SizeSpecializedMallocEnabled && !noscan {
+                               // TODO(thepudds): skip at this point in the stack for size-specialized malloc
+                               // with !noscan. Additional integration with sizespecializedmalloc is in a later CL.
+                               t.Skip("temporarily skipping alloc tests for GOEXPERIMENT=sizespecializedmalloc for pointer types")
                        }
                        if !RuntimeFreegcEnabled {
                                t.Skip("skipping alloc tests with runtime.freegc disabled")
@@ -370,8 +372,10 @@ func testFreegc[T comparable](noscan bool) func(*testing.T) {
                        // Multiple allocations outstanding before explicitly freeing,
                        // but still within the limit of our smallest free list size
                        // so that no allocs are reported. (Again, not long-term behavior).
-                       if SizeSpecializedMallocEnabled {
-                               t.Skip("temporarily skipping alloc tests for GOEXPERIMENT=sizespecializedmalloc")
+                       if SizeSpecializedMallocEnabled && !noscan {
+                               // TODO(thepudds): skip at this point in the stack for size-specialized malloc
+                               // with !noscan. Additional integration with sizespecializedmalloc is in a later CL.
+                               t.Skip("temporarily skipping alloc tests for GOEXPERIMENT=sizespecializedmalloc for pointer types")
                        }
                        if !RuntimeFreegcEnabled {
                                t.Skip("skipping alloc tests with runtime.freegc disabled")
@@ -514,10 +518,10 @@ func testFreegc[T comparable](noscan bool) func(*testing.T) {
                        // See https://go.dev/cl/717520 for some additional discussion,
                        // including how we can deliberately cause the test to fail currently
                        // if we purposefully introduce some assist credit bugs.
-                       if SizeSpecializedMallocEnabled {
+                       if SizeSpecializedMallocEnabled && !noscan {
                                // TODO(thepudds): skip this test at this point in the stack; later CL has
                                // integration with sizespecializedmalloc.
-                               t.Skip("temporarily skip assist credit test for GOEXPERIMENT=sizespecializedmalloc")
+                               t.Skip("temporarily skip assist credit tests for GOEXPERIMENT=sizespecializedmalloc for pointer types")
                        }
                        if !RuntimeFreegcEnabled {
                                t.Skip("skipping assist credit test with runtime.freegc disabled")