// ios/arm64 40 4MB 1 256K (2MB)
// */32-bit 32 4MB 1 1024 (4KB)
// */mips(le) 31 4MB 1 512 (2KB)
+ // wasm 32 512KB 1 8192 (64KB)
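+ // (For wasm: 2^32 bytes / 512KB per arena = 8192 L2 entries; at
+ // 8 bytes each, the L2 table is 64KB.)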
// heapArenaBytes is the size of a heap arena. The heap
// consists of mappings of size heapArenaBytes, aligned to
// heapArenaBytes. The initial heap mapping is one arena.
//
- // This is currently 64MB on 64-bit non-Windows and 4MB on
- // 32-bit and on Windows. We use smaller arenas on Windows
- // because all committed memory is charged to the process,
- // even if it's not touched. Hence, for processes with small
- // heaps, the mapped arena space needs to be commensurate.
- // This is particularly important with the race detector,
- // since it significantly amplifies the cost of committed
- // memory.
+ // This is currently 64MB on 64-bit non-Windows, 4MB on
+ // 32-bit and on Windows, and 512KB on Wasm. We use smaller
+ // arenas on Windows because all committed memory is charged
+ // to the process, even if it's not touched. Hence, for
+ // processes with small heaps, the mapped arena space needs
+ // to be commensurate. This is particularly important with
+ // the race detector, since it significantly amplifies the
+ // cost of committed memory. We use smaller arenas on Wasm
+ // because some Wasm programs have very small heaps, and
+ // everything in the Wasm linear memory is charged.
heapArenaBytes = 1 << logHeapArenaBytes
heapArenaWords = heapArenaBytes / goarch.PtrSize
// logHeapArenaBytes is log_2 of heapArenaBytes. For clarity,
// prefer using heapArenaBytes where possible (we need the
// constant to compute some other constants).
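+ // For example, the expression below evaluates to 6+20 = 26 (64MB
+ // arenas) on linux/amd64, to 2+20 = 22 (4MB) on Windows, 32-bit,
+ // and ios/arm64, and to 9+10 = 19 (512KB) on Wasm.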
- logHeapArenaBytes = (6+20)*(_64bit*(1-goos.IsWindows)*(1-goarch.IsWasm)*(1-goos.IsIos*goarch.IsArm64)) + (2+20)*(_64bit*goos.IsWindows) + (2+20)*(1-_64bit) + (2+20)*goarch.IsWasm + (2+20)*goos.IsIos*goarch.IsArm64
+ logHeapArenaBytes = (6+20)*(_64bit*(1-goos.IsWindows)*(1-goarch.IsWasm)*(1-goos.IsIos*goarch.IsArm64)) + (2+20)*(_64bit*goos.IsWindows) + (2+20)*(1-_64bit) + (9+10)*goarch.IsWasm + (2+20)*goos.IsIos*goarch.IsArm64
// heapArenaBitmapWords is the size of each heap arena's bitmap in uintptrs.
heapArenaBitmapWords = heapArenaWords / (8 * goarch.PtrSize)
//
// Must be a multiple of the pageInUse bitmap element size and
// must also evenly divide pagesPerArena.
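+ // On Wasm this is 64: a 512KB arena holds only 64 8KB pages.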
- pagesPerSpanRoot = 512
+ pagesPerSpanRoot = min(512, pagesPerArena)
)
// gcPrepareMarkRoots queues root scanning jobs (stacks, globals, and
max: PallocChunkPages,
want: BitRange{41, 1},
},
- "MultiMin1": {
- alloc: []BitRange{{0, 63}, {65, 20}, {87, PallocChunkPages - 87}},
+ }
+ if PallocChunkPages >= 512 {
+ // Use a typed variable, not the untyped constant: when PallocChunkPages
+ // is small (e.g. on wasm), PallocChunkPages - 87 would not compile.
+ var pallocChunkPages uint = PallocChunkPages
+ tests["MultiMin1"] = test{
+ alloc: []BitRange{{0, 63}, {65, 20}, {87, pallocChunkPages - 87}},
scavenged: []BitRange{{86, 1}},
min: 1,
max: PallocChunkPages,
want: BitRange{85, 1},
- },
+ }
}
// Try out different page minimums.
for m := uintptr(1); m <= 64; m *= 2 {
max: PallocChunkPages,
want: BitRange{PallocChunkPages - uint(m), uint(m)},
}
- tests["Straddle64"+suffix] = test{
- alloc: []BitRange{{0, 64 - uint(m)}, {64 + uint(m), PallocChunkPages - (64 + uint(m))}},
- min: m,
- max: 2 * m,
- want: BitRange{64 - uint(m), 2 * uint(m)},
- }
- tests["BottomEdge64WithFull"+suffix] = test{
- alloc: []BitRange{{64, 64}, {128 + 3*uint(m), PallocChunkPages - (128 + 3*uint(m))}},
- scavenged: []BitRange{{1, 10}},
- min: m,
- max: 3 * m,
- want: BitRange{128, 3 * uint(m)},
- }
- tests["BottomEdge64WithPocket"+suffix] = test{
- alloc: []BitRange{{64, 62}, {127, 1}, {128 + 3*uint(m), PallocChunkPages - (128 + 3*uint(m))}},
- scavenged: []BitRange{{1, 10}},
- min: m,
- max: 3 * m,
- want: BitRange{128, 3 * uint(m)},
+ if PallocChunkPages >= 512 {
+ tests["Straddle64"+suffix] = test{
+ alloc: []BitRange{{0, 64 - uint(m)}, {64 + uint(m), PallocChunkPages - (64 + uint(m))}},
+ min: m,
+ max: 2 * m,
+ want: BitRange{64 - uint(m), 2 * uint(m)},
+ }
+ tests["BottomEdge64WithFull"+suffix] = test{
+ alloc: []BitRange{{64, 64}, {128 + 3*uint(m), PallocChunkPages - (128 + 3*uint(m))}},
+ scavenged: []BitRange{{1, 10}},
+ min: m,
+ max: 3 * m,
+ want: BitRange{128, 3 * uint(m)},
+ }
+ tests["BottomEdge64WithPocket"+suffix] = test{
+ alloc: []BitRange{{64, 62}, {127, 1}, {128 + 3*uint(m), PallocChunkPages - (128 + 3*uint(m))}},
+ scavenged: []BitRange{{1, 10}},
+ min: m,
+ max: 3 * m,
+ want: BitRange{128, 3 * uint(m)},
+ }
}
tests["Max0"+suffix] = test{
scavenged: []BitRange{{0, PallocChunkPages - uint(m)}},
}
}
if m > 1 {
- tests["MaxUnaligned"+suffix] = test{
- scavenged: []BitRange{{0, PallocChunkPages - uint(m*2-1)}},
- min: m,
- max: m - 2,
- want: BitRange{PallocChunkPages - uint(m), uint(m)},
- }
- tests["SkipSmall"+suffix] = test{
- alloc: []BitRange{{0, 64 - uint(m)}, {64, 5}, {70, 11}, {82, PallocChunkPages - 82}},
- min: m,
- max: m,
- want: BitRange{64 - uint(m), uint(m)},
+ if PallocChunkPages >= m*2 {
+ tests["MaxUnaligned"+suffix] = test{
+ scavenged: []BitRange{{0, PallocChunkPages - uint(m*2-1)}},
+ min: m,
+ max: m - 2,
+ want: BitRange{PallocChunkPages - uint(m), uint(m)},
+ }
}
- tests["SkipMisaligned"+suffix] = test{
- alloc: []BitRange{{0, 64 - uint(m)}, {64, 63}, {127 + uint(m), PallocChunkPages - (127 + uint(m))}},
- min: m,
- max: m,
- want: BitRange{64 - uint(m), uint(m)},
+ if PallocChunkPages >= 512 {
+ // shadow the constant with a typed variable to avoid constant overflow when PallocChunkPages is small
+ var PallocChunkPages uint = PallocChunkPages
+ tests["SkipSmall"+suffix] = test{
+ alloc: []BitRange{{0, 64 - uint(m)}, {64, 5}, {70, 11}, {82, PallocChunkPages - 82}},
+ min: m,
+ max: m,
+ want: BitRange{64 - uint(m), uint(m)},
+ }
+ tests["SkipMisaligned"+suffix] = test{
+ alloc: []BitRange{{0, 64 - uint(m)}, {64, 63}, {127 + uint(m), PallocChunkPages - (127 + uint(m))}},
+ min: m,
+ max: m,
+ want: BitRange{64 - uint(m), uint(m)},
+ }
}
tests["MaxLessThan"+suffix] = test{
scavenged: []BitRange{{0, PallocChunkPages - uint(m)}},
mark func(markFunc)
find func(findFunc)
}
- for _, test := range []testCase{
+ tests := []testCase{
{
name: "Uninitialized",
mark: func(_ markFunc) {},
find(BaseChunkIdx, PallocChunkPages-1)
},
},
- {
- name: "TwoChunks",
- mark: func(mark markFunc) {
- mark(PageBase(BaseChunkIdx, 128), PageBase(BaseChunkIdx+1, 128))
- },
- find: func(find findFunc) {
- find(BaseChunkIdx+1, 127)
- find(BaseChunkIdx, PallocChunkPages-1)
- },
- },
- {
- name: "TwoChunksOffset",
- mark: func(mark markFunc) {
- mark(PageBase(BaseChunkIdx+7, 128), PageBase(BaseChunkIdx+8, 129))
- },
- find: func(find findFunc) {
- find(BaseChunkIdx+8, 128)
- find(BaseChunkIdx+7, PallocChunkPages-1)
- },
- },
{
name: "SevenChunksOffset",
mark: func(mark markFunc) {
}
},
},
- } {
+ }
+ if PallocChunkPages >= 512 {
+ tests = append(tests,
+ testCase{
+ name: "TwoChunks",
+ mark: func(mark markFunc) {
+ mark(PageBase(BaseChunkIdx, 128), PageBase(BaseChunkIdx+1, 128))
+ },
+ find: func(find findFunc) {
+ find(BaseChunkIdx+1, 127)
+ find(BaseChunkIdx, PallocChunkPages-1)
+ },
+ },
+ testCase{
+ name: "TwoChunksOffset",
+ mark: func(mark markFunc) {
+ mark(PageBase(BaseChunkIdx+7, 128), PageBase(BaseChunkIdx+8, 129))
+ },
+ find: func(find findFunc) {
+ find(BaseChunkIdx+8, 128)
+ find(BaseChunkIdx+7, PallocChunkPages-1)
+ },
+ },
+ )
+ }
+ for _, test := range tests {
test := test
t.Run("Bg/"+test.name, func(t *testing.T) {
mark, find, nextGen := setup(t, false)
}
func TestScavChunkDataPack(t *testing.T) {
- if !CheckPackScavChunkData(1918237402, 512, 512, 0b11) {
- t.Error("failed pack/unpack check for scavChunkData 1")
+ if PallocChunkPages >= 512 {
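+ // inUse and lastInUse are bounded by PallocChunkPages, so a value
+ // of 512 only round-trips when chunks have at least 512 pages.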
+ if !CheckPackScavChunkData(1918237402, 512, 512, 0b11) {
+ t.Error("failed pack/unpack check for scavChunkData 1")
+ }
}
if !CheckPackScavChunkData(^uint32(0), 12, 0, 0b00) {
t.Error("failed pack/unpack check for scavChunkData 2")
//
// Must be a multiple of the pageInUse bitmap element size and
// must also evenly divide pagesPerArena.
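+ // On Wasm this is 64, matching the 64 pages of a 512KB arena.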
- pagesPerReclaimerChunk = 512
+ pagesPerReclaimerChunk = min(512, pagesPerArena)
// physPageAlignedStacks indicates whether stack allocations must be
// physical page aligned. This is a requirement for MAP_STACK on
package runtime
import (
+ "internal/goarch"
"internal/runtime/atomic"
"internal/runtime/gc"
"unsafe"
const (
// The size of a bitmap chunk, i.e. the amount of bits (that is, pages) to consider
- // in the bitmap at once.
+ // in the bitmap at once. It is 4MB on most platforms, but 512KB on Wasm.
+ // We use a smaller chunk size on Wasm for the same reason as the smaller
+ // arena size (see heapArenaBytes).
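+ // For example, with 8KB pages, logPallocChunkPages = 9 gives
+ // 512-page (4MB) chunks, and 6 gives 64-page (512KB) chunks on Wasm.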
pallocChunkPages = 1 << logPallocChunkPages
pallocChunkBytes = pallocChunkPages * pageSize
- logPallocChunkPages = 9
+ logPallocChunkPages = 9*(1-goarch.IsWasm) + 6*goarch.IsWasm
logPallocChunkBytes = logPallocChunkPages + gc.PageShift
// The number of radix bits for each level.
// heapAddrBits | L1 Bits | L2 Bits | L2 Entry Size
// ------------------------------------------------
// 32 | 0 | 10 | 128 KiB
+ // 32 (wasm) | 0 | 13 | 128 KiB
// 33 (iOS) | 0 | 11 | 256 KiB
// 48 | 13 | 13 | 1 MiB
//
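+ // On wasm, 512KB chunks mean 2^(32-19) = 2^13 chunks cover the
+ // address space; each chunk's bitmaps shrink by the same factor
+ // of 8, so the L2 entry size stays at 128 KiB.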
BaseChunkIdx: {{0, 25}},
},
},
- "AllFree64": {
- before: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {},
- },
- scav: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{21, 1}, {63, 65}},
- },
- hits: []hit{
- {64, PageBase(BaseChunkIdx, 0), 2 * PageSize},
- {64, PageBase(BaseChunkIdx, 64), 64 * PageSize},
- {64, PageBase(BaseChunkIdx, 128), 0},
- },
- after: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, 192}},
- },
- },
- "AllFree65": {
- before: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {},
- },
- scav: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{129, 1}},
- },
- hits: []hit{
- {65, PageBase(BaseChunkIdx, 0), 0},
- {65, PageBase(BaseChunkIdx, 65), PageSize},
- {65, PageBase(BaseChunkIdx, 130), 0},
- },
- after: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, 195}},
- },
- },
"ExhaustPallocChunkPages-3": {
before: map[ChunkIdx][]BitRange{
BaseChunkIdx: {},
BaseChunkIdx: {{0, PallocChunkPages}},
},
},
- "StraddlePallocChunkPages": {
- before: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, PallocChunkPages / 2}},
- BaseChunkIdx + 1: {{PallocChunkPages / 2, PallocChunkPages / 2}},
- },
- scav: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {},
- BaseChunkIdx + 1: {{3, 100}},
- },
- hits: []hit{
- {PallocChunkPages, PageBase(BaseChunkIdx, PallocChunkPages/2), 100 * PageSize},
- {PallocChunkPages, 0, 0},
- {1, 0, 0},
- },
- after: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, PallocChunkPages}},
- BaseChunkIdx + 1: {{0, PallocChunkPages}},
- },
- },
"StraddlePallocChunkPages+1": {
before: map[ChunkIdx][]BitRange{
BaseChunkIdx: {{0, PallocChunkPages / 2}},
BaseChunkIdx + 0x41: {{0, PallocChunkPages}},
},
},
- "StraddlePallocChunkPages*2": {
- before: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, PallocChunkPages / 2}},
- BaseChunkIdx + 1: {},
- BaseChunkIdx + 2: {{PallocChunkPages / 2, PallocChunkPages / 2}},
- },
- scav: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, 7}},
- BaseChunkIdx + 1: {{3, 5}, {121, 10}},
- BaseChunkIdx + 2: {{PallocChunkPages/2 + 12, 2}},
- },
- hits: []hit{
- {PallocChunkPages * 2, PageBase(BaseChunkIdx, PallocChunkPages/2), 15 * PageSize},
- {PallocChunkPages * 2, 0, 0},
- {1, 0, 0},
- },
- after: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, PallocChunkPages}},
- BaseChunkIdx + 1: {{0, PallocChunkPages}},
- BaseChunkIdx + 2: {{0, PallocChunkPages}},
- },
- },
"StraddlePallocChunkPages*5/4": {
before: map[ChunkIdx][]BitRange{
BaseChunkIdx: {{0, PallocChunkPages}},
BaseChunkIdx + 3: {{0, PallocChunkPages}},
},
},
- "AllFreePallocChunkPages*7+5": {
+ }
+ if PallocChunkPages >= 512 {
+ tests["AllFree64"] = test{
+ before: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {},
+ },
+ scav: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{21, 1}, {63, 65}},
+ },
+ hits: []hit{
+ {64, PageBase(BaseChunkIdx, 0), 2 * PageSize},
+ {64, PageBase(BaseChunkIdx, 64), 64 * PageSize},
+ {64, PageBase(BaseChunkIdx, 128), 0},
+ },
+ after: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{0, 192}},
+ },
+ }
+ tests["AllFree65"] = test{
+ before: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {},
+ },
+ scav: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{129, 1}},
+ },
+ hits: []hit{
+ {65, PageBase(BaseChunkIdx, 0), 0},
+ {65, PageBase(BaseChunkIdx, 65), PageSize},
+ {65, PageBase(BaseChunkIdx, 130), 0},
+ },
+ after: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{0, 195}},
+ },
+ }
+ tests["StraddlePallocChunkPages"] = test{
+ before: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{0, PallocChunkPages / 2}},
+ BaseChunkIdx + 1: {{PallocChunkPages / 2, PallocChunkPages / 2}},
+ },
+ scav: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {},
+ BaseChunkIdx + 1: {{3, 100}},
+ },
+ hits: []hit{
+ {PallocChunkPages, PageBase(BaseChunkIdx, PallocChunkPages/2), 100 * PageSize},
+ {PallocChunkPages, 0, 0},
+ {1, 0, 0},
+ },
+ after: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{0, PallocChunkPages}},
+ BaseChunkIdx + 1: {{0, PallocChunkPages}},
+ },
+ }
+ tests["AllFreePallocChunkPages*7+5"] = test{
before: map[ChunkIdx][]BitRange{
BaseChunkIdx: {},
BaseChunkIdx + 1: {},
BaseChunkIdx + 6: {{0, PallocChunkPages}},
BaseChunkIdx + 7: {{0, 6}},
},
- },
+ }
+ tests["StraddlePallocChunkPages*2"] = test{
+ before: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{0, PallocChunkPages / 2}},
+ BaseChunkIdx + 1: {},
+ BaseChunkIdx + 2: {{PallocChunkPages / 2, PallocChunkPages / 2}},
+ },
+ scav: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{0, 7}},
+ BaseChunkIdx + 1: {{3, 5}, {121, 10}},
+ BaseChunkIdx + 2: {{PallocChunkPages/2 + 12, 2}},
+ },
+ hits: []hit{
+ {PallocChunkPages * 2, PageBase(BaseChunkIdx, PallocChunkPages/2), 15 * PageSize},
+ {PallocChunkPages * 2, 0, 0},
+ {1, 0, 0},
+ },
+ after: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{0, PallocChunkPages}},
+ BaseChunkIdx + 1: {{0, PallocChunkPages}},
+ BaseChunkIdx + 2: {{0, PallocChunkPages}},
+ },
+ }
}
// Disable these tests on iOS since we have a small address space.
// See #46860.
if GOOS == "openbsd" && testing.Short() {
t.Skip("skipping because virtual memory is limited; see #36210")
}
- tests := map[string]struct {
+ type test struct {
before map[ChunkIdx][]BitRange
after map[ChunkIdx][]BitRange
npages uintptr
frees []uintptr
- }{
+ }
+ tests := map[string]test{
"Free1": {
npages: 1,
before: map[ChunkIdx][]BitRange{
BaseChunkIdx: {{25, PallocChunkPages - 25}},
},
},
- "Free64": {
- npages: 64,
- before: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, PallocChunkPages}},
- },
- frees: []uintptr{
- PageBase(BaseChunkIdx, 0),
- PageBase(BaseChunkIdx, 64),
- PageBase(BaseChunkIdx, 128),
- },
- after: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{192, PallocChunkPages - 192}},
- },
- },
- "Free65": {
- npages: 65,
- before: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, PallocChunkPages}},
- },
- frees: []uintptr{
- PageBase(BaseChunkIdx, 0),
- PageBase(BaseChunkIdx, 65),
- PageBase(BaseChunkIdx, 130),
- },
- after: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{195, PallocChunkPages - 195}},
- },
- },
"FreePallocChunkPages": {
npages: PallocChunkPages,
before: map[ChunkIdx][]BitRange{
},
},
}
+ if PallocChunkPages >= 512 {
+ // shadow the constant with a typed variable to avoid constant overflow when PallocChunkPages is small
+ var PallocChunkPages uint = PallocChunkPages
+ tests["Free64"] = test{
+ npages: 64,
+ before: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{0, PallocChunkPages}},
+ },
+ frees: []uintptr{
+ PageBase(BaseChunkIdx, 0),
+ PageBase(BaseChunkIdx, 64),
+ PageBase(BaseChunkIdx, 128),
+ },
+ after: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{192, PallocChunkPages - 192}},
+ },
+ }
+ tests["Free65"] = test{
+ npages: 65,
+ before: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{0, PallocChunkPages}},
+ },
+ frees: []uintptr{
+ PageBase(BaseChunkIdx, 0),
+ PageBase(BaseChunkIdx, 65),
+ PageBase(BaseChunkIdx, 130),
+ },
+ after: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{195, PallocChunkPages - 195}},
+ },
+ }
+ }
for name, v := range tests {
v := v
t.Run(name, func(t *testing.T) {
afterScav map[ChunkIdx][]BitRange
}
tests := map[string]test{
- "AllFree": {
- beforeAlloc: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {},
- },
- beforeScav: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{1, 1}, {64, 64}},
- },
- hits: []PageCache{
- NewPageCache(PageBase(BaseChunkIdx, 0), ^uint64(0), 0x2),
- NewPageCache(PageBase(BaseChunkIdx, 64), ^uint64(0), ^uint64(0)),
- NewPageCache(PageBase(BaseChunkIdx, 128), ^uint64(0), 0),
- NewPageCache(PageBase(BaseChunkIdx, 192), ^uint64(0), 0),
- },
- afterAlloc: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, 256}},
- },
- },
"ManyArena": {
beforeAlloc: map[ChunkIdx][]BitRange{
BaseChunkIdx: {{0, PallocChunkPages}},
BaseChunkIdx + 2: {{0, PallocChunkPages}},
},
},
- "NotContiguous": {
+
+ "Fail": {
beforeAlloc: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, PallocChunkPages}},
- BaseChunkIdx + 0xff: {{0, 0}},
+ BaseChunkIdx: {{0, PallocChunkPages}},
+ },
+ hits: []PageCache{
+ NewPageCache(0, 0, 0),
+ NewPageCache(0, 0, 0),
+ NewPageCache(0, 0, 0),
+ },
+ afterAlloc: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{0, PallocChunkPages}},
+ },
+ },
+ "RetainScavBits": {
+ beforeAlloc: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{0, 1}, {10, 2}},
},
beforeScav: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, PallocChunkPages}},
- BaseChunkIdx + 0xff: {{31, 67}},
+ BaseChunkIdx: {{0, 4}, {11, 1}},
},
hits: []PageCache{
- NewPageCache(PageBase(BaseChunkIdx+0xff, 0), ^uint64(0), ((uint64(1)<<33)-1)<<31),
+ NewPageCache(PageBase(BaseChunkIdx, 0), ^uint64(0x1|(0x3<<10)), 0x7<<1),
},
afterAlloc: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, PallocChunkPages}},
- BaseChunkIdx + 0xff: {{0, 64}},
+ BaseChunkIdx: {{0, 64}},
},
afterScav: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, PallocChunkPages}},
- BaseChunkIdx + 0xff: {{64, 34}},
+ BaseChunkIdx: {{0, 1}, {11, 1}},
},
},
- "First": {
+ }
+ if PallocChunkPages >= 512 {
+ tests["AllFree"] = test{
beforeAlloc: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, 32}, {33, 31}, {96, 32}},
+ BaseChunkIdx: {},
},
beforeScav: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{1, 4}, {31, 5}, {66, 2}},
+ BaseChunkIdx: {{1, 1}, {64, 64}},
},
hits: []PageCache{
- NewPageCache(PageBase(BaseChunkIdx, 0), 1<<32, 1<<32),
- NewPageCache(PageBase(BaseChunkIdx, 64), (uint64(1)<<32)-1, 0x3<<2),
+ NewPageCache(PageBase(BaseChunkIdx, 0), ^uint64(0), 0x2),
+ NewPageCache(PageBase(BaseChunkIdx, 64), ^uint64(0), ^uint64(0)),
+ NewPageCache(PageBase(BaseChunkIdx, 128), ^uint64(0), 0),
+ NewPageCache(PageBase(BaseChunkIdx, 192), ^uint64(0), 0),
},
afterAlloc: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, 128}},
+ BaseChunkIdx: {{0, 256}},
},
- },
- "Fail": {
+ }
+ tests["NotContiguous"] = test{
beforeAlloc: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, PallocChunkPages}},
+ BaseChunkIdx: {{0, PallocChunkPages}},
+ BaseChunkIdx + 0xff: {{0, 0}},
+ },
+ beforeScav: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{0, PallocChunkPages}},
+ BaseChunkIdx + 0xff: {{31, 67}},
},
hits: []PageCache{
- NewPageCache(0, 0, 0),
- NewPageCache(0, 0, 0),
- NewPageCache(0, 0, 0),
+ NewPageCache(PageBase(BaseChunkIdx+0xff, 0), ^uint64(0), ((uint64(1)<<33)-1)<<31),
},
afterAlloc: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, PallocChunkPages}},
+ BaseChunkIdx: {{0, PallocChunkPages}},
+ BaseChunkIdx + 0xff: {{0, 64}},
},
- },
- "RetainScavBits": {
+ afterScav: map[ChunkIdx][]BitRange{
+ BaseChunkIdx: {{0, PallocChunkPages}},
+ BaseChunkIdx + 0xff: {{64, 34}},
+ },
+ }
+ tests["First"] = test{
beforeAlloc: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, 1}, {10, 2}},
+ BaseChunkIdx: {{0, 32}, {33, 31}, {96, 32}},
},
beforeScav: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, 4}, {11, 1}},
+ BaseChunkIdx: {{1, 4}, {31, 5}, {66, 2}},
},
hits: []PageCache{
- NewPageCache(PageBase(BaseChunkIdx, 0), ^uint64(0x1|(0x3<<10)), 0x7<<1),
+ NewPageCache(PageBase(BaseChunkIdx, 0), 1<<32, 1<<32),
+ NewPageCache(PageBase(BaseChunkIdx, 64), (uint64(1)<<32)-1, 0x3<<2),
},
afterAlloc: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, 64}},
- },
- afterScav: map[ChunkIdx][]BitRange{
- BaseChunkIdx: {{0, 1}, {11, 1}},
+ BaseChunkIdx: {{0, 128}},
},
- },
+ }
}
// Disable these tests on iOS since we have a small address space.
// See #46860.
want[PallocChunkPages/64-1] = 1 << 63
test(t, PallocChunkPages-1, 1, want)
})
- t.Run("Inner", func(t *testing.T) {
- want := new(PallocBits)
- want[2] = 0x3e
- test(t, 129, 5, want)
- })
- t.Run("Aligned", func(t *testing.T) {
- want := new(PallocBits)
- want[2] = ^uint64(0)
- want[3] = ^uint64(0)
- test(t, 128, 128, want)
- })
- t.Run("Begin", func(t *testing.T) {
- want := new(PallocBits)
- want[0] = ^uint64(0)
- want[1] = ^uint64(0)
- want[2] = ^uint64(0)
- want[3] = ^uint64(0)
- want[4] = ^uint64(0)
- want[5] = 0x1
- test(t, 0, 321, want)
- })
- t.Run("End", func(t *testing.T) {
- want := new(PallocBits)
- want[PallocChunkPages/64-1] = ^uint64(0)
- want[PallocChunkPages/64-2] = ^uint64(0)
- want[PallocChunkPages/64-3] = ^uint64(0)
- want[PallocChunkPages/64-4] = 1 << 63
- test(t, PallocChunkPages-(64*3+1), 64*3+1, want)
- })
+ if PallocChunkPages >= 512 {
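+ // In the subtests below, want[:][i] (rather than want[i]) keeps the
+ // index non-constant, so the short PallocBits array on wasm does not
+ // cause a compile-time out-of-range error (the branch itself is
+ // skipped there).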
+ t.Run("Inner", func(t *testing.T) {
+ want := new(PallocBits)
+ want[:][2] = 0x3e
+ test(t, 129, 5, want)
+ })
+ t.Run("Aligned", func(t *testing.T) {
+ want := new(PallocBits)
+ want[:][2] = ^uint64(0)
+ want[:][3] = ^uint64(0)
+ test(t, 128, 128, want)
+ })
+ t.Run("Begin", func(t *testing.T) {
+ want := new(PallocBits)
+ want[:][0] = ^uint64(0)
+ want[:][1] = ^uint64(0)
+ want[:][2] = ^uint64(0)
+ want[:][3] = ^uint64(0)
+ want[:][4] = ^uint64(0)
+ want[:][5] = 0x1
+ test(t, 0, 321, want)
+ })
+ t.Run("End", func(t *testing.T) {
+ // shadow the constant with a typed variable to avoid constant overflow when PallocChunkPages is small
+ var PallocChunkPages uint = PallocChunkPages
+ want := new(PallocBits)
+ want[PallocChunkPages/64-1] = ^uint64(0)
+ want[PallocChunkPages/64-2] = ^uint64(0)
+ want[PallocChunkPages/64-3] = ^uint64(0)
+ want[PallocChunkPages/64-4] = 1 << 63
+ test(t, PallocChunkPages-(64*3+1), 64*3+1, want)
+ })
+ }
t.Run("All", func(t *testing.T) {
want := new(PallocBits)
for i := range want {
i, n uint // bit range to popcnt over.
want uint // expected popcnt result on that range.
}
- tests := map[string]struct {
+ type testCase struct {
init []BitRange // bit ranges to set to 1 in the bitmap.
tests []test // a set of popcnt tests to run over the bitmap.
- }{
+ }
+ tests := map[string]testCase{
"None": {
tests: []test{
{0, 1, 0},
{0, PallocChunkPages, PallocChunkPages / 2},
},
},
- "OddBound": {
+ }
+ if PallocChunkPages >= 512 {
+ tests["OddBound"] = testCase{
init: []BitRange{{0, 111}},
tests: []test{
{0, 1, 1},
{PallocChunkPages / 2, PallocChunkPages / 2, 0},
{0, PallocChunkPages, 111},
},
- },
- "Scattered": {
+ }
+ tests["Scattered"] = testCase{
init: []BitRange{
{1, 3}, {5, 1}, {7, 1}, {10, 2}, {13, 1}, {15, 4},
{21, 1}, {23, 1}, {26, 2}, {30, 5}, {36, 2}, {40, 3},
{1, 128, 74},
{0, PallocChunkPages, 75},
},
- },
+ }
}
for name, v := range tests {
v := v
PackPallocSum(11, 23, 23),
},
}
- tests["StartMaxEnd"] = test{
- free: []BitRange{{0, 4}, {50, 100}, {PallocChunkPages - 4, 4}},
- hits: []PallocSum{
- PackPallocSum(4, 100, 4),
- },
- }
- tests["OnlyMax"] = test{
- free: []BitRange{{1, 20}, {35, 241}, {PallocChunkPages - 50, 30}},
- hits: []PallocSum{
- PackPallocSum(0, 241, 0),
- },
- }
- tests["MultiMax"] = test{
- free: []BitRange{{35, 2}, {40, 5}, {100, 5}},
- hits: []PallocSum{
- PackPallocSum(0, 5, 0),
- },
+ if PallocChunkPages >= 512 {
+ tests["StartMaxEnd"] = test{
+ free: []BitRange{{0, 4}, {50, 100}, {PallocChunkPages - 4, 4}},
+ hits: []PallocSum{
+ PackPallocSum(4, 100, 4),
+ },
+ }
+ tests["OnlyMax"] = test{
+ free: []BitRange{{1, 20}, {35, 241}, {PallocChunkPages - 50, 30}},
+ hits: []PallocSum{
+ PackPallocSum(0, 241, 0),
+ },
+ }
+ tests["MultiMax"] = test{
+ free: []BitRange{{35, 2}, {40, 5}, {100, 5}},
+ hits: []PallocSum{
+ PackPallocSum(0, 5, 0),
+ },
+ }
}
tests["One"] = test{
free: []BitRange{{2, 1}},
// Ensures page allocation works.
func TestPallocBitsAlloc(t *testing.T) {
- tests := map[string]struct {
+ type test struct {
before []BitRange
after []BitRange
npages uintptr
hits []uint
- }{
+ }
+ tests := map[string]test{
"AllFree1": {
npages: 1,
hits: []uint{0, 1, 2, 3, 4, 5},
hits: []uint{0, 5, 10, 15, 20},
after: []BitRange{{0, 25}},
},
- "AllFree64": {
- npages: 64,
- hits: []uint{0, 64, 128},
- after: []BitRange{{0, 192}},
- },
- "AllFree65": {
- npages: 65,
- hits: []uint{0, 65, 130},
- after: []BitRange{{0, 195}},
- },
- "SomeFree64": {
- before: []BitRange{{0, 32}, {64, 32}, {100, PallocChunkPages - 100}},
- npages: 64,
- hits: []uint{^uint(0)},
- after: []BitRange{{0, 32}, {64, 32}, {100, PallocChunkPages - 100}},
- },
"NoneFree1": {
before: []BitRange{{0, PallocChunkPages}},
npages: 1,
hits: []uint{PallocChunkPages/2 - 3, ^uint(0)},
after: []BitRange{{0, PallocChunkPages}},
},
- "ExactFit65": {
+ }
+ if PallocChunkPages >= 512 {
+ // shadow the constant with a typed variable to avoid constant overflow when PallocChunkPages is small
+ var PallocChunkPages uint = PallocChunkPages
+ tests["AllFree64"] = test{
+ npages: 64,
+ hits: []uint{0, 64, 128},
+ after: []BitRange{{0, 192}},
+ }
+ tests["AllFree65"] = test{
+ npages: 65,
+ hits: []uint{0, 65, 130},
+ after: []BitRange{{0, 195}},
+ }
+ tests["SomeFree64"] = test{
+ before: []BitRange{{0, 32}, {64, 32}, {100, PallocChunkPages - 100}},
+ npages: 64,
+ hits: []uint{^uint(0)},
+ after: []BitRange{{0, 32}, {64, 32}, {100, PallocChunkPages - 100}},
+ }
+ tests["ExactFit65"] = test{
before: []BitRange{{0, PallocChunkPages/2 - 31}, {PallocChunkPages/2 + 34, PallocChunkPages/2 - 34}},
npages: 65,
hits: []uint{PallocChunkPages/2 - 31, ^uint(0)},
after: []BitRange{{0, PallocChunkPages}},
- },
- "SomeFree161": {
+ }
+ tests["SomeFree161"] = test{
before: []BitRange{{0, 185}, {331, 1}},
npages: 161,
hits: []uint{332},
after: []BitRange{{0, 185}, {331, 162}},
- },
+ }
}
for name, v := range tests {
v := v
// Ensures page freeing works.
func TestPallocBitsFree(t *testing.T) {
- tests := map[string]struct {
+ type test struct {
beforeInv []BitRange
afterInv []BitRange
frees []uint
npages uintptr
- }{
- "SomeFree": {
- npages: 1,
- beforeInv: []BitRange{{0, 32}, {64, 32}, {100, 1}},
- frees: []uint{32},
- afterInv: []BitRange{{0, 33}, {64, 32}, {100, 1}},
- },
+ }
+ tests := map[string]test{
"NoneFree1": {
npages: 1,
frees: []uint{0, 1, 2, 3, 4, 5},
frees: []uint{0, 5, 10, 15, 20},
afterInv: []BitRange{{0, 25}},
},
- "NoneFree64": {
+ }
+ if PallocChunkPages >= 512 {
+ tests["SomeFree"] = test{
+ npages: 1,
+ beforeInv: []BitRange{{0, 32}, {64, 32}, {100, 1}},
+ frees: []uint{32},
+ afterInv: []BitRange{{0, 33}, {64, 32}, {100, 1}},
+ }
+ tests["NoneFree64"] = test{
npages: 64,
frees: []uint{0, 64, 128},
afterInv: []BitRange{{0, 192}},
- },
- "NoneFree65": {
+ }
+ tests["NoneFree65"] = test{
npages: 65,
frees: []uint{0, 65, 130},
afterInv: []BitRange{{0, 195}},
- },
+ }
}
for name, v := range tests {
v := v
"io"
)
-// Expect 8 MB of memory usage for a small wasm program.
-// This reflects the current allocator. We test an exact
-// value here, but if the allocator changes, we can update
-// or relax this.
-const want = 8 << 20
+ // Expect at most 3 MB of memory usage for a small wasm program,
+ // reflecting the current allocator and the smaller (512KB) arenas
+ // on Wasm. If the allocator changes, update this value.
+const want = 3 << 20
var w = io.Discard
const pageSize = 64 * 1024
sz := uintptr(currentMemory()) * pageSize
- if sz != want {
- fmt.Printf("FAIL: unexpected memory size %d, want %d\n", sz, want)
+ if sz > want {
+ fmt.Printf("FAIL: unexpected memory size %d, want <= %d\n", sz, want)
}
}