import (
"internal/abi"
+ "internal/goarch"
"internal/goexperiment"
"unsafe"
)
a.valueStart = append(a.valueStart, len(a.steps))
var ok, ptr bool
if ifaceIndir(rcvr) || rcvr.pointers() {
- ok = a.assignIntN(0, ptrSize, 1, 0b1)
+ ok = a.assignIntN(0, goarch.PtrSize, 1, 0b1)
ptr = true
} else {
// TODO(mknyszek): Is this case even possible?
// The interface data work never contains a non-pointer
// value. This case was copied over from older code
// in the reflect package which only conditionally added
// a pointer bit to the reflect.(Value).Call stack frame's
// GC bitmap.
- ok = a.assignIntN(0, ptrSize, 1, 0b0)
+ ok = a.assignIntN(0, goarch.PtrSize, 1, 0b0)
ptr = false
}
if !ok {
- a.stackAssign(ptrSize, ptrSize)
+ a.stackAssign(goarch.PtrSize, goarch.PtrSize)
return &a.steps[len(a.steps)-1], ptr
}
return nil, ptr
case Bool, Int, Uint, Int8, Uint8, Int16, Uint16, Int32, Uint32, Uintptr:
return a.assignIntN(offset, t.size, 1, 0b0)
case Int64, Uint64:
- switch ptrSize {
+ switch goarch.PtrSize {
case 4:
return a.assignIntN(offset, 4, 2, 0b0)
case 8:
return a.assignIntN(offset, 8, 1, 0b0)
}
case Complex128:
return a.assignFloatN(offset, 8, 2)
case String:
- return a.assignIntN(offset, ptrSize, 2, 0b01)
+ return a.assignIntN(offset, goarch.PtrSize, 2, 0b01)
case Interface:
- return a.assignIntN(offset, ptrSize, 2, 0b10)
+ return a.assignIntN(offset, goarch.PtrSize, 2, 0b10)
case Slice:
- return a.assignIntN(offset, ptrSize, 3, 0b001)
+ return a.assignIntN(offset, goarch.PtrSize, 3, 0b001)
case Array:
tt := (*arrayType)(unsafe.Pointer(t))
switch tt.len {
if n > 8 || n < 0 {
panic("invalid n")
}
- if ptrMap != 0 && size != ptrSize {
+ if ptrMap != 0 && size != goarch.PtrSize {
panic("non-empty pointer map passed for non-pointer-size values")
}
if a.iregs+n > intArgRegs {
return false
}
stackPtrs.append(0)
}
} else {
- spill += ptrSize
+ spill += goarch.PtrSize
}
}
for i, arg := range t.in() {
}
}
}
- spill = align(spill, ptrSize)
+ spill = align(spill, goarch.PtrSize)
// From the input parameters alone, we now know
// the stackCallArgsSize and retOffset.
stackCallArgsSize := in.stackBytes
- retOffset := align(in.stackBytes, ptrSize)
+ retOffset := align(in.stackBytes, goarch.PtrSize)
// Compute the stack frame pointer bitmap and register
// pointer bitmap for return values.
"flag"
"fmt"
"go/token"
+ "internal/goarch"
"io"
"math"
"math/rand"
func TestFuncLayout(t *testing.T) {
align := func(x uintptr) uintptr {
- return (x + PtrSize - 1) &^ (PtrSize - 1)
+ return (x + goarch.PtrSize - 1) &^ (goarch.PtrSize - 1)
}
var r []byte
- if PtrSize == 4 {
+ if goarch.PtrSize == 4 {
r = []byte{0, 0, 0, 1}
} else {
r = []byte{0, 0, 1}
}
tests := []test{
{
typ: ValueOf(func(a, b string) string { return "" }).Type(),
- size: 6 * PtrSize,
- argsize: 4 * PtrSize,
- retOffset: 4 * PtrSize,
+ size: 6 * goarch.PtrSize,
+ argsize: 4 * goarch.PtrSize,
+ retOffset: 4 * goarch.PtrSize,
stack: []byte{1, 0, 1, 0, 1},
gc: []byte{1, 0, 1, 0, 1},
},
{
typ: ValueOf(func(a, b, c uint32, p *byte, d uint16) {}).Type(),
- size: align(align(3*4) + PtrSize + 2),
- argsize: align(3*4) + PtrSize + 2,
- retOffset: align(align(3*4) + PtrSize + 2),
+ size: align(align(3*4) + goarch.PtrSize + 2),
+ argsize: align(3*4) + goarch.PtrSize + 2,
+ retOffset: align(align(3*4) + goarch.PtrSize + 2),
stack: r,
gc: r,
},
{
typ: ValueOf(func(a map[int]int, b uintptr, c interface{}) {}).Type(),
- size: 4 * PtrSize,
- argsize: 4 * PtrSize,
- retOffset: 4 * PtrSize,
+ size: 4 * goarch.PtrSize,
+ argsize: 4 * goarch.PtrSize,
+ retOffset: 4 * goarch.PtrSize,
stack: []byte{1, 0, 1, 1},
gc: []byte{1, 0, 1, 1},
},
{
typ: ValueOf(func(a S) {}).Type(),
- size: 4 * PtrSize,
- argsize: 4 * PtrSize,
- retOffset: 4 * PtrSize,
+ size: 4 * goarch.PtrSize,
+ argsize: 4 * goarch.PtrSize,
+ retOffset: 4 * goarch.PtrSize,
stack: []byte{0, 0, 1, 1},
gc: []byte{0, 0, 1, 1},
},
{
rcvr: ValueOf((*byte)(nil)).Type(),
typ: ValueOf(func(a uintptr, b *int) {}).Type(),
- size: 3 * PtrSize,
- argsize: 3 * PtrSize,
- retOffset: 3 * PtrSize,
+ size: 3 * goarch.PtrSize,
+ argsize: 3 * goarch.PtrSize,
+ retOffset: 3 * goarch.PtrSize,
stack: []byte{1, 0, 1},
gc: []byte{1, 0, 1},
},
{
typ: ValueOf(func(a uintptr) {}).Type(),
- size: PtrSize,
- argsize: PtrSize,
- retOffset: PtrSize,
+ size: goarch.PtrSize,
+ argsize: goarch.PtrSize,
+ retOffset: goarch.PtrSize,
stack: []byte{},
gc: []byte{},
},
{
typ: ValueOf(func() uintptr { return 0 }).Type(),
- size: PtrSize,
+ size: goarch.PtrSize,
argsize: 0,
retOffset: 0,
stack: []byte{},
gc: []byte{},
},
{
rcvr: ValueOf(uintptr(0)).Type(),
typ: ValueOf(func(a uintptr) {}).Type(),
- size: 2 * PtrSize,
- argsize: 2 * PtrSize,
- retOffset: 2 * PtrSize,
+ size: 2 * goarch.PtrSize,
+ argsize: 2 * goarch.PtrSize,
+ retOffset: 2 * goarch.PtrSize,
stack: []byte{1},
gc: []byte{1},
// Note: this one is tricky, as the receiver is not a pointer. But we
// pass the receiver by reference anyway, so there is a pointer here.
verifyGCBits(t, TypeOf(([][10000]Xscalar)(nil)), lit(1))
verifyGCBits(t, SliceOf(ArrayOf(10000, Tscalar)), lit(1))
- hdr := make([]byte, 8/PtrSize)
+ hdr := make([]byte, 8/goarch.PtrSize)
verifyMapBucket := func(t *testing.T, k, e Type, m interface{}, want []byte) {
verifyGCBits(t, MapBucketOf(k, e), want)
join(hdr, rep(8, lit(0, 1)), rep(8, lit(1)), lit(1)))
verifyMapBucket(t, Tint64, Tptr,
map[int64]Xptr(nil),
- join(hdr, rep(8, rep(8/PtrSize, lit(0))), rep(8, lit(1)), lit(1)))
+ join(hdr, rep(8, rep(8/goarch.PtrSize, lit(0))), rep(8, lit(1)), lit(1)))
verifyMapBucket(t,
Tscalar, Tscalar,
map[Xscalar]Xscalar(nil),
map[[2]Xscalarptr][3]Xptrscalar(nil),
join(hdr, rep(8*2, lit(0, 1)), rep(8*3, lit(1, 0)), lit(1)))
verifyMapBucket(t,
- ArrayOf(64/PtrSize, Tscalarptr), ArrayOf(64/PtrSize, Tptrscalar),
- map[[64 / PtrSize]Xscalarptr][64 / PtrSize]Xptrscalar(nil),
- join(hdr, rep(8*64/PtrSize, lit(0, 1)), rep(8*64/PtrSize, lit(1, 0)), lit(1)))
+ ArrayOf(64/goarch.PtrSize, Tscalarptr), ArrayOf(64/goarch.PtrSize, Tptrscalar),
+ map[[64 / goarch.PtrSize]Xscalarptr][64 / goarch.PtrSize]Xptrscalar(nil),
+ join(hdr, rep(8*64/goarch.PtrSize, lit(0, 1)), rep(8*64/goarch.PtrSize, lit(1, 0)), lit(1)))
verifyMapBucket(t,
- ArrayOf(64/PtrSize+1, Tscalarptr), ArrayOf(64/PtrSize, Tptrscalar),
- map[[64/PtrSize + 1]Xscalarptr][64 / PtrSize]Xptrscalar(nil),
- join(hdr, rep(8, lit(1)), rep(8*64/PtrSize, lit(1, 0)), lit(1)))
+ ArrayOf(64/goarch.PtrSize+1, Tscalarptr), ArrayOf(64/goarch.PtrSize, Tptrscalar),
+ map[[64/goarch.PtrSize + 1]Xscalarptr][64 / goarch.PtrSize]Xptrscalar(nil),
+ join(hdr, rep(8, lit(1)), rep(8*64/goarch.PtrSize, lit(1, 0)), lit(1)))
verifyMapBucket(t,
- ArrayOf(64/PtrSize, Tscalarptr), ArrayOf(64/PtrSize+1, Tptrscalar),
- map[[64 / PtrSize]Xscalarptr][64/PtrSize + 1]Xptrscalar(nil),
- join(hdr, rep(8*64/PtrSize, lit(0, 1)), rep(8, lit(1)), lit(1)))
+ ArrayOf(64/goarch.PtrSize, Tscalarptr), ArrayOf(64/goarch.PtrSize+1, Tptrscalar),
+ map[[64 / goarch.PtrSize]Xscalarptr][64/goarch.PtrSize + 1]Xptrscalar(nil),
+ join(hdr, rep(8*64/goarch.PtrSize, lit(0, 1)), rep(8, lit(1)), lit(1)))
verifyMapBucket(t,
- ArrayOf(64/PtrSize+1, Tscalarptr), ArrayOf(64/PtrSize+1, Tptrscalar),
- map[[64/PtrSize + 1]Xscalarptr][64/PtrSize + 1]Xptrscalar(nil),
+ ArrayOf(64/goarch.PtrSize+1, Tscalarptr), ArrayOf(64/goarch.PtrSize+1, Tptrscalar),
+ map[[64/goarch.PtrSize + 1]Xscalarptr][64/goarch.PtrSize + 1]Xptrscalar(nil),
join(hdr, rep(8, lit(1)), rep(8, lit(1)), lit(1)))
}
package reflect
import (
+ "internal/goarch"
"sync"
"unsafe"
)
var CallGC = &callGC
-const PtrSize = ptrSize
+const PtrSize = goarch.PtrSize
// FuncLayout calls funcLayout and returns a subset of the results for testing.
//
// Expand frame type's GC bitmap into byte-map.
ptrs = ft.ptrdata != 0
if ptrs {
- nptrs := ft.ptrdata / ptrSize
+ nptrs := ft.ptrdata / goarch.PtrSize
gcdata := ft.gcSlice(0, (nptrs+7)/8)
for i := uintptr(0); i < nptrs; i++ {
gc = append(gc, gcdata[i/8]>>(i%8)&1)
package reflect
import (
+ "internal/goarch"
"internal/unsafeheader"
"unsafe"
)
// Some common & small cases, without using memmove:
if hasPtr {
- if size == ptrSize {
+ if size == goarch.PtrSize {
ps := *(*[]unsafe.Pointer)(v.ptr)
return func(i, j int) { ps[i], ps[j] = ps[j], ps[i] }
}
package reflect
import (
+ "internal/goarch"
"internal/unsafeheader"
"strconv"
"sync"
}
mt.flags = 0
if ktyp.size > maxKeySize {
- mt.keysize = uint8(ptrSize)
+ mt.keysize = uint8(goarch.PtrSize)
mt.flags |= 1 // indirect key
} else {
mt.keysize = uint8(ktyp.size)
}
if etyp.size > maxValSize {
- mt.valuesize = uint8(ptrSize)
+ mt.valuesize = uint8(goarch.PtrSize)
mt.flags |= 2 // indirect value
} else {
mt.valuesize = uint8(etyp.size)
var ptrdata uintptr
var overflowPad uintptr
- size := bucketSize*(1+ktyp.size+etyp.size) + overflowPad + ptrSize
+ size := bucketSize*(1+ktyp.size+etyp.size) + overflowPad + goarch.PtrSize
if size&uintptr(ktyp.align-1) != 0 || size&uintptr(etyp.align-1) != 0 {
panic("reflect: bad size computation in MapOf")
}
if ktyp.ptrdata != 0 || etyp.ptrdata != 0 {
- nptr := (bucketSize*(1+ktyp.size+etyp.size) + ptrSize) / ptrSize
+ nptr := (bucketSize*(1+ktyp.size+etyp.size) + goarch.PtrSize) / goarch.PtrSize
mask := make([]byte, (nptr+7)/8)
- base := bucketSize / ptrSize
+ base := bucketSize / goarch.PtrSize
if ktyp.ptrdata != 0 {
emitGCMask(mask, base, ktyp, bucketSize)
}
- base += bucketSize * ktyp.size / ptrSize
+ base += bucketSize * ktyp.size / goarch.PtrSize
if etyp.ptrdata != 0 {
emitGCMask(mask, base, etyp, bucketSize)
}
- base += bucketSize * etyp.size / ptrSize
- base += overflowPad / ptrSize
+ base += bucketSize * etyp.size / goarch.PtrSize
+ base += overflowPad / goarch.PtrSize
word := base
mask[word/8] |= 1 << (word % 8)
gcdata = &mask[0]
- ptrdata = (word + 1) * ptrSize
+ ptrdata = (word + 1) * goarch.PtrSize
// overflow word must be last
if ptrdata != size {
panic("reflect: bad layout computation in MapOf")
}
b := &rtype{
- align: ptrSize,
+ align: goarch.PtrSize,
size: size,
kind: uint8(Struct),
ptrdata: ptrdata,
if typ.kind&kindGCProg != 0 {
panic("reflect: unexpected GC program")
}
- ptrs := typ.ptrdata / ptrSize
- words := typ.size / ptrSize
+ ptrs := typ.ptrdata / goarch.PtrSize
+ words := typ.size / goarch.PtrSize
mask := typ.gcSlice(0, (ptrs+7)/8)
for j := uintptr(0); j < ptrs; j++ {
if (mask[j/8]>>(j%8))&1 != 0 {
}
// Element is small with pointer mask; use as literal bits.
- ptrs := typ.ptrdata / ptrSize
+ ptrs := typ.ptrdata / goarch.PtrSize
mask := typ.gcSlice(0, (ptrs+7)/8)
// Emit 120-bit chunks of full bytes (max is 127 but we avoid using partial bytes).
}
// Pad to start of this field with zeros.
if ft.offset() > off {
- n := (ft.offset() - off) / ptrSize
+ n := (ft.offset() - off) / goarch.PtrSize
prog = append(prog, 0x01, 0x00) // emit a 0 bit
if n > 1 {
prog = append(prog, 0x81)      // repeat previous bit
prog = appendVarint(prog, n-1) // n-1 times
}
array.gcdata = typ.gcdata
array.ptrdata = typ.ptrdata
- case typ.kind&kindGCProg == 0 && array.size <= maxPtrmaskBytes*8*ptrSize:
+ case typ.kind&kindGCProg == 0 && array.size <= maxPtrmaskBytes*8*goarch.PtrSize:
// Element is small with pointer mask; array is still small.
// Create direct pointer mask by turning each 1 bit in elem
// into length 1 bits in larger mask.
- mask := make([]byte, (array.ptrdata/ptrSize+7)/8)
+ mask := make([]byte, (array.ptrdata/goarch.PtrSize+7)/8)
emitGCMask(mask, 0, typ, array.len)
array.gcdata = &mask[0]
prog := []byte{0, 0, 0, 0} // will be length of prog
prog = appendGCProg(prog, typ)
// Pad from ptrdata to size.
- elemPtrs := typ.ptrdata / ptrSize
- elemWords := typ.size / ptrSize
+ elemPtrs := typ.ptrdata / goarch.PtrSize
+ elemWords := typ.size / goarch.PtrSize
if elemPtrs < elemWords {
// Emit literal 0 bit, then repeat as needed.
prog = append(prog, 0x01, 0x00)
// build dummy rtype holding gc program
x := &rtype{
- align: ptrSize,
+ align: goarch.PtrSize,
// Don't add spill space here; it's only necessary in
// reflectcall's frame, not in the allocated frame.
// TODO(mknyszek): Remove this comment when register
// spill space in the frame is no longer required.
- size: align(abi.retOffset+abi.ret.stackBytes, ptrSize),
- ptrdata: uintptr(abi.stackPtrs.n) * ptrSize,
+ size: align(abi.retOffset+abi.ret.stackBytes, goarch.PtrSize),
+ ptrdata: uintptr(abi.stackPtrs.n) * goarch.PtrSize,
}
if abi.stackPtrs.n > 0 {
x.gcdata = &abi.stackPtrs.data[0]
switch Kind(t.kind & kindMask) {
case Chan, Func, Map, Ptr, Slice, String, UnsafePointer:
// 1 pointer at start of representation
- for bv.n < uint32(offset/uintptr(ptrSize)) {
+ for bv.n < uint32(offset/uintptr(goarch.PtrSize)) {
bv.append(0)
}
bv.append(1)
case Interface:
// 2 pointers
- for bv.n < uint32(offset/uintptr(ptrSize)) {
+ for bv.n < uint32(offset/uintptr(goarch.PtrSize)) {
bv.append(0)
}
bv.append(1)
bv.append(1)
import (
"internal/abi"
+ "internal/goarch"
"internal/itoa"
"internal/unsafeheader"
"math"
// v.Kind() must be Ptr, Map, Chan, Func, or UnsafePointer
// if v.Kind() == Ptr, the base type must not be go:notinheap.
func (v Value) pointer() unsafe.Pointer {
- if v.typ.size != ptrSize || !v.typ.pointers() {
+ if v.typ.size != goarch.PtrSize || !v.typ.pointers() {
panic("can't call pointer on a non-pointer Value")
}
if v.flag&flagIndir != 0 {
}
// TODO(mknyszek): Remove this when we no longer have
// caller reserved spill space.
- frameSize = align(frameSize, ptrSize)
+ frameSize = align(frameSize, goarch.PtrSize)
frameSize += abi.spill
// Mark pointers in registers for the return path.
methodFrameSize := methodFrameType.size
// TODO(mknyszek): Remove this when we no longer have
// caller reserved spill space.
- methodFrameSize = align(methodFrameSize, ptrSize)
+ methodFrameSize = align(methodFrameSize, goarch.PtrSize)
methodFrameSize += methodABI.spill
// Mark pointers in registers for the return path.