package abi
-// ZeroValSize is the size in bytes of [ZeroVal].
+// ZeroValSize is the size in bytes of runtime.zeroVal.
const ZeroValSize = 1024
-
-// ZeroVal is a region containing all zero bytes.
-var ZeroVal [ZeroValSize]byte
// v.ptr doesn't escape, as Equal functions are compiler generated
// and never escape. Escape analysis can't see that, because the
// call goes through a function pointer.
- return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&abi.ZeroVal[0]))
+ return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&zeroVal[0]))
}
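For reference, abi.NoEscape is the standard identity-through-uintptr laundering helper; a minimal sketch of its shape (this matches the helper in internal/abi, but treat it as background, not part of this patch):

	//go:nosplit
	func NoEscape(p unsafe.Pointer) unsafe.Pointer {
		x := uintptr(p)
		return unsafe.Pointer(x ^ 0) // identity at runtime, opaque to escape analysis
	}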
if typ.TFlag&abi.TFlagRegularMemory != 0 {
	// For types whose zero value is all zero bits, the bytes can be checked directly.
// If the type is comparable, then compare directly with zero.
if typ.Equal != nil && typ.Size() <= abi.ZeroValSize {
// See noescape justification above.
- return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&abi.ZeroVal[0]))
+ return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&zeroVal[0]))
}
if typ.TFlag&abi.TFlagRegularMemory != 0 {
	// For types whose zero value is all zero bits, the bytes can be checked directly.
}
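The effect of the two fast paths above, as a usage sketch (the claim that the comparable path is taken for this type is an assumption about the current implementation):

	package main

	import "reflect"

	func main() {
		var a [64]int // comparable and well under ZeroValSize (512 of 1024 bytes)
		println(reflect.ValueOf(a).IsZero()) // true: one Equal call against the shared zero region
		a[3] = 1
		println(reflect.ValueOf(a).IsZero()) // false
	}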
x = x.assignTo("reflect.Set", v.typ(), target)
if x.flag&flagIndir != 0 {
- if x.ptr == unsafe.Pointer(&abi.ZeroVal[0]) {
+ if x.ptr == unsafe.Pointer(&zeroVal[0]) {
typedmemclr(v.typ(), v.ptr)
} else {
typedmemmove(v.typ(), v.ptr, x.ptr)
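The pointer-identity check against &zeroVal[0] is what makes assigning a zero value cheap: when the source Value came from Zero (below) for a type that fits in the zero region, its data pointer is the shared region itself, so Set can clear the destination with typedmemclr instead of copying from it. A hedged sketch of the pattern that hits this path:

	package main

	import "reflect"

	func main() {
		var buf [512]byte
		v := reflect.ValueOf(&buf).Elem()
		// reflect.Zero yields a Value whose data pointer is &zeroVal[0] here,
		// so Set lowers to typedmemclr rather than a 512-byte copy.
		v.Set(reflect.Zero(v.Type()))
	}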
if t.IfaceIndir() {
var p unsafe.Pointer
if t.Size() <= abi.ZeroValSize {
- p = unsafe.Pointer(&abi.ZeroVal[0])
+ p = unsafe.Pointer(&zeroVal[0])
} else {
p = unsafe_New(t)
}
return Value{t, nil, fl}
}
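Zero avoids a heap allocation for any indirect type of at most ZeroValSize bytes by pointing the returned Value into the shared region; only larger types pay for unsafe_New. A rough way to observe this (the exact allocation counts are an assumption about the current implementation, not a guarantee):

	package main

	import (
		"reflect"
		"testing"
	)

	func main() {
		small := reflect.TypeOf([1024]byte{}) // exactly ZeroValSize: shared region
		large := reflect.TypeOf([1025]byte{}) // one byte over: falls back to unsafe_New
		println(int(testing.AllocsPerRun(100, func() { _ = reflect.Zero(small) }))) // expect 0
		println(int(testing.AllocsPerRun(100, func() { _ = reflect.Zero(large) }))) // expect 1
	}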
+//go:linkname zeroVal runtime.zeroVal
+var zeroVal [abi.ZeroValSize]byte
+
// New returns a Value representing a pointer to a new zero value
// for the specified type. That is, the returned Value's Type is [PointerTo](typ).
func New(typ Type) Value {
func convTstring(val string) (x unsafe.Pointer) {
if val == "" {
- x = unsafe.Pointer(&abi.ZeroVal[0])
+ x = unsafe.Pointer(&zeroVal[0])
} else {
x = mallocgc(unsafe.Sizeof(val), stringType, true)
*(*string)(x) = val
func convTslice(val []byte) (x unsafe.Pointer) {
// Note: this must work for any element type, not just byte.
if (*slice)(unsafe.Pointer(&val)).array == nil {
- x = unsafe.Pointer(&abi.ZeroVal[0])
+ x = unsafe.Pointer(&zeroVal[0])
} else {
x = mallocgc(unsafe.Sizeof(val), sliceType, true)
*(*[]byte)(x) = val
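convTstring and convTslice exploit the same fact from the other direction: a string or slice header that is all zero bytes already is "" or a nil slice, so boxing either into an interface can point the data word at the shared region instead of allocating. Sketch (whether the compiler keeps the convT* call at all depends on escape analysis, so treat the zero-alloc result as an assumption):

	package main

	import "testing"

	func main() {
		s, b := "", []byte(nil)
		var sink any
		n := testing.AllocsPerRun(100, func() {
			sink = s // convTstring(""): data word = &zeroVal[0]
			sink = b // convTslice(nil): likewise
		})
		_ = sink
		println(int(n)) // expect 0
	}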
if err := mapKeyError(t, key); err != nil {
panic(err) // see issue 23734
}
- return unsafe.Pointer(&abi.ZeroVal[0])
+ return unsafe.Pointer(&zeroVal[0])
}
if h.flags&hashWriting != 0 {
fatal("concurrent map read and map write")
}
}
}
- return unsafe.Pointer(&abi.ZeroVal[0])
+ return unsafe.Pointer(&zeroVal[0])
}
func mapaccess2(t *maptype, h *hmap, key unsafe.Pointer) (unsafe.Pointer, bool) {
if err := mapKeyError(t, key); err != nil {
panic(err) // see issue 23734
}
- return unsafe.Pointer(&abi.ZeroVal[0]), false
+ return unsafe.Pointer(&zeroVal[0]), false
}
if h.flags&hashWriting != 0 {
fatal("concurrent map read and map write")
}
}
}
- return unsafe.Pointer(&abi.ZeroVal[0]), false
+ return unsafe.Pointer(&zeroVal[0]), false
}
// returns both key and elem. Used by map iterator.
func mapaccess1_fat(t *maptype, h *hmap, key, zero unsafe.Pointer) unsafe.Pointer {
e := mapaccess1(t, h, key)
- if e == unsafe.Pointer(&abi.ZeroVal[0]) {
+ if e == unsafe.Pointer(&zeroVal[0]) {
return zero
}
return e
func mapaccess2_fat(t *maptype, h *hmap, key, zero unsafe.Pointer) (unsafe.Pointer, bool) {
e := mapaccess1(t, h, key)
- if e == unsafe.Pointer(&abi.ZeroVal[0]) {
+ if e == unsafe.Pointer(&zeroVal[0]) {
return zero, false
}
return e, true
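The shared region also serves as the "missing key" sentinel: on a miss, mapaccess1 and mapaccess2 return &zeroVal[0], which is a valid zero value for any element type of at most ZeroValSize bytes. For larger element types the compiler calls the _fat variants instead, passing a pointer to a full-size, type-specific zero value, and the identity comparison above swaps it in. This is observable from user code (that this element size routes through the _fat path is an assumption about how the compiler lowers the lookup):

	package main

	func main() {
		type big [2048]byte // larger than ZeroValSize, so lookups take the _fat path
		m := map[string]big{}
		v, ok := m["missing"]
		println(ok, v == big{}) // false true: the zero comes from the passed-in fat zero
	}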
racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess1_fast32))
}
if h == nil || h.count == 0 {
- return unsafe.Pointer(&abi.ZeroVal[0])
+ return unsafe.Pointer(&zeroVal[0])
}
if h.flags&hashWriting != 0 {
fatal("concurrent map read and map write")
}
}
}
- return unsafe.Pointer(&abi.ZeroVal[0])
+ return unsafe.Pointer(&zeroVal[0])
}
func mapaccess2_fast32(t *maptype, h *hmap, key uint32) (unsafe.Pointer, bool) {
racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess2_fast32))
}
if h == nil || h.count == 0 {
- return unsafe.Pointer(&abi.ZeroVal[0]), false
+ return unsafe.Pointer(&zeroVal[0]), false
}
if h.flags&hashWriting != 0 {
fatal("concurrent map read and map write")
}
}
}
- return unsafe.Pointer(&abi.ZeroVal[0]), false
+ return unsafe.Pointer(&zeroVal[0]), false
}
func mapassign_fast32(t *maptype, h *hmap, key uint32) unsafe.Pointer {
racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess1_fast64))
}
if h == nil || h.count == 0 {
- return unsafe.Pointer(&abi.ZeroVal[0])
+ return unsafe.Pointer(&zeroVal[0])
}
if h.flags&hashWriting != 0 {
fatal("concurrent map read and map write")
}
}
}
- return unsafe.Pointer(&abi.ZeroVal[0])
+ return unsafe.Pointer(&zeroVal[0])
}
func mapaccess2_fast64(t *maptype, h *hmap, key uint64) (unsafe.Pointer, bool) {
racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess2_fast64))
}
if h == nil || h.count == 0 {
- return unsafe.Pointer(&abi.ZeroVal[0]), false
+ return unsafe.Pointer(&zeroVal[0]), false
}
if h.flags&hashWriting != 0 {
fatal("concurrent map read and map write")
}
}
}
- return unsafe.Pointer(&abi.ZeroVal[0]), false
+ return unsafe.Pointer(&zeroVal[0]), false
}
func mapassign_fast64(t *maptype, h *hmap, key uint64) unsafe.Pointer {
racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess1_faststr))
}
if h == nil || h.count == 0 {
- return unsafe.Pointer(&abi.ZeroVal[0])
+ return unsafe.Pointer(&zeroVal[0])
}
if h.flags&hashWriting != 0 {
fatal("concurrent map read and map write")
return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+i*uintptr(t.ValueSize))
}
}
- return unsafe.Pointer(&abi.ZeroVal[0])
+ return unsafe.Pointer(&zeroVal[0])
}
// long key, try not to do more comparisons than necessary
keymaybe := uintptr(abi.MapBucketCount)
return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+keymaybe*uintptr(t.ValueSize))
}
}
- return unsafe.Pointer(&abi.ZeroVal[0])
+ return unsafe.Pointer(&zeroVal[0])
}
dohash:
hash := t.Hasher(noescape(unsafe.Pointer(&ky)), uintptr(h.hash0))
}
}
}
- return unsafe.Pointer(&abi.ZeroVal[0])
+ return unsafe.Pointer(&zeroVal[0])
}
func mapaccess2_faststr(t *maptype, h *hmap, ky string) (unsafe.Pointer, bool) {
racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess2_faststr))
}
if h == nil || h.count == 0 {
- return unsafe.Pointer(&abi.ZeroVal[0]), false
+ return unsafe.Pointer(&zeroVal[0]), false
}
if h.flags&hashWriting != 0 {
fatal("concurrent map read and map write")
return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+i*uintptr(t.ValueSize)), true
}
}
- return unsafe.Pointer(&abi.ZeroVal[0]), false
+ return unsafe.Pointer(&zeroVal[0]), false
}
// long key, try not to do more comparisons than necessary
keymaybe := uintptr(abi.MapBucketCount)
return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+keymaybe*uintptr(t.ValueSize)), true
}
}
- return unsafe.Pointer(&abi.ZeroVal[0]), false
+ return unsafe.Pointer(&zeroVal[0]), false
}
dohash:
hash := t.Hasher(noescape(unsafe.Pointer(&ky)), uintptr(h.hash0))
}
}
}
- return unsafe.Pointer(&abi.ZeroVal[0]), false
+ return unsafe.Pointer(&zeroVal[0]), false
}
func mapassign_faststr(t *maptype, h *hmap, s string) unsafe.Pointer {
package runtime
import (
+ "internal/abi"
"internal/runtime/atomic"
"unsafe"
)
//
//go:linkname getAuxv
func getAuxv() []uintptr { return auxv }
+
+// zeroVal is used by reflect via linkname.
+//
+//go:linkname zeroVal
+var zeroVal [abi.ZeroValSize]byte
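The two directives form the usual pull-style linkname pairing: the one-argument form on the runtime definition marks runtime.zeroVal as accessible to linkname, and the two-argument form in reflect binds its local declaration to that symbol, so both names resolve to one variable at link time. A minimal standalone sketch of the same pattern (package paths and names here are illustrative, not from this patch):

	// example.com/impl/impl.go
	package impl

	import _ "unsafe" // required for go:linkname

	// One-argument form: allow other packages to pull this symbol.
	//
	//go:linkname sharedBuf
	var sharedBuf [64]byte

	// example.com/user/user.go
	package user

	import _ "unsafe" // required for go:linkname

	// Two-argument form: this declaration resolves to impl's variable.
	//
	//go:linkname sharedBuf example.com/impl.sharedBuf
	var sharedBuf [64]byte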