import (
"internal/abi"
- "internal/goarch"
+ "internal/runtime/maps"
"unsafe"
)
- // The runtime hasher only works on uintptr. For 64-bit
- // architectures, we use the hasher directly. Otherwise,
- // we use two parallel hashers on the lower and upper 32 bits.
+ // The runtime hasher only works on uintptr. When the runtime hash is
+ // 64-bit (Use64BitHash), we use the hasher directly. Otherwise (32-bit
+ // platforms and Wasm), we run two parallel hashers seeded with the
+ // lower and upper 32 bits of the 64-bit seed.
- if goarch.PtrSize == 8 {
+ if maps.Use64BitHash {
return uint64(runtime_memhash(unsafe.Pointer(&buf[0]), uintptr(seed), uintptr(len)))
}
- lo := runtime_memhash(unsafe.Pointer(&buf[0]), uintptr(seed), uintptr(len))
+ lo := runtime_memhash(unsafe.Pointer(&buf[0]), uintptr(uint32(seed)), uintptr(len))
hi := runtime_memhash(unsafe.Pointer(&buf[0]), uintptr(seed>>32), uintptr(len))
return uint64(hi)<<32 | uint64(lo)
}
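The replaced branch above splits the 64-bit seed across two runs of the 32-bit hasher and recombines the halves. A minimal standalone sketch of that scheme, with a hypothetical h32 standing in for runtime_memhash (hash64via32 and h32 are illustrative, not real APIs):

    // hash64via32 combines two 32-bit hashes of the same data, seeded with
    // the low and high halves of a 64-bit seed, into one 64-bit result.
    func hash64via32(h32 func(data []byte, seed uint32) uint32, data []byte, seed uint64) uint64 {
        lo := h32(data, uint32(seed))     // seeded with the low 32 bits
        hi := h32(data, uint32(seed>>32)) // seeded with the high 32 bits
        return uint64(hi)<<32 | uint64(lo)
    }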
var m map[T]struct{}
mTyp := abi.TypeOf(m)
hasher := (*abi.MapType)(unsafe.Pointer(mTyp)).Hasher
- if goarch.PtrSize == 8 {
+ if maps.Use64BitHash {
return uint64(hasher(abi.NoEscape(unsafe.Pointer(&v)), uintptr(s)))
}
- lo := hasher(abi.NoEscape(unsafe.Pointer(&v)), uintptr(s))
+ lo := hasher(abi.NoEscape(unsafe.Pointer(&v)), uintptr(uint32(s)))
hi := hasher(abi.NoEscape(unsafe.Pointer(&v)), uintptr(s>>32))
return uint64(hi)<<32 | uint64(lo)
}
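If this generic helper is the one backing hash/maphash.Comparable (an assumption based on the surrounding identifiers, not stated in the diff), the exported API it serves behaves the same regardless of hash width:

    package main

    import (
        "fmt"
        "hash/maphash"
    )

    func main() {
        seed := maphash.MakeSeed()
        type key struct{ a, b int }
        // Equal comparable values hash equally under the same seed,
        // whether the underlying hash is 64-bit or 32-bit.
        fmt.Println(maphash.Comparable(seed, key{1, 2}) == maphash.Comparable(seed, key{1, 2})) // true
    }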
import (
"fmt"
+ "internal/runtime/maps"
"internal/testenv"
"math"
"math/rand"
"slices"
"strings"
"testing"
- "unsafe"
)
// Smhasher is a torture test for hash functions.
// Make sure different seed values generate different hashes.
func TestSmhasherSeed(t *testing.T) {
- if unsafe.Sizeof(uintptr(0)) == 4 {
+ if !maps.Use64BitHash {
- t.Skip("32-bit platforms don't have ideal seed-input distributions (see issue 33988)")
+ t.Skip("32-bit hashes don't have ideal seed-input distributions (see issue 33988)")
}
t.Parallel()
clearSeq uint64
}
+// Use64BitHash is true when the runtime hash is 64 bits wide: on 64-bit
+// systems, except on Wasm, which uses a 32-bit hash (see runtime/hash32.go).
+const Use64BitHash = goarch.PtrSize == 8 && goarch.IsWasm == 0
+
func depthToShift(depth uint8) uint8 {
- if goarch.PtrSize == 4 {
+ if !Use64BitHash {
return 32 - depth
}
return 64 - depth
import (
"internal/abi"
- "internal/goarch"
"internal/runtime/math"
"unsafe"
)
// Bitmask for the last selection bit at this depth.
func localDepthMask(localDepth uint8) uintptr {
- if goarch.PtrSize == 4 {
+ if !Use64BitHash {
return uintptr(1) << (32 - localDepth)
}
return uintptr(1) << (64 - localDepth)
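Both depthToShift and localDepthMask read bits from the top of the hash: at depth d the directory index is the top d bits, and the mask isolates the last bit that participated in the split. A self-contained sketch of the arithmetic for the 64-bit case (directoryIndex and lastSelectionBit are illustrative names, not the package's API):

    const hashBits = 64 // 32 when !Use64BitHash

    // directoryIndex returns the top `depth` bits of h, i.e.
    // h >> depthToShift(depth), used to index the directory of tables.
    func directoryIndex(h uint64, depth uint8) uint64 {
        return h >> (hashBits - depth)
    }

    // lastSelectionBit returns the bit distinguishing the two halves of a
    // table split at localDepth, i.e. localDepthMask(localDepth).
    func lastSelectionBit(localDepth uint8) uint64 {
        return 1 << (hashBits - localDepth)
    }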
)
const (
- c0 = uintptr((8-goarch.PtrSize)/4*2860486313 + (goarch.PtrSize-4)/4*33054211828000289)
- c1 = uintptr((8-goarch.PtrSize)/4*3267000013 + (goarch.PtrSize-4)/4*23344194077549503)
+ // hashSize is the width of the hash in bytes: PtrSize normally, but 4
+ // on Wasm, which uses a 32-bit hash (see hash32.go).
+ hashSize = (1-goarch.IsWasm)*goarch.PtrSize + goarch.IsWasm*4
+ c0 = uintptr((8-hashSize)/4*2860486313 + (hashSize-4)/4*33054211828000289)
+ c1 = uintptr((8-hashSize)/4*3267000013 + (hashSize-4)/4*23344194077549503)
)
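hashSize, and through it c0 and c1, are selected by integer arithmetic on the 0/1 goarch constants rather than by build tags. Plugging in the two cases (a worked sketch; the literals are the ones declared above, the names are not):

    const (
        // 64-bit, non-Wasm target: IsWasm == 0, PtrSize == 8.
        hashSize64 = (1-0)*8 + 0*4 // == 8
        c0on64     = (8-hashSize64)/4*2860486313 + (hashSize64-4)/4*33054211828000289
        // c0on64 == 0*2860486313 + 1*33054211828000289 == 33054211828000289

        // Wasm: IsWasm == 1, PtrSize == 8 (32-bit targets also yield 4).
        hashSizeWasm = (1-1)*8 + 1*4 // == 4
        c0onWasm     = (8-hashSizeWasm)/4*2860486313 + (hashSizeWasm-4)/4*33054211828000289
        // c0onWasm == 1*2860486313 + 0*33054211828000289 == 2860486313
    )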
+// trimHash truncates h to the width of the hash.
+func trimHash(h uintptr) uintptr {
+ if goarch.IsWasm != 0 {
+ // On Wasm we use a 32-bit hash even though uintptr is 64-bit.
+ // memhash* always returns a uintptr whose high 32 bits are 0
+ // (see hash32.go), so trimHash maintains that invariant in the
+ // places that compute a hash manually, e.g. in interhash.
+ return uintptr(uint32(h))
+ }
+ return h
+}
+
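memhash on Wasm already returns a uintptr whose high 32 bits are zero; it is the manual mixing below (c1 * (c0 ^ h) and friends) that can carry into the high bits, which is what trimHash undoes at those call sites. A small illustration with made-up values:

    func exampleTrim() uintptr {
        var h uintptr = 0x9e3779b9             // like memhash on Wasm: high 32 bits zero
        mixed := 2860486313 * (h ^ 0xdeadbeef) // mixing can carry into the high 32 bits
        return uintptr(uint32(mixed))          // what trimHash does: restore the invariant
    }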
func memhash0(p unsafe.Pointer, h uintptr) uintptr {
return h
}
f := *(*float32)(p)
switch {
case f == 0:
- return c1 * (c0 ^ h) // +0, -0
+ return trimHash(c1 * (c0 ^ h)) // +0, -0
case f != f:
- return c1 * (c0 ^ h ^ uintptr(rand())) // any kind of NaN
+ return trimHash(c1 * (c0 ^ h ^ uintptr(rand()))) // any kind of NaN
default:
return memhash(p, h, 4)
}
f := *(*float64)(p)
switch {
case f == 0:
- return c1 * (c0 ^ h) // +0, -0
+ return trimHash(c1 * (c0 ^ h)) // +0, -0
case f != f:
- return c1 * (c0 ^ h ^ uintptr(rand())) // any kind of NaN
+ return trimHash(c1 * (c0 ^ h ^ uintptr(rand()))) // any kind of NaN
default:
return memhash(p, h, 8)
}
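The rand() in the NaN arm is what spreads NaN keys across the table: NaN != NaN, so an inserted NaN key can never be looked up again, and a fixed hash would only pile such keys into one group. The f == 0 arm goes the other way because +0 == -0 and the two must hash identically. Both properties are observable from ordinary map code:

    package main

    import (
        "fmt"
        "math"
    )

    func main() {
        m := map[float64]int{}
        nan := math.NaN()
        m[nan] = 1
        m[nan] = 2                  // NaN != NaN: a second, distinct key
        m[math.Copysign(0, -1)] = 3 // -0
        m[0] = 4                    // +0 == -0: overwrites the -0 entry
        fmt.Println(len(m))         // 3: two NaN keys plus one zero key
    }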
panic(errorString("hash of unhashable type " + toRType(t).string()))
}
if t.IsDirectIface() {
- return c1 * typehash(t, unsafe.Pointer(&a.data), h^c0)
+ return trimHash(c1 * typehash(t, unsafe.Pointer(&a.data), h^c0))
} else {
- return c1 * typehash(t, a.data, h^c0)
+ return trimHash(c1 * typehash(t, a.data, h^c0))
}
}
panic(errorString("hash of unhashable type " + toRType(t).string()))
}
if t.IsDirectIface() {
- return c1 * typehash(t, unsafe.Pointer(&a.data), h^c0)
+ return trimHash(c1 * typehash(t, unsafe.Pointer(&a.data), h^c0))
} else {
- return c1 * typehash(t, a.data, h^c0)
+ return trimHash(c1 * typehash(t, a.data, h^c0))
}
}
// Hashing algorithm inspired by
// wyhash: https://github.com/wangyi-fudan/wyhash/blob/ceb019b530e2c1c14d70b79bfa2bc49de7d95bc1/Modern%20Non-Cryptographic%20Hash%20Function%20and%20Pseudorandom%20Number%20Generator.pdf
-//go:build 386 || arm || mips || mipsle
+//go:build 386 || arm || mips || mipsle || wasm
package runtime
// Hashing algorithm inspired by
// wyhash: https://github.com/wangyi-fudan/wyhash
-//go:build amd64 || arm64 || loong64 || mips64 || mips64le || ppc64 || ppc64le || riscv64 || s390x || wasm
+//go:build amd64 || arm64 || loong64 || mips64 || mips64le || ppc64 || ppc64le || riscv64 || s390x
package runtime