"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
+ "strings"
)
// call evaluates a call expression, including builtin calls. ks
argument(e.tagHole(ks, fn, param), arg)
}
+ // hash/maphash.escapeForHash forces its argument to be on
+ // the heap, if it contains a non-string pointer. We cannot
+ // hash pointers to local variables, as the address of the
+ // local variable might change on stack growth.
+ // Strings are okay as the hash depends on only the content,
+ // not the pointer.
+ // The actual call we match is
+ // hash/maphash.escapeForHash[go.shape.T](dict, go.shape.T)
+ if fn != nil && fn.Sym().Pkg.Path == "hash/maphash" && strings.HasPrefix(fn.Sym().Name, "escapeForHash[") {
+ ps := fntype.Params()
+ if len(ps) == 2 && ps[1].Type.IsShape() {
+ if !hasNonStringPointers(ps[1].Type) {
+ argumentParam = func(param *types.Field, arg ir.Node) {
+ argument(e.discardHole(), arg)
+ }
+ } else {
+ argumentParam = func(param *types.Field, arg ir.Node) {
+ argument(e.heapHole(), arg)
+ }
+ }
+ }
+ }
+
args := call.Args
if recvParam := fntype.Recv(); recvParam != nil {
if recvArg == nil {
return e.teeHole(tagKs...)
}
+
+// hasNonStringPointers reports whether t contains a pointer other than
+// a string's data pointer. String pointers are exempt because the hash
+// depends only on the string content, never on the pointer value;
+// any other pointer may refer to a local variable whose address can
+// change on stack growth, so values containing one must be heap-escaped
+// before hashing (see the escapeForHash handling above).
+func hasNonStringPointers(t *types.Type) bool {
+ if !t.HasPointers() {
+ // No pointers at all: safe to hash in place.
+ return false
+ }
+ switch t.Kind() {
+ case types.TSTRING:
+ // A string's only pointer is to its immutable content.
+ return false
+ case types.TSTRUCT:
+ // A struct has non-string pointers iff any field does.
+ for _, f := range t.Fields() {
+ if hasNonStringPointers(f.Type) {
+ return true
+ }
+ }
+ return false
+ case types.TARRAY:
+ // An array has non-string pointers iff its element type does.
+ return hasNonStringPointers(t.Elem())
+ }
+ // Any other pointer-bearing kind (pointer, slice, interface, map,
+ // chan, func) is treated as unsafe.
+ return true
+}
"go/constant"
"internal/buildcfg"
"strconv"
+ "strings"
"cmd/compile/internal/base"
"cmd/compile/internal/inline/inlheur"
case "panicrangestate":
cheap = true
}
+ case "hash/maphash":
+ if strings.HasPrefix(fn, "escapeForHash[") {
+ // hash/maphash.escapeForHash[T] is a compiler intrinsic
+ // implemented in the escape analysis phase.
+ cheap = true
+ }
}
}
// Special case for coverage counter updates; although
}
}
}
+
+ // hash/maphash.escapeForHash[T] is a compiler intrinsic implemented
+ // in the escape analysis phase.
+ if fn := ir.StaticCalleeName(call.Fun); fn != nil && fn.Sym().Pkg.Path == "hash/maphash" &&
+ strings.HasPrefix(fn.Sym().Name, "escapeForHash[") {
+ return false, true
+ }
+
if ir.IsIntrinsicCall(call) {
return false, true
}
return walkExpr(n, init)
}
+ if n.Op() == ir.OCALLFUNC {
+ fn := ir.StaticCalleeName(n.Fun)
+ if fn != nil && fn.Sym().Pkg.Path == "hash/maphash" && strings.HasPrefix(fn.Sym().Name, "escapeForHash[") {
+ // hash/maphash.escapeForHash[T] is a compiler intrinsic
+ // for the escape analysis to escape its argument based on
+ // the type. The call itself is no-op. Just walk the
+ // argument.
+ ps := fn.Type().Params()
+ if len(ps) == 2 && ps[1].Type.IsShape() {
+ return walkExpr(n.Args[1], init)
+ }
+ }
+ }
+
if name, ok := n.Fun.(*ir.Name); ok {
sym := name.Sym()
if sym.Pkg.Path == "go.runtime" && sym.Name == "deferrangefunc" {
package maphash
import (
- "internal/abi"
"internal/byteorder"
"math"
)
// such that Comparable(s, v1) == Comparable(s, v2) if v1 == v2.
// If v != v, then the resulting hash is randomly distributed.
func Comparable[T comparable](seed Seed, v T) uint64 {
+ // escapeForHash is a compiler intrinsic: a no-op unless T contains
+ // non-string pointers, in which case it forces v to the heap so the
+ // hashed pointer values cannot change on stack growth.
- comparableReady(v)
+ escapeForHash(v)
return comparableHash(v, seed)
}
-func comparableReady[T comparable](v T) {
- // Force v to be on the heap.
- // We cannot hash pointers to local variables,
- // as the address of the local variable
- // might change on stack growth.
- abi.Escape(v)
-}
+// escapeForHash forces v to be on the heap, if v contains a
+// non-string pointer. We cannot hash pointers to local variables,
+// as the address of the local variable might change on stack growth.
+// Strings are okay as the hash depends on only the content, not
+// the pointer.
+//
+// This is essentially
+//
+// if hasNonStringPointers(T) { abi.Escape(v) }
+//
+// Implemented as a compiler intrinsic. The panic body is never
+// executed: the compiler recognizes calls to this function and
+// rewrites them during escape analysis and walk.
+func escapeForHash[T comparable](v T) { panic("intrinsic") }
// WriteComparable adds x to the data hashed by h.
func WriteComparable[T comparable](h *Hash, x T) {
- comparableReady(x)
+ escapeForHash(x)
// writeComparable (not in purego mode) directly operates on h.state
// without using h.buf. Mix in the buffer length so it won't
// commute with a buffered write, which either changes h.n or changes
"reflect"
)
+const purego = true
+
var hashkey [4]uint64
func init() {
"unsafe"
)
+const purego = false
+
//go:linkname runtime_rand runtime.rand
func runtime_rand() uint64
"bytes"
"fmt"
"hash"
+ "internal/asan"
"math"
"reflect"
"strings"
}
}
+// TestComparableAllocations verifies that Comparable does not force its
+// argument to the heap when the type's only pointers are strings:
+// escapeForHash should be a no-op for such types, allowing the
+// freshly built argument values below to stay on the stack.
+func TestComparableAllocations(t *testing.T) {
+ if purego {
+ t.Skip("skip allocation test in purego mode - reflect-based implementation allocates more")
+ }
+ if asan.Enabled {
+ t.Skip("skip allocation test under -asan")
+ }
+ seed := MakeSeed()
+ x := heapStr(t)
+ allocs := testing.AllocsPerRun(10, func() {
+ // "s" + x builds a fresh string each run; if Comparable escaped
+ // its argument, this concatenation would have to heap-allocate.
+ s := "s" + x
+ Comparable(seed, s)
+ })
+ if allocs > 0 {
+ t.Errorf("got %v allocs, want 0", allocs)
+ }
+
+ // A struct whose only pointer field is a string should likewise
+ // not be forced to escape.
+ type S struct {
+ a int
+ b string
+ }
+ allocs = testing.AllocsPerRun(10, func() {
+ s := S{123, "s" + x}
+ Comparable(seed, s)
+ })
+ if allocs > 0 {
+ t.Errorf("got %v allocs, want 0", allocs)
+ }
+}
+
// Make sure a Hash implements the hash.Hash and hash.Hash64 interfaces.
var _ hash.Hash = &Hash{}
var _ hash.Hash64 = &Hash{}