From 19ed0d993cf7b0df804c4c2e96dc674da4059e03 Mon Sep 17 00:00:00 2001
From: Diogo Pinela
Date: Tue, 3 Mar 2020 21:03:40 +0000
Subject: [PATCH] cmd/compile: use staticuint64s instead of staticbytes
MIME-Version: 1.0
Content-Type: text/plain; charset=utf8
Content-Transfer-Encoding: 8bit

There are still two places in src/runtime/string.go that use
staticbytes, so we cannot delete it just yet.

There is a new codegen test to verify that the index calculation
is constant-folded, at least on amd64. ppc64, mips[64] and s390x
cannot currently do that.

There is also a new runtime benchmark to ensure that this does not
slow down performance (tested against parent commit):

name                      old time/op  new time/op  delta
ConvT2EByteSized/bool-4   1.07ns ± 1%  1.07ns ± 1%   ~     (p=0.060 n=14+15)
ConvT2EByteSized/uint8-4  1.06ns ± 1%  1.07ns ± 1%   ~     (p=0.095 n=14+15)

Updates #37612

Change-Id: I5ec30738edaa48cda78dfab4a78e24a32fa7fd6a
Reviewed-on: https://go-review.googlesource.com/c/go/+/221957
Run-TryBot: Ian Lance Taylor
TryBot-Result: Gobot Gobot
Reviewed-by: Josh Bleecher Snyder
---
 src/cmd/compile/internal/gc/go.go   |  2 +-
 src/cmd/compile/internal/gc/walk.go | 37 +++++++++++++++++++++++---------
 src/runtime/iface_test.go           | 14 +++++++++++
 test/codegen/smallintiface.go       | 22 +++++++++++++++++
 4 files changed, 63 insertions(+), 12 deletions(-)
 create mode 100644 test/codegen/smallintiface.go

diff --git a/src/cmd/compile/internal/gc/go.go b/src/cmd/compile/internal/gc/go.go
index 50b866ca65..85c857c214 100644
--- a/src/cmd/compile/internal/gc/go.go
+++ b/src/cmd/compile/internal/gc/go.go
@@ -279,7 +279,7 @@ type Arch struct {
 var thearch Arch
 
 var (
-	staticbytes,
+	staticuint64s,
 	zerobase *Node
 
 	assertE2I,
diff --git a/src/cmd/compile/internal/gc/walk.go b/src/cmd/compile/internal/gc/walk.go
index d468f241f9..14af03f58c 100644
--- a/src/cmd/compile/internal/gc/walk.go
+++ b/src/cmd/compile/internal/gc/walk.go
@@ -837,10 +837,12 @@ opswitch:
 			break
 		}
 
-		if staticbytes == nil {
-			staticbytes = newname(Runtimepkg.Lookup("staticbytes"))
-			staticbytes.SetClass(PEXTERN)
-			staticbytes.Type = types.NewArray(types.Types[TUINT8], 256)
+		if staticuint64s == nil {
+			staticuint64s = newname(Runtimepkg.Lookup("staticuint64s"))
+			staticuint64s.SetClass(PEXTERN)
+			// The actual type is [256]uint64, but we use [256*8]uint8 so we can address
+			// individual bytes.
+			staticuint64s.Type = types.NewArray(types.Types[TUINT8], 256*8)
 			zerobase = newname(Runtimepkg.Lookup("zerobase"))
 			zerobase.SetClass(PEXTERN)
 			zerobase.Type = types.Types[TUINTPTR]
@@ -856,9 +858,16 @@ opswitch:
 			cheapexpr(n.Left, init) // Evaluate n.Left for side-effects. See issue 19246.
 			value = zerobase
 		case fromType.IsBoolean() || (fromType.Size() == 1 && fromType.IsInteger()):
-			// n.Left is a bool/byte. Use staticbytes[n.Left].
+			// n.Left is a bool/byte. Use staticuint64s[n.Left * 8] on little-endian
+			// and staticuint64s[n.Left * 8 + 7] on big-endian.
 			n.Left = cheapexpr(n.Left, init)
-			value = nod(OINDEX, staticbytes, byteindex(n.Left))
+			// byteindex widens n.Left so that the multiplication doesn't overflow.
+			index := nod(OLSH, byteindex(n.Left), nodintconst(3))
+			index.SetBounded(true)
+			if thearch.LinkArch.ByteOrder == binary.BigEndian {
+				index = nod(OADD, index, nodintconst(7))
+			}
+			value = nod(OINDEX, staticuint64s, index)
 			value.SetBounded(true)
 		case n.Left.Class() == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly():
 			// n.Left is a readonly global; use it directly.
@@ -2423,15 +2432,21 @@ func convnop(n *Node, t *types.Type) *Node {
 	return n
 }
 
-// byteindex converts n, which is byte-sized, to a uint8.
-// We cannot use conv, because we allow converting bool to uint8 here,
+// byteindex converts n, which is byte-sized, to an int used to index into an array.
+// We cannot use conv, because we allow converting bool to int here,
 // which is forbidden in user code.
 func byteindex(n *Node) *Node {
-	if types.Identical(n.Type, types.Types[TUINT8]) {
-		return n
+	// We cannot convert from bool to int directly.
+	// While converting from int8 to int is possible, it would yield
+	// the wrong result for negative values.
+	// Reinterpreting the value as an unsigned byte solves both cases.
+	if !types.Identical(n.Type, types.Types[TUINT8]) {
+		n = nod(OCONV, n, nil)
+		n.Type = types.Types[TUINT8]
+		n.SetTypecheck(1)
 	}
 	n = nod(OCONV, n, nil)
-	n.Type = types.Types[TUINT8]
+	n.Type = types.Types[TINT]
 	n.SetTypecheck(1)
 	return n
 }
diff --git a/src/runtime/iface_test.go b/src/runtime/iface_test.go
index 73beebffe2..4fab6c968a 100644
--- a/src/runtime/iface_test.go
+++ b/src/runtime/iface_test.go
@@ -95,6 +95,19 @@ func BenchmarkNeIfaceConcrete(b *testing.B) {
 	}
 }
 
+func BenchmarkConvT2EByteSized(b *testing.B) {
+	b.Run("bool", func(b *testing.B) {
+		for i := 0; i < b.N; i++ {
+			e = yes
+		}
+	})
+	b.Run("uint8", func(b *testing.B) {
+		for i := 0; i < b.N; i++ {
+			e = eight8
+		}
+	})
+}
+
 func BenchmarkConvT2ESmall(b *testing.B) {
 	for i := 0; i < b.N; i++ {
 		e = ts
@@ -310,6 +323,7 @@ func TestZeroConvT2x(t *testing.T) {
 var (
 	eight8  uint8 = 8
 	eight8I T8    = 8
+	yes     bool  = true
 
 	zero16  uint16 = 0
 	zero16I T16    = 0
diff --git a/test/codegen/smallintiface.go b/test/codegen/smallintiface.go
new file mode 100644
index 0000000000..0207a0af79
--- /dev/null
+++ b/test/codegen/smallintiface.go
@@ -0,0 +1,22 @@
+// asmcheck
+
+package codegen
+
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+func booliface() interface{} {
+	// amd64:`LEAQ\truntime.staticuint64s\+8\(SB\)`
+	return true
+}
+
+func smallint8iface() interface{} {
+	// amd64:`LEAQ\truntime.staticuint64s\+2024\(SB\)`
+	return int8(-3)
+}
+
+func smalluint8iface() interface{} {
+	// amd64:`LEAQ\truntime.staticuint64s\+24\(SB\)`
+	return uint8(3)
+}
-- 
2.50.0
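For illustration only, not part of the patch: a minimal standalone Go sketch of the addressing trick the walk.go hunk emits. Converting a one-byte value v to an interface points the data word at a byte inside runtime.staticuint64s, at offset v*8 on little-endian machines and v*8+7 on big-endian ones, which is where the low-order byte of the uint64 entry holding v lives. The local table, byteOffset, and hostIsBigEndian names below are illustrative stand-ins, not the runtime's or the compiler's actual code.

// Sketch of the staticuint64s indexing scheme; names here are illustrative.
package main

import (
	"fmt"
	"unsafe"
)

// table plays the role of runtime.staticuint64s: entry i holds the value i,
// so a pointer into it can serve as the data word of an interface holding a
// byte-sized value.
var table [256]uint64

func init() {
	for i := range table {
		table[i] = uint64(i)
	}
}

// hostIsBigEndian reports the machine byte order by inspecting the first
// byte of a known 16-bit value.
func hostIsBigEndian() bool {
	x := uint16(1)
	return (*[2]byte)(unsafe.Pointer(&x))[0] == 0
}

// byteOffset mirrors the index expression built in walk.go: treat the value
// as an unsigned byte, shift left by 3 (8 bytes per uint64 entry), and on
// big-endian add 7 so the offset lands on the low-order byte of the entry.
func byteOffset(v uint8) uintptr {
	off := uintptr(v) << 3
	if hostIsBigEndian() {
		off += 7
	}
	return off
}

func main() {
	// View the uint64 table as raw bytes, the way the compiler's
	// [256*8]uint8 redeclaration of staticuint64s does.
	raw := (*[256 * 8]byte)(unsafe.Pointer(&table))

	fmt.Println(raw[byteOffset(1)])          // 1   (what `true` converts to; offset 8 on amd64)
	fmt.Println(raw[byteOffset(3)])          // 3   (uint8(3); offset 24 on amd64, as in the codegen test)
	fmt.Println(raw[byteOffset(uint8(253))]) // 253 (int8(-3) reinterpreted as a byte; offset 2024 on amd64)
}

On a big-endian machine the printed values are the same; only the offsets grow by 7, which is why the walk.go change adds 7 to the shifted index for binary.BigEndian targets.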