This is a redo of CL 41076, backported to the 1.8 release branch.
There were major conflicts, so I essentially had to rewrite it
from scratch. The way Progs are allocated changed. Liveness
analysis and Prog generation got reordered. Liveness analysis
changed from running on gc.BasicBlock to running on ssa.Block.
All of that makes the logic quite a bit different.
Please review carefully.
From CL 41076:
At VARKILLs, zero a variable if it is ambiguously live.
After the VARKILL, anything this variable references
might be collected. If the variable were to become live again
later, the GC would see references to already-collected objects.
We don't know a variable is ambiguously live until very
late in compilation (after lowering, register allocation, ...),
so it is hard to generate the code in an arch-independent way.
We also have to be careful not to clobber any registers.
Fortunately, this almost never happens, so performance is ~irrelevant.
There are only 2 instances where this triggers in the stdlib.
Fixes #20029
Change-Id: Ibb757eec58ee07f40df5e561b19d315684dc4bda
Reviewed-on: https://go-review.googlesource.com/43998
Run-TryBot: Keith Randall <khr@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
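
For reviewers: every per-arch zeroAuto in this CL has the same shape and
differs only in the store opcode and in whether a zero register or a
store-immediate is available. Below is a minimal sketch of that common
shape using the amd64 names (the zeroAutoSketch name is mine and the
package/import boilerplate is omitted; the per-arch versions in the diff
are the real code):

	// Sketch: insert pointer-word-sized stores of zero for the auto n
	// directly after the Prog pp (normally the VARKILL), chaining each
	// new Prog so the stores stay in order.
	func zeroAutoSketch(n *gc.Node, pp *obj.Prog) {
		sym := gc.Linksym(n.Sym)
		for i := int64(0); i < n.Type.Size(); i += int64(gc.Widthptr) {
			p := gc.AddAsmAfter(x86.AMOVQ, pp) // splice after pp
			pp = p                             // next store goes after this one
			p.From.Type = obj.TYPE_CONST       // source: constant 0
			p.From.Offset = 0
			p.To.Type = obj.TYPE_MEM // destination: n's stack slot
			p.To.Name = obj.NAME_AUTO
			p.To.Reg = x86.REG_SP
			p.To.Offset = n.Xoffset + i
			p.To.Sym = sym
		}
	}

arm has no store-immediate, so it first loads 0 into the assembler
temporary REGTMP; s390x replaces the loop with a single CLEAR of the
whole slot.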
gc.Thearch.SSAMarkMoves = ssaMarkMoves
gc.Thearch.SSAGenValue = ssaGenValue
gc.Thearch.SSAGenBlock = ssaGenBlock
+ gc.Thearch.ZeroAuto = zeroAuto
}
return p
}
+func zeroAuto(n *gc.Node, pp *obj.Prog) {
+ // Note: this code must not clobber any registers.
+ op := x86.AMOVQ
+ if gc.Widthptr == 4 {
+ op = x86.AMOVL
+ }
+ sym := gc.Linksym(n.Sym)
+ size := n.Type.Size()
+ for i := int64(0); i < size; i += int64(gc.Widthptr) {
+ p := gc.AddAsmAfter(op, pp)
+ pp = p
+ p.From.Type = obj.TYPE_CONST
+ p.From.Offset = 0
+ p.To.Type = obj.TYPE_MEM
+ p.To.Name = obj.NAME_AUTO
+ p.To.Reg = x86.REG_SP
+ p.To.Offset = n.Xoffset + i
+ p.To.Sym = sym
+ }
+}
+
func ginsnop() {
// This is actually not the x86 NOP anymore,
// but at the point where it gets used, AX is dead
gc.Thearch.SSAMarkMoves = func(s *gc.SSAGenState, b *ssa.Block) {}
gc.Thearch.SSAGenValue = ssaGenValue
gc.Thearch.SSAGenBlock = ssaGenBlock
+ gc.Thearch.ZeroAuto = zeroAuto
}
return p
}
+func zeroAuto(n *gc.Node, pp *obj.Prog) {
+ // Note: this code must not clobber any registers.
+ sym := gc.Linksym(n.Sym)
+ size := n.Type.Size()
+ // ARM cannot store an immediate directly to memory, so first load 0
+ // into REGTMP (the assembler temporary, which is safe to clobber).
+ p := gc.AddAsmAfter(arm.AMOVW, pp)
+ pp = p
+ p.From.Type = obj.TYPE_CONST
+ p.From.Offset = 0
+ p.To.Type = obj.TYPE_REG
+ p.To.Reg = arm.REGTMP
+ for i := int64(0); i < size; i += 4 {
+ p := gc.AddAsmAfter(arm.AMOVW, pp)
+ pp = p
+ p.From.Type = obj.TYPE_REG
+ p.From.Reg = arm.REGTMP
+ p.To.Type = obj.TYPE_MEM
+ p.To.Name = obj.NAME_AUTO
+ p.To.Reg = arm.REGSP
+ p.To.Offset = n.Xoffset + i
+ p.To.Sym = sym
+ }
+}
+
func ginsnop() {
p := gc.Prog(arm.AAND)
gc.Thearch.SSAMarkMoves = func(s *gc.SSAGenState, b *ssa.Block) {}
gc.Thearch.SSAGenValue = ssaGenValue
gc.Thearch.SSAGenBlock = ssaGenBlock
+ gc.Thearch.ZeroAuto = zeroAuto
}
return p
}
+func zeroAuto(n *gc.Node, pp *obj.Prog) {
+ // Note: this code must not clobber any registers.
+ sym := gc.Linksym(n.Sym)
+ size := n.Type.Size()
+ for i := int64(0); i < size; i += 8 {
+ p := gc.AddAsmAfter(arm64.AMOVD, pp)
+ pp = p
+ p.From.Type = obj.TYPE_REG
+ p.From.Reg = arm64.REGZERO
+ p.To.Type = obj.TYPE_MEM
+ p.To.Name = obj.NAME_AUTO
+ p.To.Reg = arm64.REGSP
+ p.To.Offset = n.Xoffset + i
+ p.To.Sym = sym
+ }
+}
+
func ginsnop() {
p := gc.Prog(arm64.AHINT)
p.From.Type = obj.TYPE_CONST
// SSAGenBlock emits end-of-block Progs. SSAGenValue should be called
// for all values in the block before SSAGenBlock.
SSAGenBlock func(s *SSAGenState, b, next *ssa.Block)
+
+ // ZeroAuto emits code to zero the given auto stack variable.
+ // Code is added immediately after pp.
+ // ZeroAuto must not use any non-temporary registers.
+ // ZeroAuto will only be called for variables which contain a pointer.
+ ZeroAuto func(n *Node, pp *obj.Prog)
}
var pcloc int32
return q
}
+// AddAsmAfter inserts a new Prog with the given opcode immediately
+// after p in the instruction list and returns the new Prog.
+func AddAsmAfter(as obj.As, p *obj.Prog) *obj.Prog {
+ q := Ctxt.NewProg()
+ Clearp(q)
+ q.As = as
+ q.Link = p.Link
+ p.Link = q
+ return q
+}
+
func ggloblnod(nam *Node) {
s := Linksym(nam.Sym)
s.Gotype = Linksym(ngotype(nam))
}
func removevardef(firstp *obj.Prog) {
+ // At VARKILLs, zero the variable if it is ambiguously live.
+ // After the VARKILL, anything this variable references
+ // might be collected. If the variable were to become live again
+ // later, the GC would see references to already-collected objects.
+ // See issue 20029.
for p := firstp; p != nil; p = p.Link {
+ if p.As != obj.AVARKILL {
+ continue
+ }
+ n := p.To.Node.(*Node)
+ if !n.Name.Needzero {
+ continue
+ }
+ if n.Class != PAUTO {
+ Fatalf("zero of variable which isn't PAUTO %v", n)
+ }
+ if n.Type.Size()%int64(Widthptr) != 0 {
+ Fatalf("zero of variable not a multiple of ptr size %v", n)
+ }
+ Thearch.ZeroAuto(n, p)
+ }
+
+ for p := firstp; p != nil; p = p.Link {
+
for p.Link != nil && (p.Link.As == obj.AVARDEF || p.Link.As == obj.AVARKILL || p.Link.As == obj.AVARLIVE) {
p.Link = p.Link.Link
}
gc.Thearch.SSAMarkMoves = func(s *gc.SSAGenState, b *ssa.Block) {}
gc.Thearch.SSAGenValue = ssaGenValue
gc.Thearch.SSAGenBlock = ssaGenBlock
+ gc.Thearch.ZeroAuto = zeroAuto
}
return p
}
+func zeroAuto(n *gc.Node, pp *obj.Prog) {
+ // Note: this code must not clobber any registers.
+ sym := gc.Linksym(n.Sym)
+ size := n.Type.Size()
+ for i := int64(0); i < size; i += 4 {
+ p := gc.AddAsmAfter(mips.AMOVW, pp)
+ pp = p
+ p.From.Type = obj.TYPE_REG
+ p.From.Reg = mips.REGZERO
+ p.To.Type = obj.TYPE_MEM
+ p.To.Name = obj.NAME_AUTO
+ p.To.Reg = mips.REGSP
+ p.To.Offset = n.Xoffset + i
+ p.To.Sym = sym
+ }
+}
+
func ginsnop() {
p := gc.Prog(mips.ANOR)
p.From.Type = obj.TYPE_REG
gc.Thearch.SSAMarkMoves = func(s *gc.SSAGenState, b *ssa.Block) {}
gc.Thearch.SSAGenValue = ssaGenValue
gc.Thearch.SSAGenBlock = ssaGenBlock
+ gc.Thearch.ZeroAuto = zeroAuto
}
return p
}
+func zeroAuto(n *gc.Node, pp *obj.Prog) {
+ // Note: this code must not clobber any registers.
+ sym := gc.Linksym(n.Sym)
+ size := n.Type.Size()
+ for i := int64(0); i < size; i += 8 {
+ p := gc.AddAsmAfter(mips.AMOVV, pp)
+ pp = p
+ p.From.Type = obj.TYPE_REG
+ p.From.Reg = mips.REGZERO
+ p.To.Type = obj.TYPE_MEM
+ p.To.Name = obj.NAME_AUTO
+ p.To.Reg = mips.REGSP
+ p.To.Offset = n.Xoffset + i
+ p.To.Sym = sym
+ }
+}
+
func ginsnop() {
p := gc.Prog(mips.ANOR)
p.From.Type = obj.TYPE_REG
gc.Thearch.SSAMarkMoves = ssaMarkMoves
gc.Thearch.SSAGenValue = ssaGenValue
gc.Thearch.SSAGenBlock = ssaGenBlock
+ gc.Thearch.ZeroAuto = zeroAuto
initvariants()
initproginfo()
return p
}
+func zeroAuto(n *gc.Node, pp *obj.Prog) {
+ // Note: this code must not clobber any registers.
+ sym := gc.Linksym(n.Sym)
+ size := n.Type.Size()
+ for i := int64(0); i < size; i += 8 {
+ p := gc.AddAsmAfter(ppc64.AMOVD, pp)
+ pp = p
+ p.From.Type = obj.TYPE_REG
+ p.From.Reg = ppc64.REGZERO
+ p.To.Type = obj.TYPE_MEM
+ p.To.Name = obj.NAME_AUTO
+ p.To.Reg = ppc64.REGSP
+ p.To.Offset = n.Xoffset + i
+ p.To.Sym = sym
+ }
+}
+
func ginsnop() {
p := gc.Prog(ppc64.AOR)
p.From.Type = obj.TYPE_REG
gc.Thearch.SSAMarkMoves = ssaMarkMoves
gc.Thearch.SSAGenValue = ssaGenValue
gc.Thearch.SSAGenBlock = ssaGenBlock
+ gc.Thearch.ZeroAuto = zeroAuto
}
return p
}
+func zeroAuto(n *gc.Node, pp *obj.Prog) {
+ // Note: this code must not clobber any registers.
+ p := gc.AddAsmAfter(s390x.ACLEAR, pp)
+ pp = p
+ p.From.Type = obj.TYPE_CONST
+ p.From.Offset = n.Type.Size()
+ p.To.Type = obj.TYPE_MEM
+ p.To.Name = obj.NAME_AUTO
+ p.To.Reg = s390x.REGSP
+ p.To.Offset = n.Xoffset
+ p.To.Sym = gc.Linksym(n.Sym)
+}
+
func ginsnop() {
p := gc.Prog(s390x.AOR)
p.From.Type = obj.TYPE_REG
gc.Thearch.SSAMarkMoves = ssaMarkMoves
gc.Thearch.SSAGenValue = ssaGenValue
gc.Thearch.SSAGenBlock = ssaGenBlock
+ gc.Thearch.ZeroAuto = zeroAuto
}
return p
}
+func zeroAuto(n *gc.Node, pp *obj.Prog) {
+ // Note: this code must not clobber any registers.
+ sym := gc.Linksym(n.Sym)
+ size := n.Type.Size()
+ for i := int64(0); i < size; i += 4 {
+ p := gc.AddAsmAfter(x86.AMOVL, pp)
+ pp = p
+ p.From.Type = obj.TYPE_CONST
+ p.From.Offset = 0
+ p.To.Type = obj.TYPE_MEM
+ p.To.Name = obj.NAME_AUTO
+ p.To.Reg = x86.REG_SP
+ p.To.Offset = n.Xoffset + i
+ p.To.Sym = sym
+ }
+}
+
func ginsnop() {
p := gc.Prog(x86.AXCHGL)
p.From.Type = obj.TYPE_REG
--- /dev/null
+// run
+
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Issue 20029: make sure we zero at VARKILLs of
+// ambiguously live variables.
+// The ambiguously live variable here is the hiter
+// for the inner range loop.
+
+package main
+
+import "runtime"
+
+func f(m map[int]int) {
+outer:
+ for i := 0; i < 10; i++ {
+ for k := range m {
+ if k == 5 {
+ continue outer
+ }
+ }
+ runtime.GC()
+ break
+ }
+ runtime.GC()
+}
+
+func main() {
+ m := map[int]int{1: 2, 2: 3, 3: 4}
+ f(m)
+}