* replace a copy of IsMethod with a call to it.
* use a few more switches where they simplify the code.
* prefer composite literals over "n := new(...); n.x = y; ...".
* use defers to get rid of three goto labels.
* rewrite updateHasCall into two funcs to remove gotos.
Passes toolstash-check on std cmd.
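
For reference, the defer and updateHasCall rewrites above follow the
shape sketched below. The sketch is self-contained and uses a toy node
type; updateHasCall/calcHasCall mirror the real split only in shape,
and descend, hasCall and recur are invented names for illustration,
not the compiler's real identifiers:

	package main

	import "fmt"

	// node stands in for the compiler's Node type.
	type node struct {
		op          string
		left, right *node
		hasCall     bool
		recur       bool
	}

	// Instead of setting a local flag and jumping to a shared "out:"
	// label, a thin setter delegates to a pure function that returns
	// the computed value directly.
	func updateHasCall(n *node) {
		if n == nil {
			return
		}
		n.hasCall = calcHasCall(n)
	}

	func calcHasCall(n *node) bool {
		if n.op == "call" {
			return true
		}
		if n.left != nil && n.left.hasCall {
			return true
		}
		return n.right != nil && n.right.hasCall
	}

	// A deferred cleanup runs on every return path, so early exits
	// that used to say "goto out" become plain returns.
	func descend(t *node) {
		t.recur = true
		defer func() { t.recur = false }()
		if t.left == nil {
			return // previously: goto out
		}
		descend(t.left)
	}

	func main() {
		n := &node{op: "add", left: &node{op: "call"}}
		updateHasCall(n.left)
		updateHasCall(n)
		descend(n)
		fmt.Println(n.hasCall, n.recur) // true false
	}
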
Change-Id: Icb5442a89a87319ef4b640bbc5faebf41b193ef1
Reviewed-on: https://go-review.googlesource.com/72070
Run-TryBot: Daniel Martí <mvdan@mvdan.cc>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
return convlit(n, t)
}
- if n.Val().Ctype() == CTNIL {
+ switch n.Val().Ctype() {
+ case CTNIL:
lineno = lno
if !n.Diag() {
yyerror("use of untyped nil")
}
n.Type = nil
- break
- }
-
- if n.Val().Ctype() == CTSTR {
+ case CTSTR:
t1 := types.Types[TSTRING]
n = convlit1(n, t1, false, reuse)
- break
+ default:
+ yyerror("defaultlit: unknown literal: %v", n)
}
- yyerror("defaultlit: unknown literal: %v", n)
-
case CTxxx:
Fatalf("defaultlit: idealkind is CTxxx: %+v", n)
// allowed level when a loop is encountered. Using -2 suffices to
// pass all the tests we have written so far, which we assume matches
// the level of complexity we want the escape analysis code to handle.
-const (
- MinLevel = -2
-)
+const MinLevel = -2
// A Level encodes the reference state and context applied to
// (stack, heap) allocated memory.
if (ctxt != PEXTERN && ctxt != PFUNC) || dclcontext != PEXTERN {
return
}
- if n.Type != nil && n.Type.IsKind(TFUNC) && n.Type.Recv() != nil { // method
+ if n.Type != nil && n.Type.IsKind(TFUNC) && n.IsMethod() {
return
}
}
func (pp *Progs) NewProg() *obj.Prog {
+ var p *obj.Prog
if pp.cacheidx < len(pp.progcache) {
- p := &pp.progcache[pp.cacheidx]
- p.Ctxt = Ctxt
+ p = &pp.progcache[pp.cacheidx]
pp.cacheidx++
- return p
+ } else {
+ p = new(obj.Prog)
}
- p := new(obj.Prog)
p.Ctxt = Ctxt
return p
}
)
// architecture-independent object file output
-const (
- ArhdrSize = 60
-)
+const ArhdrSize = 60
func formathdr(arhdr []byte, name string, size int64) {
copy(arhdr[:], fmt.Sprintf("%-16s%-12d%-6d%-6d%-8o%-10d`\n", name, 0, 0, 0, 0644, size))
fmt.Printf("can't create %s: %v\n", outfile, err)
errorexit()
}
+ defer bout.Close()
startobj := int64(0)
var arhdr [ArhdrSize]byte
}
if mode&modeLinkerObj == 0 {
- bout.Close()
return
}
formathdr(arhdr[:], "_go_.o", size)
bout.Write(arhdr[:])
}
-
- bout.Close()
}
func addptabs() {
a.Nbody.Set1(mkcall("block", nil, &ln))
l = ln.Slice()
a = typecheck(a, Etop)
- l = append(l, a)
- l = append(l, n)
+ l = append(l, a, n)
}
l = append(l, cas.Nbody.Slice()...)
func (e *ssafn) splitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
s := &types.Sym{Name: parent.N.(*Node).Sym.Name + suffix, Pkg: localpkg}
- n := new(Node)
- n.Name = new(Name)
- n.Op = ONAME
- n.Pos = parent.N.(*Node).Pos
+ n := &Node{
+ Name: new(Name),
+ Op: ONAME,
+ Pos: parent.N.(*Node).Pos,
+ }
n.Orig = n
s.Def = asTypesNode(n)
if n == nil {
return
}
+ n.SetHasCall(calcHasCall(n))
+}
- b := false
+func calcHasCall(n *Node) bool {
if n.Ninit.Len() != 0 {
// TODO(mdempsky): This seems overly conservative.
- b = true
- goto out
+ return true
}
switch n.Op {
case OLITERAL, ONAME, OTYPE:
- if b || n.HasCall() {
+ if n.HasCall() {
Fatalf("OLITERAL/ONAME/OTYPE should never have calls: %+v", n)
}
- return
+ return false
case OCALL, OCALLFUNC, OCALLMETH, OCALLINTER:
- b = true
- goto out
+ return true
case OANDAND, OOROR:
// hard with instrumented code
if instrumenting {
- b = true
- goto out
+ return true
}
case OINDEX, OSLICE, OSLICEARR, OSLICE3, OSLICE3ARR, OSLICESTR,
OIND, ODOTPTR, ODOTTYPE, ODIV, OMOD:
// These ops might panic, make sure they are done
// before we start marshaling args for a call. See issue 16760.
- b = true
- goto out
+ return true
}
if n.Left != nil && n.Left.HasCall() {
- b = true
- goto out
+ return true
}
if n.Right != nil && n.Right.HasCall() {
- b = true
- goto out
+ return true
}
-
-out:
- n.SetHasCall(b)
+ return false
}
func badtype(op Op, tl *types.Type, tr *types.Type) {
return
}
t.SetRecur(true)
+ defer t.SetRecur(false)
var u *types.Type
d--
// below for embedded fields.
c = lookdot0(s, t, save, ignorecase)
if c != 0 {
- goto out
+ return c, false
}
}
u = u.Elem()
}
if !u.IsStruct() && !u.IsInterface() {
- goto out
+ return c, false
}
for _, f := range u.Fields().Slice() {
}
if d < 0 {
// Found an embedded field at target depth.
- more = true
- goto out
+ return c, true
}
a, more1 := adddot1(s, f.Type, d, save, ignorecase)
if a != 0 && c == 0 {
}
}
-out:
- t.SetRecur(false)
return c, more
}
u = u.Elem()
}
- if !u.IsStruct() && !u.IsInterface() {
- goto out
- }
-
- for _, f := range u.Fields().Slice() {
- if f.Embedded == 0 {
- continue
- }
- if f.Sym == nil {
- continue
+ if u.IsStruct() || u.IsInterface() {
+ for _, f := range u.Fields().Slice() {
+ if f.Embedded == 0 {
+ continue
+ }
+ if f.Sym == nil {
+ continue
+ }
+ expand1(f.Type, false, followptr)
}
- expand1(f.Type, false, followptr)
}
-out:
t.SetRecur(false)
}
)
// The constant is known to runtime.
-const (
- tmpstringbufsize = 32
-)
+const tmpstringbufsize = 32
func walk(fn *Node) {
Curfn = fn
if n.Op != OAS {
Fatalf("convas: not OAS %v", n.Op)
}
+ defer updateHasCall(n)
n.SetTypecheck(1)
- var lt *types.Type
- var rt *types.Type
if n.Left == nil || n.Right == nil {
- goto out
+ return n
}
- lt = n.Left.Type
- rt = n.Right.Type
+ lt := n.Left.Type
+ rt := n.Right.Type
if lt == nil || rt == nil {
- goto out
+ return n
}
if isblank(n.Left) {
n.Right = defaultlit(n.Right, nil)
- goto out
+ return n
}
if !eqtype(lt, rt) {
}
dowidth(n.Right.Type)
-out:
- updateHasCall(n)
return n
}
}
}
- break
+ return n
}
- return n
}
// Is it possible that the computation of n might be