text = fmt.Sprintf("esc(%d)", n.Esc())
}
- if e, ok := n.Opt().(*location); ok && e.loopDepth != 0 {
- if text != "" {
- text += " "
+ if n.Op() == ir.ONAME {
+ n := n.(*ir.Name)
+ if e, ok := n.Opt.(*location); ok && e.loopDepth != 0 {
+ if text != "" {
+ text += " "
+ }
+ text += fmt.Sprintf("ld(%d)", e.loopDepth)
}
- text += fmt.Sprintf("ld(%d)", e.loopDepth)
}
+
return text
}
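The hunk above shows the shape of the whole change: escape-analysis state that every node used to expose through Opt()/SetOpt() now lives in a plain Opt field on *ir.Name, so callers first narrow from the Node interface to the concrete name. A minimal standalone sketch of that narrowing pattern, with toy types rather than the compiler's ir package:

package main

import "fmt"

// Node is a toy stand-in for the compiler's node interface.
type Node interface{ Op() string }

// Name is the one node kind that carries the side-channel slot.
type Name struct {
	Opt interface{} // e.g. *location during escape analysis
}

func (*Name) Op() string { return "ONAME" }

type location struct{ loopDepth int }

func describe(n Node) string {
	text := ""
	if n.Op() == "ONAME" { // narrow before touching per-name state
		n := n.(*Name)
		if loc, ok := n.Opt.(*location); ok && loc.loopDepth != 0 {
			text = fmt.Sprintf("ld(%d)", loc.loopDepth)
		}
	}
	return text
}

func main() {
	fmt.Println(describe(&Name{Opt: &location{loopDepth: 2}})) // ld(2)
}

Only the node kind that actually uses the slot pays for it in this sketch; everything else stays behind the plain interface.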
// Record loop depth at declaration.
n := n.(*ir.Decl)
if !ir.IsBlank(n.X) {
- e.dcl(n.X)
+ e.dcl(n.X.(*ir.Name))
}
case ir.OLABEL:
var ks []hole
for _, cas := range n.Cases { // cases
if typesw && n.Tag.(*ir.TypeSwitchGuard).Tag != nil {
- cv := cas.Var
+ cv := cas.Var.(*ir.Name)
k := e.dcl(cv) // type switch variables have no ODCL.
if cv.Type().HasPointers() {
ks = append(ks, k.dotType(cv.Type(), cas, "switch case"))
return loc.asHole()
}
-func (e *escape) dcl(n ir.Node) hole {
+func (e *escape) dcl(n *ir.Name) hole {
loc := e.oldLoc(n)
loc.loopDepth = e.loopDepth
return loc.asHole()
}
e.allLocs = append(e.allLocs, loc)
if n != nil {
- if n.Op() == ir.ONAME && n.Name().Curfn != e.curfn {
+ if n.Op() == ir.ONAME {
+ n := n.(*ir.Name)
- base.Fatalf("curfn mismatch: %v != %v", n.Name().Curfn, e.curfn)
- }
+ if n.Curfn != e.curfn {
+ base.Fatalf("curfn mismatch: %v != %v", n.Curfn, e.curfn)
+ }
- if n.Opt() != nil {
- base.Fatalf("%v already has a location", n)
+ if n.Opt != nil {
+ base.Fatalf("%v already has a location", n)
+ }
+ n.Opt = loc
}
- n.SetOpt(loc)
if why := HeapAllocReason(n); why != "" {
e.flow(e.heapHole().addr(n, why), loc)
return loc
}
-func (e *escape) oldLoc(n ir.Node) *location {
- n = canonicalNode(n)
- return n.Opt().(*location)
+func (e *escape) oldLoc(n *ir.Name) *location {
+ n = canonicalNode(n).(*ir.Name)
+ return n.Opt.(*location)
}
func (l *location) asHole() hole {
if n == nil {
continue
}
- n.SetOpt(nil)
+ if n.Op() == ir.ONAME {
+ n := n.(*ir.Name)
+ n.Opt = nil
+ }
// Update n.Esc based on escape analysis results.
return esc.Encode()
}
- n := ir.AsNode(f.Nname)
+ n := f.Nname.(*ir.Name)
loc := e.oldLoc(n)
esc := loc.paramEsc
esc.Optimize()
type miniExpr struct {
miniNode
typ *types.Type
- init Nodes // TODO(rsc): Don't require every Node to have an init
- opt interface{} // TODO(rsc): Don't require every Node to have an opt?
+ init Nodes // TODO(rsc): Don't require every Node to have an init
flags bitset8
}
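As a rough standalone illustration (hypothetical structs, not the compiler's), an interface{} slot like the removed opt field costs two words per value; on a 64-bit build the toy struct below drops from 56 to 40 bytes when the slot goes away:

package main

import (
	"fmt"
	"unsafe"
)

type withOpt struct {
	typ   unsafe.Pointer // stand-in for *types.Type
	init  []int          // stand-in for the Nodes slice
	opt   interface{}    // the slot being removed
	flags uint8
}

type withoutOpt struct {
	typ   unsafe.Pointer
	init  []int
	flags uint8
}

func main() {
	fmt.Println(unsafe.Sizeof(withOpt{}), unsafe.Sizeof(withoutOpt{})) // 56 40 on 64-bit
}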
miniExprTransient
miniExprBounded
miniExprImplicit // for use by implementations; not supported by every Expr
+ miniExprCheckPtr
)
func (*miniExpr) isExpr() {}
func (n *miniExpr) Type() *types.Type { return n.typ }
func (n *miniExpr) SetType(x *types.Type) { n.typ = x }
-func (n *miniExpr) Opt() interface{} { return n.opt }
-func (n *miniExpr) SetOpt(x interface{}) { n.opt = x }
func (n *miniExpr) HasCall() bool { return n.flags&miniExprHasCall != 0 }
func (n *miniExpr) SetHasCall(b bool) { n.flags.set(miniExprHasCall, b) }
func (n *miniExpr) NonNil() bool { return n.flags&miniExprNonNil != 0 }
func (n *ConvExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *ConvExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
+func (n *ConvExpr) CheckPtr() bool { return n.flags&miniExprCheckPtr != 0 }
+func (n *ConvExpr) SetCheckPtr(b bool) { n.flags.set(miniExprCheckPtr, b) }
func (n *ConvExpr) SetOp(op Op) {
switch op {
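The new CheckPtr accessors follow the same flag-bit pattern as Implicit above: one bit in a small bitset with a getter and a masked setter. A self-contained sketch of that pattern, with illustrative names rather than the ir package's:

package main

import "fmt"

type bitset8 uint8

func (f *bitset8) set(mask uint8, b bool) {
	if b {
		*f |= bitset8(mask)
	} else {
		*f &^= bitset8(mask)
	}
}

const (
	flagImplicit uint8 = 1 << iota
	flagCheckPtr
)

type convExpr struct{ flags bitset8 }

func (n *convExpr) CheckPtr() bool     { return uint8(n.flags)&flagCheckPtr != 0 }
func (n *convExpr) SetCheckPtr(b bool) { n.flags.set(flagCheckPtr, b) }

func main() {
	var n convExpr
	n.SetCheckPtr(true)
	fmt.Println(n.CheckPtr()) // true
	n.SetCheckPtr(false)
	fmt.Println(n.CheckPtr()) // false
}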
func (n *miniNode) SetHasCall(bool) { panic(n.no("SetHasCall")) }
func (n *miniNode) NonNil() bool { return false }
func (n *miniNode) MarkNonNil() { panic(n.no("MarkNonNil")) }
-func (n *miniNode) Opt() interface{} { return nil }
-func (n *miniNode) SetOpt(interface{}) { panic(n.no("SetOpt")) }
Func *Func
Offset_ int64
val constant.Value
+ Opt interface{} // for use by escape analysis
orig Node
Embed *[]Embed // list of embedded files, for ONAME var
return n.val
}
-// SetVal sets the constant.Value for the node,
-// which must not have been used with SetOpt.
+// SetVal sets the constant.Value for the node.
func (n *Name) SetVal(v constant.Value) {
if n.op != OLITERAL {
panic(n.no("SetVal"))
SetEsc(x uint16)
Walkdef() uint8
SetWalkdef(x uint8)
- Opt() interface{}
- SetOpt(x interface{})
Diag() bool
SetDiag(x bool)
Typecheck() uint8
}
func walkCheckPtrArithmetic(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
- // Calling cheapexpr(n, init) below leads to a recursive call
- // to walkexpr, which leads us back here again. Use n.Opt to
+ // Calling cheapexpr(n, init) below leads to a recursive call to
+ // walkexpr, which leads us back here again. Use n.CheckPtr to
// prevent infinite loops.
- if opt := n.Opt(); opt == &walkCheckPtrArithmeticMarker {
+ if n.CheckPtr() {
return n
- } else if opt != nil {
- // We use n.Opt() here because today it's not used for OCONVNOP. If that changes,
- // there's no guarantee that temporarily replacing it is safe, so just hard fail here.
- base.Fatalf("unexpected Opt: %v", opt)
}
- n.SetOpt(&walkCheckPtrArithmeticMarker)
- defer n.SetOpt(nil)
+ n.SetCheckPtr(true)
+ defer n.SetCheckPtr(false)
// TODO(mdempsky): Make stricter. We only need to exempt
// reflect.Value.Pointer and reflect.Value.UnsafeAddr.
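The comment in this hunk describes a re-entrancy guard: walkCheckPtrArithmetic can be re-entered through cheapexpr, so the node is marked before the nested walk and unmarked on the way out, and with a dedicated flag no sentinel value needs to be parked in an interface{} slot. A standalone sketch of that guard, using a hypothetical walker rather than the compiler's:

package main

import "fmt"

type conv struct {
	name     string
	checkPtr bool // plays the role of the CheckPtr bit
}

func walk(n *conv) *conv {
	if n.checkPtr {
		return n // already being rewritten further up the stack
	}
	n.checkPtr = true
	defer func() { n.checkPtr = false }()

	fmt.Println("rewriting", n.name)
	return walk(n) // stands in for the recursive call through cheapexpr/walkexpr
}

func main() {
	walk(&conv{name: "unsafe pointer arithmetic"})
}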
var wrapCall_prgen int
-var walkCheckPtrArithmeticMarker byte
-
// appendWalkStmt typechecks and walks stmt and then appends it to init.
func appendWalkStmt(init *ir.Nodes, stmt ir.Node) {
op := stmt.Op()