if n.Op() != ir.ONAME { // might be OTYPE or OLITERAL
continue
}
- switch n.Class_ {
+ switch n.Class {
case ir.PAUTO:
if !n.Used() {
// Text == nil -> generating abstract function
c := n.Sym().Name[0]
if c == '.' || n.Type().IsUntyped() {
continue
}
- if n.Class_ == ir.PPARAM && !ssagen.TypeOK(n.Type()) {
+ if n.Class == ir.PPARAM && !ssagen.TypeOK(n.Type()) {
// SSA-able args get location lists, and may move in and
// out of registers, so those are handled elsewhere.
// Autos and named output params seem to get handled
// with VARDEF, which creates location lists.
typename := dwarf.InfoPrefix + types.TypeSymName(n.Type())
decls = append(decls, n)
abbrev := dwarf.DW_ABRV_AUTO_LOCLIST
- isReturnValue := (n.Class_ == ir.PPARAMOUT)
- if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
+ isReturnValue := (n.Class == ir.PPARAMOUT)
+ if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
- } else if n.Class_ == ir.PAUTOHEAP {
+ } else if n.Class == ir.PAUTOHEAP {
// If dcl in question has been promoted to heap, do a bit
// of extra work to recover original class (auto or param);
// see issue 30908. This insures that we get the proper
// signature in the abstract function DIE, but leaves a
// misleading location for the param (we want pointer-to-heap
// and not stack).
// TODO(thanm): generate a better location expression
stackcopy := n.Stackcopy
- if stackcopy != nil && (stackcopy.Class_ == ir.PPARAM || stackcopy.Class_ == ir.PPARAMOUT) {
+ if stackcopy != nil && (stackcopy.Class == ir.PPARAM || stackcopy.Class == ir.PPARAMOUT) {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
- isReturnValue = (stackcopy.Class_ == ir.PPARAMOUT)
+ isReturnValue = (stackcopy.Class == ir.PPARAMOUT)
}
}
inlIndex := 0
var abbrev int
var offs int64
- switch n.Class_ {
+ switch n.Class {
case ir.PAUTO:
offs = n.FrameOffset()
abbrev = dwarf.DW_ABRV_AUTO
case ir.PPARAM, ir.PPARAMOUT:
abbrev = dwarf.DW_ABRV_PARAM
offs = n.FrameOffset() + base.Ctxt.FixedFrameSize()
default:
- base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class_, n)
+ base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class, n)
}
typename := dwarf.InfoPrefix + types.TypeSymName(n.Type())
declpos := base.Ctxt.InnermostPos(declPos(n))
return &dwarf.Var{
Name: n.Sym().Name,
- IsReturnValue: n.Class_ == ir.PPARAMOUT,
+ IsReturnValue: n.Class == ir.PPARAMOUT,
IsInlFormal: n.InlFormal(),
Abbrev: abbrev,
StackOffset: int32(offs),
n := debug.Vars[varID]
var abbrev int
- switch n.Class_ {
+ switch n.Class {
case ir.PAUTO:
abbrev = dwarf.DW_ABRV_AUTO_LOCLIST
case ir.PPARAM, ir.PPARAMOUT:
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
declpos := base.Ctxt.InnermostPos(n.Pos())
dvar := &dwarf.Var{
Name: n.Sym().Name,
- IsReturnValue: n.Class_ == ir.PPARAMOUT,
+ IsReturnValue: n.Class == ir.PPARAMOUT,
IsInlFormal: n.InlFormal(),
Abbrev: abbrev,
Type: base.Ctxt.Lookup(typename),
case ir.ONAME:
n := n.(*ir.Name)
- if n.Class_ == ir.PFUNC || n.Class_ == ir.PEXTERN {
+ if n.Class == ir.PFUNC || n.Class == ir.PEXTERN {
return
}
e.flow(k, e.oldLoc(n))
base.Fatalf("unexpected addr: %v", n)
case ir.ONAME:
n := n.(*ir.Name)
- if n.Class_ == ir.PEXTERN {
+ if n.Class == ir.PEXTERN {
break
}
k = e.oldLoc(n).asHole()
switch call.Op() {
case ir.OCALLFUNC:
switch v := ir.StaticValue(call.X); {
- case v.Op() == ir.ONAME && v.(*ir.Name).Class_ == ir.PFUNC:
+ case v.Op() == ir.ONAME && v.(*ir.Name).Class == ir.PFUNC:
fn = v.(*ir.Name)
case v.Op() == ir.OCLOSURE:
fn = v.(*ir.ClosureExpr).Func.Nname
}
func (l *location) isName(c ir.Class) bool {
- return l.n != nil && l.n.Op() == ir.ONAME && l.n.(*ir.Name).Class_ == c
+ return l.n != nil && l.n.Op() == ir.ONAME && l.n.(*ir.Name).Class == c
}
const numEscResults = 7
// Parameters are always passed via the stack.
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
- if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
+ if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT {
return ""
}
}
// if this is a tmpname (PAUTO), it was tagged by tmpname as not escaping.
// on PPARAM it means something different.
- if n.Class_ == ir.PAUTO && n.Esc() == ir.EscNever {
+ if n.Class == ir.PAUTO && n.Esc() == ir.EscNever {
break
}
break
}
- if n.Class_ != ir.PPARAM && n.Class_ != ir.PPARAMOUT && n.Class_ != ir.PAUTO {
+ if n.Class != ir.PPARAM && n.Class != ir.PPARAMOUT && n.Class != ir.PAUTO {
break
}
if base.Flag.CompilingRuntime {
base.Errorf("%v escapes to heap, not allowed in runtime", n)
}
- if n.Class_ == ir.PAUTOHEAP {
+ if n.Class == ir.PAUTOHEAP {
ir.Dump("n", n)
base.Fatalf("double move to heap")
}
// Parameters have a local stack copy used at function start/end
// in addition to the copy in the heap that may live longer than
// the function.
- if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
+ if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT {
if n.FrameOffset() == types.BADWIDTH {
base.Fatalf("addrescapes before param assignment")
}
stackcopy := typecheck.NewName(n.Sym())
stackcopy.SetType(n.Type())
stackcopy.SetFrameOffset(n.FrameOffset())
- stackcopy.Class_ = n.Class_
+ stackcopy.Class = n.Class
stackcopy.Heapaddr = heapaddr
- if n.Class_ == ir.PPARAMOUT {
+ if n.Class == ir.PPARAMOUT {
// Make sure the pointer to the heap copy is kept live throughout the function.
// The function could panic at any point, and then a defer could recover.
// Thus, we need the pointer to the heap copy always available so the
// post-deferreturn code can copy the return value back to the stack.
// See issue 16095.
}
// Parameters are before locals, so can stop early.
// This limits the search even in functions with many local variables.
- if d.Class_ == ir.PAUTO {
+ if d.Class == ir.PAUTO {
break
}
}
}
// Modify n in place so that uses of n now mean indirection of the heapaddr.
- n.Class_ = ir.PAUTOHEAP
+ n.Class = ir.PAUTOHEAP
n.SetFrameOffset(0)
n.Heapaddr = heapaddr
n.SetEsc(ir.EscHeap)
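// A hedged sketch (not part of the change above; names hypothetical): the net
// effect of this rewrite on a heap-promoted parameter p is
//
//	before: p            Class=PPARAM,    FrameOffset=8
//	after:  p            Class=PAUTOHEAP, FrameOffset=0, Heapaddr set
//	        p.Stackcopy  Class=PPARAM,    FrameOffset=8 (entry/exit copy)
//
// which is exactly the shape IsParamStackCopy and IsParamHeapCopy test for.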
func mkParamResultField(t *types.Type, s *types.Sym, which ir.Class) *types.Field {
field := types.NewField(src.NoXPos, s, t)
n := typecheck.NewName(s)
- n.Class_ = which
+ n.Class = which
field.Nname = n
n.SetType(t)
return field
// because symbols must be allocated before the parallel
// phase of the compiler.
for _, n := range fn.Dcl {
- switch n.Class_ {
+ switch n.Class {
case ir.PPARAM, ir.PPARAMOUT, ir.PAUTO:
if liveness.ShouldTrack(n) && n.Addrtaken() {
reflectdata.WriteType(n.Type())
func (p *exporter) markObject(n ir.Node) {
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
- if n.Class_ == ir.PFUNC {
+ if n.Class == ir.PFUNC {
inline.Inline_Flood(n, typecheck.Export)
}
}
if n.Type() == nil {
base.Fatalf("external %v nil type\n", n)
}
- if n.Class_ == ir.PFUNC {
+ if n.Class == ir.PFUNC {
return
}
if n.Sym().Pkg != types.LocalPkg {
if n == nil {
return
}
- if n.Op() != ir.ONAME || n.Class_ != ir.PFUNC {
- base.Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op(), n.Class_)
+ if n.Op() != ir.ONAME || n.Class != ir.PFUNC {
+ base.Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op(), n.Class)
}
fn := n.Func
if fn == nil {
case ir.ONAME:
n := n.(*ir.Name)
- switch n.Class_ {
+ switch n.Class {
case ir.PFUNC:
Inline_Flood(n, exportsym)
exportsym(n)
// runtime.throw is a "cheap call" like panic in normal code.
if n.X.Op() == ir.ONAME {
name := n.X.(*ir.Name)
- if name.Class_ == ir.PFUNC && types.IsRuntimePkg(name.Sym().Pkg) {
+ if name.Class == ir.PFUNC && types.IsRuntimePkg(name.Sym().Pkg) {
fn := name.Sym().Name
if fn == "getcallerpc" || fn == "getcallersp" {
return errors.New("call to " + fn)
case ir.ONAME:
n := n.(*ir.Name)
- if n.Class_ == ir.PAUTO {
+ if n.Class == ir.PAUTO {
v.usedLocals[n] = true
}
return n.Func
case ir.ONAME:
fn := fn.(*ir.Name)
- if fn.Class_ == ir.PFUNC {
+ if fn.Class == ir.PFUNC {
return fn.Func
}
case ir.OCLOSURE:
if ln.Op() != ir.ONAME {
continue
}
- if ln.Class_ == ir.PPARAMOUT { // return values handled below.
+ if ln.Class == ir.PPARAMOUT { // return values handled below.
continue
}
if ir.IsParamStackCopy(ln) { // ignore the on-stack copy of a parameter that moved to the heap
inlf := typecheck.Expr(inlvar(ln)).(*ir.Name)
inlvars[ln] = inlf
if base.Flag.GenDwarfInl > 0 {
- if ln.Class_ == ir.PPARAM {
+ if ln.Class == ir.PPARAM {
inlf.Name().SetInlFormal(true)
} else {
inlf.Name().SetInlLocal(true)
n := typecheck.NewName(var_.Sym())
n.SetType(var_.Type())
- n.Class_ = ir.PAUTO
+ n.Class = ir.PAUTO
n.SetUsed(true)
n.Curfn = ir.CurFunc // the calling function, not the called one
n.SetAddrtaken(var_.Addrtaken())
func retvar(t *types.Field, i int) *ir.Name {
n := typecheck.NewName(typecheck.LookupNum("~R", i))
n.SetType(t.Type)
- n.Class_ = ir.PAUTO
+ n.Class = ir.PAUTO
n.SetUsed(true)
n.Curfn = ir.CurFunc // the calling function, not the called one
ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
func argvar(t *types.Type, i int) ir.Node {
n := typecheck.NewName(typecheck.LookupNum("~arg", i))
n.SetType(t.Elem())
- n.Class_ = ir.PAUTO
+ n.Class = ir.PAUTO
n.SetUsed(true)
n.Curfn = ir.CurFunc // the calling function, not the called one
ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
func pruneUnusedAutos(ll []*ir.Name, vis *hairyVisitor) []*ir.Name {
s := make([]*ir.Name, 0, len(ll))
for _, n := range ll {
- if n.Class_ == ir.PAUTO {
+ if n.Class == ir.PAUTO {
if _, found := vis.usedLocals[n]; !found {
continue
}
panic(n.no("FuncName"))
}
fn := NewNameAt(n.Selection.Pos, MethodSym(n.X.Type(), n.Sel))
- fn.Class_ = PFUNC
+ fn.Class = PFUNC
fn.SetType(n.Type())
return fn
}
case ONAME:
n := n.(*Name)
- if n.Class_ == PFUNC {
+ if n.Class == PFUNC {
return false
}
return true
return nil
}
n := nn.(*Name)
- if n.Class_ != PAUTO || n.Addrtaken() {
+ if n.Class != PAUTO || n.Addrtaken() {
return nil
}
// MarkFunc marks a node as a function.
func MarkFunc(n *Name) {
- if n.Op() != ONAME || n.Class_ != Pxxx {
+ if n.Op() != ONAME || n.Class != Pxxx {
base.Fatalf("expected ONAME/Pxxx node, got %v", n)
}
- n.Class_ = PFUNC
+ n.Class = PFUNC
n.Sym().SetFunc(true)
}
type Name struct {
miniExpr
BuiltinOp Op // uint8
- Class_ Class // uint8
+ Class Class // uint8
pragma PragmaFlag // int16
flags bitset16
sym *types.Sym
func (n *Name) SetSym(x *types.Sym) { n.sym = x }
func (n *Name) SubOp() Op { return n.BuiltinOp }
func (n *Name) SetSubOp(x Op) { n.BuiltinOp = x }
-func (n *Name) Class() Class { return n.Class_ }
-func (n *Name) SetClass(x Class) { n.Class_ = x }
func (n *Name) SetFunc(x *Func) { n.Func = x }
func (n *Name) Offset() int64 { panic("Name.Offset") }
func (n *Name) SetOffset(x int64) {
return false
}
name := n.(*Name)
- return (name.Class_ == PPARAM || name.Class_ == PPARAMOUT) && name.Heapaddr != nil
+ return (name.Class == PPARAM || name.Class == PPARAMOUT) && name.Heapaddr != nil
}
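// Editorial note: with the Class()/SetClass() accessors deleted above, call
// sites switch from method calls to direct field access. A minimal
// before/after sketch (hypothetical variable n):
//
//	n.SetClass(ir.PAUTO)        // old
//	n.Class = ir.PAUTO          // new
//
//	if n.Class() == ir.PFUNC {} // old
//	if n.Class == ir.PFUNC {}   // new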
// IsParamHeapCopy reports whether this is the on-heap copy of
return false
}
name := n.(*Name)
- return name.Class_ == PAUTOHEAP && name.Stackcopy != nil
+ return name.Class == PAUTOHEAP && name.Stackcopy != nil
}
var RegFP *Name
Visit(n, func(n Node) {
switch n.Op() {
case ONAME:
- if n := n.(*Name); n.Class_ == PFUNC {
+ if n := n.(*Name); n.Class == PFUNC {
do(n.Defn)
}
case ODOTMETH, OCALLPART, OMETHEXPR:
// ShouldTrack reports whether the liveness analysis should track
// the variable n. We don't care about variables that have no pointers,
// nor do we care about non-local variables,
// nor do we care about empty structs (handled by the pointer check),
// nor do we care about the fake PAUTOHEAP variables.
func ShouldTrack(n *ir.Name) bool {
- return (n.Class_ == ir.PAUTO || n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT) && n.Type().HasPointers()
+ return (n.Class == ir.PAUTO || n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT) && n.Type().HasPointers()
}
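// Hedged examples of the predicate above (hypothetical locals):
//
//	var p *int // PAUTO, has pointers         -> tracked
//	var i int  // PAUTO, no pointers          -> not tracked
//	           // PEXTERN / PFUNC / PAUTOHEAP -> not tracked (wrong class)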
// getvariables returns the list of on-stack variables that we need to track
lv.cache.initialized = true
for i, node := range lv.vars {
- switch node.Class_ {
+ switch node.Class {
case ir.PPARAM:
// A return instruction with a p.to is a tail return, which brings
// the stack pointer back up (if it ever went down) and then jumps
// to a new function entirely. That form of instruction must read
// all the parameters for correctness, and similarly it must not
// write the results.
break
}
node := vars[i]
- switch node.Class_ {
+ switch node.Class {
case ir.PAUTO:
typebits.Set(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
// don't need to keep the stack copy live?
if lv.fn.HasDefer() {
for i, n := range lv.vars {
- if n.Class_ == ir.PPARAMOUT {
+ if n.Class == ir.PPARAMOUT {
if n.IsOutputParamHeapAddr() {
// Just to be paranoid. Heap addresses are PAUTOs.
base.Fatalf("variable %v both output param and heap output param", n)
if !liveout.Get(int32(i)) {
continue
}
- if n.Class_ == ir.PPARAM {
+ if n.Class == ir.PPARAM {
continue // ok
}
base.Fatalf("bad live variable at entry of %v: %L", lv.fn.Nname, n)
// Useful sanity check: on entry to the function,
// the only things that can possibly be live are the
// input parameters.
for j, n := range lv.vars {
- if n.Class_ != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
+ if n.Class != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Nname, n)
}
}
// (Nodes without pointers aren't in lv.vars; see livenessShouldTrack.)
var maxArgNode *ir.Name
for _, n := range lv.vars {
- switch n.Class_ {
+ switch n.Class {
case ir.PPARAM, ir.PPARAMOUT:
if maxArgNode == nil || n.FrameOffset() > maxArgNode.FrameOffset() {
maxArgNode = n
n := ir.NewReturnStmt(p.pos(stmt), p.exprList(stmt.Results))
if len(n.Results) == 0 && ir.CurFunc != nil {
for _, ln := range ir.CurFunc.Dcl {
- if ln.Class_ == ir.PPARAM {
+ if ln.Class == ir.PPARAM {
continue
}
- if ln.Class_ != ir.PPARAMOUT {
+ if ln.Class != ir.PPARAMOUT {
break
}
if ln.Sym().Def != ln {
if c == nil || c.Curfn != ir.CurFunc {
// Do not have a closure var for the active closure yet; make one.
c = typecheck.NewName(s)
- c.Class_ = ir.PAUTOHEAP
+ c.Class = ir.PAUTOHEAP
c.SetIsClosureVar(true)
c.Defn = n
if n.Op() == ir.ONONAME {
continue
}
- if n.Op() != ir.ONAME || n.(*ir.Name).Class_ != ir.PEXTERN {
+ if n.Op() != ir.ONAME || n.(*ir.Name).Class != ir.PEXTERN {
base.Fatalf("bad inittask: %v", n)
}
deps = append(deps, n.(*ir.Name).Linksym())
sym := typecheck.Lookup(".inittask")
task := typecheck.NewName(sym)
task.SetType(types.Types[types.TUINT8]) // fake type
- task.Class_ = ir.PEXTERN
+ task.Class = ir.PEXTERN
sym.Def = task
lsym := task.Linksym()
ot := 0
defn := dep.Defn
// Skip dependencies on functions (PFUNC) and
// variables already initialized (InitDone).
- if dep.Class_ != ir.PEXTERN || o.order[defn] == orderDone {
+ if dep.Class != ir.PEXTERN || o.order[defn] == orderDone {
continue
}
o.order[n]++
*path = append(*path, n)
for _, ref := range refers {
// Short-circuit variables that were initialized.
- if ref.Class_ == ir.PEXTERN && o.order[ref.Defn] == orderDone {
+ if ref.Class == ir.PEXTERN && o.order[ref.Defn] == orderDone {
continue
}
// Rotate loop so that the earliest variable declaration is at
// the start.
i := -1
for j, n := range l {
- if n.Class_ == ir.PEXTERN && (i == -1 || n.Pos().Before(l[i].Pos())) {
+ if n.Class == ir.PEXTERN && (i == -1 || n.Pos().Before(l[i].Pos())) {
i = j
}
}
switch n.Op() {
case ir.ONAME:
n := n.(*ir.Name)
- switch n.Class_ {
+ switch n.Class {
case ir.PEXTERN, ir.PFUNC:
d.foundDep(n)
}
return
}
d.seen.Add(n)
- if d.transitive && n.Class_ == ir.PFUNC {
+ if d.transitive && n.Class == ir.PFUNC {
d.inspectList(n.Defn.(*ir.Func).Body)
}
}
if s.Def == nil {
n := ir.NewNameAt(src.NoXPos, s)
n.SetType(types.Types[types.TUINT8])
- n.Class_ = ir.PEXTERN
+ n.Class = ir.PEXTERN
n.SetTypecheck(1)
s.Def = n
}
if s.Def == nil {
n := typecheck.NewName(s)
n.SetType(types.Types[types.TUINT8])
- n.Class_ = ir.PEXTERN
+ n.Class = ir.PEXTERN
n.SetTypecheck(1)
s.Def = n
itabs = append(itabs, itabEntry{t: t, itype: itype, lsym: n.Linksym()})
// }
nsym := dname(p.Sym().Name, "", nil, true)
t := p.Type()
- if p.Class_ != ir.PFUNC {
+ if p.Class != ir.PFUNC {
t = types.NewPtr(t)
}
tsym := WriteType(t)
if s.Def == nil {
x := typecheck.NewName(s)
x.SetType(types.Types[types.TUINT8])
- x.Class_ = ir.PEXTERN
+ x.Class = ir.PEXTERN
x.SetTypecheck(1)
s.Def = x
}
case OpAddr, OpLocalAddr:
// Propagate the address if it points to an auto.
n, ok := v.Aux.(*ir.Name)
- if !ok || n.Class() != ir.PAUTO {
+ if !ok || n.Class != ir.PAUTO {
return
}
if addr[v] == nil {
case OpVarDef, OpVarKill:
// v should be eliminated if we eliminate the auto.
n, ok := v.Aux.(*ir.Name)
- if !ok || n.Class() != ir.PAUTO {
+ if !ok || n.Class != ir.PAUTO {
return
}
if elim[v] == nil {
case OpVarLive:
// Don't delete the auto if it needs to be kept alive.
// We depend on this check to keep the autotmp stack slots for
// open-coded defers from being removed (since they
// may not be used by the inline code, but will be used by
// panic processing).
n, ok := v.Aux.(*ir.Name)
- if !ok || n.Class() != ir.PAUTO {
+ if !ok || n.Class != ir.PAUTO {
return
}
if !used[n] {
if !ok {
continue
}
- if n.Class() != ir.PAUTO {
+ if n.Class != ir.PAUTO {
continue
}
}
func (TestFrontend) Auto(pos src.XPos, t *types.Type) *ir.Name {
n := ir.NewNameAt(pos, &types.Sym{Name: "aFakeAuto"})
- n.SetClass(ir.PAUTO)
+ n.Class = ir.PAUTO
return n
}
func (d TestFrontend) SplitString(s LocalSlot) (LocalSlot, LocalSlot) {
return
}
fn := n.X.(*ir.Name)
- if fn.Class_ != ir.PFUNC || fn.Defn == nil {
+ if fn.Class != ir.PFUNC || fn.Defn == nil {
return
}
if !types.IsRuntimePkg(fn.Sym().Pkg) || fn.Sym().Name != "systemstack" {
// cmpstackvarlt reports whether stack variable a sorts before b.
// Autos sort after anything else; within autos, things with pointers
// sort first and zeroed things first, then decreasing size. Because
// autos are laid out at decreasing addresses, this puts pointers
// needing zeroing at the top of the stack and increasing in size.
// Non-autos sort on offset.
func cmpstackvarlt(a, b *ir.Name) bool {
- if (a.Class_ == ir.PAUTO) != (b.Class_ == ir.PAUTO) {
- return b.Class_ == ir.PAUTO
+ if (a.Class == ir.PAUTO) != (b.Class == ir.PAUTO) {
+ return b.Class == ir.PAUTO
}
- if a.Class_ != ir.PAUTO {
+ if a.Class != ir.PAUTO {
return a.FrameOffset() < b.FrameOffset()
}
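// Editorial note: elsewhere in ssagen this comparator is wrapped in a
// sort.Interface and applied to the declaration list, roughly
//
//	sort.Sort(byStackVar(fn.Dcl))
//
// so parameters stay in offset order ahead of all autos.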
// Mark the PAUTO's unused.
for _, ln := range fn.Dcl {
- if ln.Class_ == ir.PAUTO {
+ if ln.Class == ir.PAUTO {
ln.SetUsed(false)
}
}
for _, b := range f.Blocks {
for _, v := range b.Values {
if n, ok := v.Aux.(*ir.Name); ok {
- switch n.Class_ {
+ switch n.Class {
case ir.PPARAM, ir.PPARAMOUT:
// Don't modify nodfp; it is a global.
if n != ir.RegFP {
// Reassign stack offsets of the locals that are used.
lastHasPtr := false
for i, n := range fn.Dcl {
- if n.Op() != ir.ONAME || n.Class_ != ir.PAUTO {
+ if n.Op() != ir.ONAME || n.Class != ir.PAUTO {
continue
}
if !n.Used() {
func StackOffset(slot ssa.LocalSlot) int32 {
n := slot.N
var off int64
- switch n.Class_ {
+ switch n.Class {
case ir.PAUTO:
off = n.FrameOffset()
if base.Ctxt.FixedFrameSize() == 0 {
n := typecheck.NewName(s)
n.SetType(t)
n.SetFrameOffset(xoffset)
- n.Class_ = cl
+ n.Class = cl
return n
}
testdata := []struct {
n := typecheck.NewName(s)
n.SetType(t)
n.SetFrameOffset(xoffset)
- n.Class_ = cl
+ n.Class = cl
return n
}
inp := []*ir.Name{
var args []ssa.Param
var results []ssa.Param
for _, n := range fn.Dcl {
- switch n.Class_ {
+ switch n.Class {
case ir.PPARAM:
s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
args = append(args, ssa.Param{Type: n.Type(), Offset: int32(n.FrameOffset())})
case ir.PFUNC:
// local function - already handled by frontend
default:
- s.Fatalf("local variable with class %v unimplemented", n.Class_)
+ s.Fatalf("local variable with class %v unimplemented", n.Class)
}
}
// Populate SSAable arguments.
for _, n := range fn.Dcl {
- if n.Class_ == ir.PPARAM && s.canSSA(n) {
+ if n.Class == ir.PPARAM && s.canSSA(n) {
v := s.newValue0A(ssa.OpArg, n.Type(), n)
s.vars[n] = v
s.addNamedValue(n, v) // This helps with debugging information, not needed for compilation itself.
case ir.OCALLINTER:
n := n.(*ir.CallExpr)
s.callResult(n, callNormal)
- if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PFUNC {
+ if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class == ir.PFUNC {
if fn := n.X.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
n.X.Sym().Pkg == ir.Pkgs.Runtime && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap") {
m := s.mem()
case ir.ODCL:
n := n.(*ir.Decl)
- if n.X.Class_ == ir.PAUTOHEAP {
+ if n.X.Class == ir.PAUTOHEAP {
s.Fatalf("DCL %v", n)
}
if !v.Addrtaken() {
s.Fatalf("VARLIVE variable %v must have Addrtaken set", v)
}
- switch v.Class_ {
+ switch v.Class {
case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
default:
s.Fatalf("VARLIVE variable %v must be Auto or Arg", v)
return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
case ir.ONAME:
n := n.(*ir.Name)
- if n.Class_ == ir.PFUNC {
+ if n.Class == ir.PFUNC {
// "value" of a function is the address of the function's closure
sym := staticdata.FuncLinksym(n)
return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
if inplace {
if sn.Op() == ir.ONAME {
sn := sn.(*ir.Name)
- if sn.Class_ != ir.PEXTERN {
+ if sn.Class != ir.PEXTERN {
// Tell liveness we're about to build a new slice
s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
}
// If this assignment clobbers an entire local variable, then emit
// OpVarDef so liveness analysis knows the variable is redefined.
- if base := clobberBase(left); base.Op() == ir.ONAME && base.(*ir.Name).Class_ != ir.PEXTERN && skip == 0 {
+ if base := clobberBase(left); base.Op() == ir.ONAME && base.(*ir.Name).Class != ir.PEXTERN && skip == 0 {
s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base.(*ir.Name), s.mem(), !ir.IsAutoTmp(base))
}
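// A hedged illustration (hypothetical source): for a whole-variable
// assignment "x = y" the generated SSA is roughly
//
//	v1 = VarDef <mem> {x} v0
//	v2 = Store <mem> {T} &x y v1
//
// while a partial write such as "x.f = y" stores without the VarDef, since
// liveness must treat x as still partly defined.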
closureVal := s.expr(fn)
closure := s.openDeferSave(nil, fn.Type(), closureVal)
opendefer.closureNode = closure.Aux.(*ir.Name)
- if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class_ == ir.PFUNC) {
+ if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
opendefer.closure = closure
}
} else if n.Op() == ir.OCALLMETH {
switch n.Op() {
case ir.OCALLFUNC:
testLateExpansion = k != callDeferStack && ssa.LateCallExpansionEnabledWithin(s.f)
- if k == callNormal && fn.Op() == ir.ONAME && fn.(*ir.Name).Class_ == ir.PFUNC {
+ if k == callNormal && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
fn := fn.(*ir.Name)
sym = fn.Sym()
break
fallthrough
case ir.ONAME:
n := n.(*ir.Name)
- switch n.Class_ {
+ switch n.Class {
case ir.PEXTERN:
// global variable
v := s.entryNewValue1A(ssa.OpAddr, t, n.Linksym(), s.sb)
case ir.PPARAMOUT: // Same as PAUTO -- cannot generate LEA early.
// ensure that we reuse symbols for out parameters so
// that cse works on their addresses
return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
default:
- s.Fatalf("variable address class %v not implemented", n.Class_)
+ s.Fatalf("variable address class %v not implemented", n.Class)
return nil
}
case ir.ORESULT:
if ir.IsParamHeapCopy(name) {
return false
}
- if name.Class_ == ir.PAUTOHEAP {
+ if name.Class == ir.PAUTOHEAP {
s.Fatalf("canSSA of PAUTOHEAP %v", name)
}
- switch name.Class_ {
+ switch name.Class {
case ir.PEXTERN:
return false
case ir.PPARAMOUT:
return false
}
}
- if name.Class_ == ir.PPARAM && name.Sym() != nil && name.Sym().Name == ".this" {
+ if name.Class == ir.PPARAM && name.Sym() != nil && name.Sym().Name == ".this" {
// wrappers generated by genwrapper need to update
// the .this pointer in place.
// TODO: treat as a PPARAMOUT?
}
func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
- if n.Class_ == ir.Pxxx {
+ if n.Class == ir.Pxxx {
// Don't track our marker nodes (memVar etc.).
return
}
// Don't track temporary variables.
return
}
- if n.Class_ == ir.PPARAMOUT {
+ if n.Class == ir.PPARAMOUT {
// Don't track named output values. This prevents return values
// from being assigned too early. See #14591 and #14762. TODO: allow this.
return
if !n.Needzero() {
continue
}
- if n.Class_ != ir.PAUTO {
- e.Fatalf(n.Pos(), "needzero class %d", n.Class_)
+ if n.Class != ir.PAUTO {
+ e.Fatalf(n.Pos(), "needzero class %d", n.Class)
}
if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
a.Name = obj.NAME_EXTERN
a.Sym = n
case *ir.Name:
- if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
+ if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT {
a.Name = obj.NAME_PARAM
a.Sym = ir.Orig(n).(*ir.Name).Linksym()
a.Offset += n.FrameOffset()
a.Sym = n.Linksym()
a.Reg = int16(Arch.REGSP)
a.Offset = n.FrameOffset() + off
- if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
+ if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT {
a.Name = obj.NAME_PARAM
} else {
a.Name = obj.NAME_AUTO
func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
node := parent.N
- if node.Class_ != ir.PAUTO || node.Addrtaken() {
+ if node.Class != ir.PAUTO || node.Addrtaken() {
// addressed things and non-autos retain their parents (i.e., cannot truly be split)
return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
}
s.Def = n
ir.AsNode(s.Def).Name().SetUsed(true)
n.SetType(t)
- n.Class_ = ir.PAUTO
+ n.Class = ir.PAUTO
n.SetEsc(ir.EscNever)
n.Curfn = e.curfn
e.curfn.Dcl = append(e.curfn.Dcl, n)
if n.Sym() == nil {
base.Fatalf("pfuncsym nil n sym")
}
- if f.Class_ != ir.PFUNC {
- base.Fatalf("pfuncsym class not PFUNC %d", f.Class_)
+ if f.Class != ir.PFUNC {
+ base.Fatalf("pfuncsym class not PFUNC %d", f.Class)
}
s := n.Linksym()
s.WriteAddr(base.Ctxt, noff, types.PtrSize, FuncLinksym(f), 0)
}
func FuncLinksym(n *ir.Name) *obj.LSym {
- if n.Op() != ir.ONAME || n.Class_ != ir.PFUNC {
+ if n.Op() != ir.ONAME || n.Class != ir.PFUNC {
base.Fatalf("expected func name: %v", n)
}
return FuncSym(n.Sym()).Linksym()
// like staticassign but we are copying an already
// initialized value r.
func (s *Schedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *types.Type) bool {
- if rn.Class_ == ir.PFUNC {
+ if rn.Class == ir.PFUNC {
// TODO if roff != 0 { panic }
staticdata.InitFunc(l, loff, rn)
return true
}
- if rn.Class_ != ir.PEXTERN || rn.Sym().Pkg != types.LocalPkg {
+ if rn.Class != ir.PEXTERN || rn.Sym().Pkg != types.LocalPkg {
return false
}
if rn.Defn.Op() != ir.OAS {
case ir.OSTR2BYTES:
r := r.(*ir.ConvExpr)
- if l.Class_ == ir.PEXTERN && r.X.Op() == ir.OLITERAL {
+ if l.Class == ir.PEXTERN && r.X.Op() == ir.OLITERAL {
sval := ir.StringVal(r.X)
staticdata.InitSliceBytes(l, loff, sval)
return true
s.Lastlineno = base.Pos
s.Def = n
n.Vargen = int32(gen)
- n.Class_ = ctxt
+ n.Class = ctxt
if ctxt == ir.PFUNC {
n.Sym().SetFunc(true)
}
n := ir.NewNameAt(pos, s)
s.Def = n
n.SetType(t)
- n.Class_ = ir.PAUTO
+ n.Class = ir.PAUTO
n.SetEsc(ir.EscNever)
n.Curfn = curfn
n.SetUsed(true)
}
n := ir.NewDeclNameAt(pos, op, s)
- n.Class_ = ctxt // TODO(mdempsky): Move this into NewDeclNameAt too?
+ n.Class = ctxt // TODO(mdempsky): Move this into NewDeclNameAt too?
s.SetPkgDef(n)
return n
}
outermost := v.Defn.(*ir.Name)
// out parameters will be assigned to implicitly upon return.
- if outermost.Class_ != ir.PPARAMOUT && !outermost.Addrtaken() && !outermost.Assigned() && v.Type().Size() <= 128 {
+ if outermost.Class != ir.PPARAMOUT && !outermost.Addrtaken() && !outermost.Assigned() && v.Type().Size() <= 128 {
v.SetByval(true)
} else {
outermost.SetAddrtaken(true)
}
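// Editorial example of the by-value test above (hypothetical closure):
//
//	x := 0           // never addressed or reassigned, size <= 128
//	a := [256]byte{} // 256 bytes > 128
//	f := func() { sink(x, a) }
//	// x: v.SetByval(true); a: outermost.SetAddrtaken(true), captured by ref.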
for _, ln := range n.Dcl {
- if ln.Op() == ir.ONAME && (ln.Class_ == ir.PPARAM || ln.Class_ == ir.PPARAMOUT) {
+ if ln.Op() == ir.ONAME && (ln.Class == ir.PPARAM || ln.Class == ir.PPARAMOUT) {
ln.Decldepth = 1
}
}
switch n.Op() {
case ir.ONAME:
- switch n.Class_ {
+ switch n.Class {
case ir.PEXTERN:
// Variable.
w.tag('V')
w.funcExt(n)
default:
- base.Fatalf("unexpected class: %v, %v", n, n.Class_)
+ base.Fatalf("unexpected class: %v, %v", n, n.Class)
}
case ir.OLITERAL:
case ir.ONAME:
// Package scope name.
n := n.(*ir.Name)
- if (n.Class_ == ir.PEXTERN || n.Class_ == ir.PFUNC) && !ir.IsBlank(n) {
+ if (n.Class == ir.PEXTERN || n.Class == ir.PFUNC) && !ir.IsBlank(n) {
w.op(ir.ONONAME)
w.qualifiedIdent(n)
break
// PPARAM/PPARAMOUT, because we only want to include vargen in
// non-param names.
var v int32
- if n.Class_ == ir.PAUTO || (n.Class_ == ir.PAUTOHEAP && n.Stackcopy == nil) {
+ if n.Class == ir.PAUTO || (n.Class == ir.PAUTOHEAP && n.Stackcopy == nil) {
v = n.Vargen
}
// methodSym already marked m.Sym as a function.
m := ir.NewNameAt(mpos, ir.MethodSym(recv.Type, msym))
- m.Class_ = ir.PFUNC
+ m.Class = ir.PFUNC
m.SetType(mtyp)
m.Func = ir.NewFunc(mpos)
types.CalcSize(t)
}
n := ir.NewNameAt(old.Pos(), old.Sym())
- n.Class_ = old.Class()
+ n.Class = old.Class
n.SetType(types.SubstAny(old.Type(), &types_))
if len(types_) > 0 {
base.Fatalf("substArgTypes: too many argument types")
// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
for _, ln := range fn.Dcl {
- if ln.Op() == ir.ONAME && ln.Class_ == ir.PAUTO && ln.Used() {
+ if ln.Op() == ir.ONAME && ln.Class == ir.PAUTO && ln.Used() {
if guard, ok := ln.Defn.(*ir.TypeSwitchGuard); ok {
guard.Used = true
}
}
for _, ln := range fn.Dcl {
- if ln.Op() != ir.ONAME || ln.Class_ != ir.PAUTO || ln.Used() {
+ if ln.Op() != ir.ONAME || ln.Class != ir.PAUTO || ln.Used() {
continue
}
if defn, ok := ln.Defn.(*ir.TypeSwitchGuard); ok {
ir.RegFP = NewName(Lookup(".fp"))
ir.RegFP.SetType(types.Types[types.TINT32])
- ir.RegFP.Class_ = ir.PPARAM
+ ir.RegFP.Class = ir.PPARAM
ir.RegFP.SetUsed(true)
}
appendWalkStmt(&late, convas(ir.NewAssignStmt(base.Pos, lorig, r), &late))
- if name == nil || name.Addrtaken() || name.Class_ == ir.PEXTERN || name.Class_ == ir.PAUTOHEAP {
+ if name == nil || name.Addrtaken() || name.Class == ir.PEXTERN || name.Class == ir.PAUTOHEAP {
memWrite = true
continue
}
switch n.Op() {
case ir.ONAME:
n := n.(*ir.Name)
- return n.Class_ == ir.PEXTERN || n.Class_ == ir.PAUTOHEAP || n.Addrtaken()
+ return n.Class == ir.PEXTERN || n.Class == ir.PAUTOHEAP || n.Addrtaken()
case ir.OADD,
ir.OAND,
v = addr
}
- v.Class_ = ir.PPARAM
+ v.Class = ir.PPARAM
decls = append(decls, v)
fld := types.NewField(src.NoXPos, v.Sym(), v.Type())
if v.Byval() && v.Type().Width <= int64(2*types.PtrSize) {
// If it is a small variable captured by value, downgrade it to PAUTO.
- v.Class_ = ir.PAUTO
+ v.Class = ir.PAUTO
fn.Dcl = append(fn.Dcl, v)
body = append(body, ir.NewAssignStmt(base.Pos, v, cr))
} else {
// Declare variable holding addresses taken from closure
// and initialize in entry prologue.
addr := typecheck.NewName(typecheck.Lookup("&" + v.Sym().Name))
addr.SetType(types.NewPtr(v.Type()))
- addr.Class_ = ir.PAUTO
+ addr.Class = ir.PAUTO
addr.SetUsed(true)
addr.Curfn = fn
fn.Dcl = append(fn.Dcl, addr)
return false
}
n := nn.(*ir.Name)
- return n.Class_ != ir.PAUTOHEAP && n.Class_ != ir.PEXTERN
+ return n.Class != ir.PAUTOHEAP && n.Class != ir.PEXTERN
}
func litas(l ir.Node, r ir.Node, init *ir.Nodes) {
// copy static to slice
var_ = typecheck.AssignExpr(var_)
name, offset, ok := staticinit.StaticLoc(var_)
- if !ok || name.Class_ != ir.PEXTERN {
+ if !ok || name.Class != ir.PEXTERN {
base.Fatalf("slicelit: %v", var_)
}
staticdata.InitSlice(name, offset, vstat, t.NumElem())
}
name, offset, ok := staticinit.StaticLoc(as.X)
- if !ok || (name.Class_ != ir.PEXTERN && as.X != ir.BlankNode) {
+ if !ok || (name.Class != ir.PEXTERN && as.X != ir.BlankNode) {
base.Fatalf("genAsStatic: lhs %v", as.X)
}
if r.Offset_ != 0 {
base.Fatalf("genAsStatic %+v", as)
}
- if r.Class_ == ir.PFUNC {
+ if r.Class == ir.PFUNC {
staticdata.InitFunc(name, offset, r)
return
}
if ir.Names.Staticuint64s == nil {
ir.Names.Staticuint64s = typecheck.NewName(ir.Pkgs.Runtime.Lookup("staticuint64s"))
- ir.Names.Staticuint64s.Class_ = ir.PEXTERN
+ ir.Names.Staticuint64s.Class = ir.PEXTERN
// The actual type is [256]uint64, but we use [256*8]uint8 so we can address
// individual bytes.
ir.Names.Staticuint64s.SetType(types.NewArray(types.Types[types.TUINT8], 256*8))
ir.Names.Zerobase = typecheck.NewName(ir.Pkgs.Runtime.Lookup("zerobase"))
- ir.Names.Zerobase.Class_ = ir.PEXTERN
+ ir.Names.Zerobase.Class = ir.PEXTERN
ir.Names.Zerobase.SetType(types.Types[types.TUINTPTR])
}
xe := ir.NewIndexExpr(base.Pos, ir.Names.Staticuint64s, index)
xe.SetBounded(true)
value = xe
- case n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PEXTERN && n.X.(*ir.Name).Readonly():
+ case n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class == ir.PEXTERN && n.X.(*ir.Name).Readonly():
// n.X is a readonly global; use it directly.
value = n.X
case !fromType.IsInterface() && n.Esc() == ir.EscNone && fromType.Width <= 1024:
base.Fatalf("expression has untyped type: %+v", n)
}
- if n.Op() == ir.ONAME && n.(*ir.Name).Class_ == ir.PAUTOHEAP {
+ if n.Op() == ir.ONAME && n.(*ir.Name).Class == ir.PAUTOHEAP {
n := n.(*ir.Name)
nn := ir.NewStarExpr(base.Pos, n.Heapaddr)
nn.X.MarkNonNil()
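// A hedged sketch (hypothetical names): after this rewrite, a use of the
// heap-promoted x compiles as a dereference of its heap address,
//
//	x        // source-level use of a PAUTOHEAP name
//	*x_addr  // after walk: StarExpr over x.Heapaddr, marked non-nil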
// isaddrokay reports whether it is okay to pass n's address to runtime
// routines; only globals and autotmp temporaries qualify. Autotmps are safe
// because we emit explicit VARKILL instructions marking the end of those
// temporaries' lifetimes.
func isaddrokay(n ir.Node) bool {
- return ir.IsAddressable(n) && (n.Op() != ir.ONAME || n.(*ir.Name).Class_ == ir.PEXTERN || ir.IsAutoTmp(n))
+ return ir.IsAddressable(n) && (n.Op() != ir.ONAME || n.(*ir.Name).Class == ir.PEXTERN || ir.IsAutoTmp(n))
}
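// Hedged examples for isaddrokay (hypothetical names):
//
//	isaddrokay(globalG)   // true: ONAME with Class PEXTERN
//	isaddrokay(autotmp_1) // true: ir.IsAutoTmp
//	isaddrokay(localX)    // false: ordinary PAUTO; addrTemp first copies it
//	isaddrokay(m[k])      // false: not addressable at all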
// addrTemp ensures that n is okay to pass by address to runtime routines.
// race in the future.
nodpc := ir.NewNameAt(src.NoXPos, typecheck.Lookup(".fp"))
- nodpc.Class_ = ir.PPARAM
+ nodpc.Class = ir.PPARAM
nodpc.SetUsed(true)
nodpc.SetType(types.Types[types.TUINTPTR])
nodpc.SetFrameOffset(int64(-types.PtrSize))
// walkDecl walks an ODCL node.
func walkDecl(n *ir.Decl) ir.Node {
v := n.X
- if v.Class_ == ir.PAUTOHEAP {
+ if v.Class == ir.PAUTOHEAP {
if base.Flag.CompilingRuntime {
base.Errorf("%v escapes to heap, not allowed in runtime", v)
}
func paramoutheap(fn *ir.Func) bool {
for _, ln := range fn.Dcl {
- switch ln.Class_ {
+ switch ln.Class {
case ir.PPARAMOUT:
if ir.IsParamStackCopy(ln) || ln.Addrtaken() {
return true
if stackcopy := v.Name().Stackcopy; stackcopy != nil {
nn = append(nn, walkStmt(ir.NewDecl(base.Pos, ir.ODCL, v.(*ir.Name))))
- if stackcopy.Class_ == ir.PPARAM {
+ if stackcopy.Class == ir.PPARAM {
nn = append(nn, walkStmt(typecheck.Stmt(ir.NewAssignStmt(base.Pos, v, stackcopy))))
}
}
if v == nil {
continue
}
- if stackcopy := v.Name().Stackcopy; stackcopy != nil && stackcopy.Class_ == ir.PPARAMOUT {
+ if stackcopy := v.Name().Stackcopy; stackcopy != nil && stackcopy.Class == ir.PPARAMOUT {
nn = append(nn, walkStmt(typecheck.Stmt(ir.NewAssignStmt(base.Pos, stackcopy, v))))
}
}
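// Editorial sketch of the moves generated above for a heap-promoted
// parameter p and result r (hypothetical names; pHeap/rHeap are the
// PAUTOHEAP names, pStack/rStack their Stackcopy params):
//
//	// prologue: pHeap = pStack   (incoming arg copied into its heap cell)
//	// epilogue: rStack = rHeap   (result copied back for the caller)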