if l == n.Left() {
return n
}
- a := ir.SepCopy(n)
+ a := ir.SepCopy(n).(*ir.UnaryExpr)
a.SetLeft(l)
return typecheck(a, ctxExpr)
}
case ir.ONAME, ir.OLITERAL, ir.ONIL:
return n
- case ir.ODOT, ir.OLEN, ir.OCAP:
+ case ir.OLEN, ir.OCAP:
+ l := o.safeExpr(n.Left())
+ if l == n.Left() {
+ return n
+ }
+ a := ir.SepCopy(n).(*ir.UnaryExpr)
+ a.SetLeft(l)
+ return typecheck(a, ctxExpr)
+
+ case ir.ODOT:
l := o.safeExpr(n.Left())
if l == n.Left() {
return n
}
- a := ir.SepCopy(n)
+ a := ir.SepCopy(n).(*ir.SelectorExpr)
+ a.SetLeft(l)
+ return typecheck(a, ctxExpr)
+
+ case ir.ODOTPTR:
+ l := o.cheapExpr(n.Left())
+ if l == n.Left() {
+ return n
+ }
+ a := ir.SepCopy(n).(*ir.SelectorExpr)
a.SetLeft(l)
return typecheck(a, ctxExpr)
- case ir.ODOTPTR, ir.ODEREF:
+ case ir.ODEREF:
l := o.cheapExpr(n.Left())
if l == n.Left() {
return n
}
- a := ir.SepCopy(n)
+ a := ir.SepCopy(n).(*ir.StarExpr)
a.SetLeft(l)
return typecheck(a, ctxExpr)
if l == n.Left() && r == n.Right() {
return n
}
- a := ir.SepCopy(n)
+ a := ir.SepCopy(n).(*ir.IndexExpr)
a.SetLeft(l)
a.SetRight(r)
return typecheck(a, ctxExpr)
// because we emit explicit VARKILL instructions marking the end of those
// temporaries' lifetimes.
func isaddrokay(n ir.Node) bool {
- return islvalue(n) && (n.Op() != ir.ONAME || n.Class() == ir.PEXTERN || ir.IsAutoTmp(n))
+ return islvalue(n) && (n.Op() != ir.ONAME || n.(*ir.Name).Class() == ir.PEXTERN || ir.IsAutoTmp(n))
}
// addrTemp ensures that n is okay to pass by address to runtime routines.
if s.out != nil {
base.Fatalf("staticassign of const generated code: %+v", n)
}
- vstat = typecheck(vstat, ctxExpr)
+ vstat = typecheck(vstat, ctxExpr).(*ir.Name)
return vstat
}
if isaddrokay(n) {
replaced = true
case ir.OSTRUCTLIT:
for _, elem := range n.List().Slice() {
+ elem := elem.(*ir.StructKeyExpr)
if mapKeyReplaceStrConv(elem.Left()) {
replaced = true
}
case ir.OARRAYLIT:
for _, elem := range n.List().Slice() {
if elem.Op() == ir.OKEY {
- elem = elem.Right()
+ elem = elem.(*ir.KeyExpr).Right()
}
if mapKeyReplaceStrConv(elem) {
replaced = true
if base.Flag.N != 0 || instrumenting {
return
}
-
- if len(s) < 2 {
+ if len(s) < 2 || s[0] == nil || s[0].Op() != ir.OAS || s[1] == nil || s[1].Op() != ir.OCOPY {
return
}
- asn := s[0]
- copyn := s[1]
-
- if asn == nil || asn.Op() != ir.OAS {
- return
- }
- if asn.Left().Op() != ir.ONAME {
- return
- }
- if ir.IsBlank(asn.Left()) {
- return
- }
- maken := asn.Right()
- if maken == nil || maken.Op() != ir.OMAKESLICE {
- return
- }
- if maken.Esc() == EscNone {
- return
- }
- if maken.Left() == nil || maken.Right() != nil {
- return
- }
- if copyn.Op() != ir.OCOPY {
- return
- }
- if copyn.Left().Op() != ir.ONAME {
- return
- }
- if asn.Left().Sym() != copyn.Left().Sym() {
- return
- }
- if copyn.Right().Op() != ir.ONAME {
+ as := s[0].(*ir.AssignStmt)
+ cp := s[1].(*ir.BinaryExpr)
+ if as.Right() == nil || as.Right().Op() != ir.OMAKESLICE || ir.IsBlank(as.Left()) ||
+ as.Left().Op() != ir.ONAME || cp.Left().Op() != ir.ONAME || cp.Right().Op() != ir.ONAME ||
+ as.Left().Name() != cp.Left().Name() || cp.Left().Name() == cp.Right().Name() {
+ // Note the differing operators on the line above: as.X and cp.X must
+ // refer to the same name (the copy writes into the slice we just made),
+ // while cp's source must come from a *different* name.
return
}
- if copyn.Left().Sym() == copyn.Right().Sym() {
+ mk := as.Right().(*ir.MakeExpr)
+ if mk.Esc() == EscNone || mk.Left() == nil || mk.Right() != nil {
return
}
-
- maken.SetOp(ir.OMAKESLICECOPY)
- maken.SetRight(copyn.Right())
+ mk.SetOp(ir.OMAKESLICECOPY)
+ mk.SetRight(cp.Right())
// Set bounded when m = OMAKESLICE([]T, len(s)); OCOPY(m, s)
- maken.SetBounded(maken.Left().Op() == ir.OLEN && samesafeexpr(maken.Left().Left(), copyn.Right()))
-
- maken = typecheck(maken, ctxExpr)
-
+ mk.SetBounded(mk.Left().Op() == ir.OLEN && samesafeexpr(mk.Left().(*ir.UnaryExpr).Left(), cp.Right()))
+ as.SetRight(typecheck(mk, ctxExpr))
s[1] = nil // remove separate copy call
-
- return
}
// edge inserts coverage instrumentation for libfuzzer.
counter.Name().SetLibfuzzerExtraCounter(true)
// counter += 1
- incr := ir.Nod(ir.OASOP, counter, nodintconst(1))
- incr.SetSubOp(ir.OADD)
+ incr := ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, nodintconst(1))
o.append(incr)
}
// call orders the call expression n.
// n.Op is OCALLMETH/OCALLFUNC/OCALLINTER or a builtin like OCOPY.
-func (o *Order) call(n ir.Node) {
- if n.Init().Len() > 0 {
- // Caller should have already called o.init(n).
- base.Fatalf("%v with unexpected ninit", n.Op())
+func (o *Order) call(nn ir.Node) {
+ if nn.Init().Len() > 0 {
+ // Caller should have already called o.init(nn).
+ base.Fatalf("%v with unexpected ninit", nn.Op())
}
// Builtin functions.
- if n.Op() != ir.OCALLFUNC && n.Op() != ir.OCALLMETH && n.Op() != ir.OCALLINTER {
- n.SetLeft(o.expr(n.Left(), nil))
- n.SetRight(o.expr(n.Right(), nil))
- o.exprList(n.List())
+ if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLMETH && nn.Op() != ir.OCALLINTER {
+ switch n := nn.(type) {
+ default:
+ base.Fatalf("unexpected call: %+v", n)
+ case *ir.UnaryExpr:
+ n.SetLeft(o.expr(n.Left(), nil))
+ case *ir.ConvExpr:
+ n.SetLeft(o.expr(n.Left(), nil))
+ case *ir.BinaryExpr:
+ n.SetLeft(o.expr(n.Left(), nil))
+ n.SetRight(o.expr(n.Right(), nil))
+ case *ir.MakeExpr:
+ n.SetLeft(o.expr(n.Left(), nil))
+ n.SetRight(o.expr(n.Right(), nil))
+ case *ir.CallExpr:
+ o.exprList(n.List())
+ }
return
}
+ n := nn.(*ir.CallExpr)
fixVariadicCall(n)
n.SetLeft(o.expr(n.Left(), nil))
o.exprList(n.List())
// arrange for the pointer to be kept alive until the call returns,
// by copying it into a temp and marking that temp
// still alive when we pop the temp stack.
- if arg.Op() == ir.OCONVNOP && arg.Left().Type().IsUnsafePtr() {
- x := o.copyExpr(arg.Left())
- arg.SetLeft(x)
- x.Name().SetAddrtaken(true) // ensure SSA keeps the x variable
- n.PtrBody().Append(typecheck(ir.Nod(ir.OVARLIVE, x, nil), ctxStmt))
+ if arg.Op() == ir.OCONVNOP {
+ if arg.Left().Type().IsUnsafePtr() {
+ x := o.copyExpr(arg.Left())
+ arg.SetLeft(x)
+ x.Name().SetAddrtaken(true) // ensure SSA keeps the x variable
+ n.PtrBody().Append(typecheck(ir.Nod(ir.OVARLIVE, x, nil), ctxStmt))
+ }
}
}
default:
base.Fatalf("order.mapAssign %v", n.Op())
- case ir.OAS, ir.OASOP:
+ case ir.OAS:
if n.Left().Op() == ir.OINDEXMAP {
- // Make sure we evaluate the RHS before starting the map insert.
- // We need to make sure the RHS won't panic. See issue 22881.
- if n.Right().Op() == ir.OAPPEND {
- s := n.Right().List().Slice()[1:]
- for i, n := range s {
- s[i] = o.cheapExpr(n)
- }
- } else {
- n.SetRight(o.cheapExpr(n.Right()))
- }
+ n.SetRight(o.safeMapRHS(n.Right()))
+ }
+ o.out = append(o.out, n)
+ case ir.OASOP:
+ if n.Left().Op() == ir.OINDEXMAP {
+ n.SetRight(o.safeMapRHS(n.Right()))
}
o.out = append(o.out, n)
for i, m := range n.List().Slice() {
switch {
case m.Op() == ir.OINDEXMAP:
+ m := m.(*ir.IndexExpr)
if !ir.IsAutoTmp(m.Left()) {
m.SetLeft(o.copyExpr(m.Left()))
}
}
}
+func (o *Order) safeMapRHS(r ir.Node) ir.Node {
+ // Make sure we evaluate the RHS before starting the map insert.
+ // We need to make sure the RHS won't panic. See issue 22881.
+ if r.Op() == ir.OAPPEND {
+ s := r.List().Slice()[1:]
+ for i, n := range s {
+ s[i] = o.cheapExpr(n)
+ }
+ return r
+ }
+ return o.cheapExpr(r)
+}
+
// stmt orders the statement n, appending to o.out.
// Temporaries created during the statement are cleaned
// up using VARKILL instructions as possible.
// makes sure there is nothing too deep being copied.
l1 := o.safeExpr(n.Left())
l2 := ir.DeepCopy(src.NoXPos, l1)
- if l1.Op() == ir.OINDEXMAP {
+ if l2.Op() == ir.OINDEXMAP {
l2.SetIndexMapLValue(false)
}
l2 = o.copyExpr(l2)
r := o.expr(typecheck(ir.NewBinaryExpr(n.Pos(), n.SubOp(), l2, n.Right()), ctxExpr), nil)
- n = typecheck(ir.NodAt(n.Pos(), ir.OAS, l1, r), ctxStmt)
+ as := typecheck(ir.NodAt(n.Pos(), ir.OAS, l1, r), ctxStmt)
+ o.mapAssign(as)
+ o.cleanTemp(t)
+ return
}
o.mapAssign(n)
// Special: avoid copy of func call n.Right
case ir.OAS2FUNC:
+ n := n.(*ir.AssignListStmt)
t := o.markTemp()
o.exprList(n.List())
o.init(n.Rlist().First())
// OAS2MAPR: make sure key is addressable if needed,
// and make sure OINDEXMAP is not copied out.
case ir.OAS2DOTTYPE, ir.OAS2RECV, ir.OAS2MAPR:
+ n := n.(*ir.AssignListStmt)
t := o.markTemp()
o.exprList(n.List())
switch r := n.Rlist().First(); r.Op() {
- case ir.ODOTTYPE2, ir.ORECV:
+ case ir.ODOTTYPE2:
+ r.SetLeft(o.expr(r.Left(), nil))
+ case ir.ORECV:
r.SetLeft(o.expr(r.Left(), nil))
case ir.OINDEXMAP:
r.SetLeft(o.expr(r.Left(), nil))
o.out = append(o.out, n)
o.cleanTemp(t)
- case ir.OCLOSE,
- ir.OCOPY,
- ir.OPRINT,
- ir.OPRINTN,
- ir.ORECOVER,
- ir.ORECV:
+ case ir.OCLOSE, ir.ORECV:
+ t := o.markTemp()
+ n.SetLeft(o.expr(n.Left(), nil))
+ o.out = append(o.out, n)
+ o.cleanTemp(t)
+
+ case ir.OCOPY:
t := o.markTemp()
n.SetLeft(o.expr(n.Left(), nil))
n.SetRight(o.expr(n.Right(), nil))
+ o.out = append(o.out, n)
+ o.cleanTemp(t)
+
+ case ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
+ t := o.markTemp()
o.exprList(n.List())
- o.exprList(n.Rlist())
o.out = append(o.out, n)
o.cleanTemp(t)
// Mark []byte(str) range expression to reuse string backing storage.
// It is safe because the storage cannot be mutated.
+ n := n.(*ir.RangeStmt)
if n.Right().Op() == ir.OSTR2BYTES {
- n.Right().SetOp(ir.OSTR2BYTESTMP)
+ n.Right().(*ir.ConvExpr).SetOp(ir.OSTR2BYTESTMP)
}
t := o.markTemp()
case ir.OSELECT:
t := o.markTemp()
- for _, n2 := range n.List().Slice() {
- if n2.Op() != ir.OCASE {
- base.Fatalf("order select case %v", n2.Op())
- }
- r := n2.Left()
- setlineno(n2)
+ for _, cas := range n.List().Slice() {
+ cas := cas.(*ir.CaseStmt)
+ r := cas.Left()
+ setlineno(cas)
// Append any new body prologue to ninit.
// The next loop will insert ninit into nbody.
- if n2.Init().Len() != 0 {
+ if cas.Init().Len() != 0 {
base.Fatalf("order select ninit")
}
if r == nil {
base.Fatalf("unknown op in select %v", r.Op())
case ir.OSELRECV, ir.OSELRECV2:
- var dst, ok, recv ir.Node
+ var dst, ok ir.Node
+ var recv *ir.UnaryExpr
+ var def bool
if r.Op() == ir.OSELRECV {
// case x = <-c
// case <-c (dst is ir.BlankNode)
- dst, ok, recv = r.Left(), ir.BlankNode, r.Right()
+ def, dst, ok, recv = r.Colas(), r.Left(), ir.BlankNode, r.Right().(*ir.UnaryExpr)
} else {
+ r := r.(*ir.AssignListStmt)
// case x, ok = <-c
- dst, ok, recv = r.List().First(), r.List().Second(), r.Rlist().First()
+ def, dst, ok, recv = r.Colas(), r.List().First(), r.List().Second(), r.Rlist().First().(*ir.UnaryExpr)
}
// If this is case x := <-ch or case x, y := <-ch, the case has
// the ODCL nodes to declare x and y. We want to delay that
// declaration (and possible allocation) until inside the case body.
// Delete the ODCL nodes here and recreate them inside the body below.
- if r.Colas() {
+ if def {
init := r.Init().Slice()
- if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].Left() == dst {
+ if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).Left() == dst {
init = init[1:]
}
- if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].Left() == ok {
+ if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).Left() == ok {
init = init[1:]
}
r.PtrInit().Set(init)
// use channel element type for temporary to avoid conversions,
// such as in case interfacevalue = <-intchan.
// the conversion happens in the OAS instead.
- if r.Colas() {
+ if def {
dcl := ir.Nod(ir.ODCL, dst, nil)
- n2.PtrInit().Append(typecheck(dcl, ctxStmt))
+ cas.PtrInit().Append(typecheck(dcl, ctxStmt))
}
tmp := o.newTemp(recv.Left().Type().Elem(), recv.Left().Type().Elem().HasPointers())
as := ir.Nod(ir.OAS, dst, tmp)
- n2.PtrInit().Append(typecheck(as, ctxStmt))
+ cas.PtrInit().Append(typecheck(as, ctxStmt))
dst = tmp
}
if !ir.IsBlank(ok) {
- if r.Colas() {
+ if def {
dcl := ir.Nod(ir.ODCL, ok, nil)
- n2.PtrInit().Append(typecheck(dcl, ctxStmt))
+ cas.PtrInit().Append(typecheck(dcl, ctxStmt))
}
tmp := o.newTemp(types.Types[types.TBOOL], false)
as := ir.Nod(ir.OAS, ok, conv(tmp, ok.Type()))
- n2.PtrInit().Append(typecheck(as, ctxStmt))
+ cas.PtrInit().Append(typecheck(as, ctxStmt))
ok = tmp
}
if r.Op() == ir.OSELRECV {
r.SetLeft(dst)
} else {
+ r := r.(*ir.AssignListStmt)
r.List().SetIndex(0, dst)
r.List().SetIndex(1, ok)
}
- orderBlock(n2.PtrInit(), o.free)
+ orderBlock(cas.PtrInit(), o.free)
case ir.OSEND:
if r.Init().Len() != 0 {
// Now that we have accumulated all the temporaries, clean them.
// Also insert any ninit queued during the previous loop.
// (The temporary cleaning must follow that ninit work.)
- for _, n3 := range n.List().Slice() {
- orderBlock(n3.PtrBody(), o.free)
- n3.PtrBody().Prepend(o.cleanTempNoPop(t)...)
+ for _, cas := range n.List().Slice() {
+ cas := cas.(*ir.CaseStmt)
+ orderBlock(cas.PtrBody(), o.free)
+ cas.PtrBody().Prepend(o.cleanTempNoPop(t)...)
// TODO(mdempsky): Is this actually necessary?
// walkselect appears to walk Ninit.
- n3.PtrBody().Prepend(n3.Init().Slice()...)
- n3.PtrInit().Set(nil)
+ cas.PtrBody().Prepend(cas.Init().Slice()...)
+ cas.PtrInit().Set(nil)
}
o.out = append(o.out, n)
// For now just clean all the temporaries at the end.
// In practice that's fine.
case ir.OSWITCH:
+ n := n.(*ir.SwitchStmt)
if base.Debug.Libfuzzer != 0 && !hasDefaultCase(n) {
// Add empty "default:" case for instrumentation.
n.PtrList().Append(ir.Nod(ir.OCASE, nil, nil))
t := o.markTemp()
n.SetLeft(o.expr(n.Left(), nil))
for _, ncas := range n.List().Slice() {
- if ncas.Op() != ir.OCASE {
- base.Fatalf("order switch case %v", ncas.Op())
- }
+ ncas := ncas.(*ir.CaseStmt)
o.exprListInPlace(ncas.List())
orderBlock(ncas.PtrBody(), o.free)
}
base.Pos = lno
}
-func hasDefaultCase(n ir.Node) bool {
+func hasDefaultCase(n *ir.SwitchStmt) bool {
for _, ncas := range n.List().Slice() {
- if ncas.Op() != ir.OCASE {
- base.Fatalf("expected case, found %v", ncas.Op())
- }
+ ncas := ncas.(*ir.CaseStmt)
if ncas.List().Len() == 0 {
return true
}
if n == nil {
return n
}
-
lno := setlineno(n)
+ n = o.expr1(n, lhs)
+ base.Pos = lno
+ return n
+}
+
+func (o *Order) expr1(n, lhs ir.Node) ir.Node {
o.init(n)
switch n.Op() {
o.edit = o.exprNoLHS // create closure once
}
ir.EditChildren(n, o.edit)
+ return n
// Addition of strings turns into a function call.
// Allocate a temporary to hold the strings.
}
}
}
+ return n
case ir.OINDEXMAP:
n.SetLeft(o.expr(n.Left(), nil))
// key must be addressable
n.SetRight(o.mapKeyTemp(n.Left().Type(), n.Right()))
if needCopy {
- n = o.copyExpr(n)
+ return o.copyExpr(n)
}
+ return n
// concrete type (not interface) argument might need an addressable
// temporary to pass to the runtime conversion routine.
case ir.OCONVIFACE:
n.SetLeft(o.expr(n.Left(), nil))
if n.Left().Type().IsInterface() {
- break
+ return n
}
if _, needsaddr := convFuncName(n.Left().Type(), n.Type()); needsaddr || isStaticCompositeLiteral(n.Left()) {
// Need a temp if we need to pass the address to the conversion function.
// whose address we can put directly in an interface (see OCONVIFACE case in walk).
n.SetLeft(o.addrTemp(n.Left()))
}
+ return n
case ir.OCONVNOP:
if n.Type().IsKind(types.TUNSAFEPTR) && n.Left().Type().IsKind(types.TUINTPTR) && (n.Left().Op() == ir.OCALLFUNC || n.Left().Op() == ir.OCALLINTER || n.Left().Op() == ir.OCALLMETH) {
+ call := n.Left().(*ir.CallExpr)
// When reordering unsafe.Pointer(f()) into a separate
// statement, the conversion and function call must stay
// together. See golang.org/issue/15329.
- o.init(n.Left())
- o.call(n.Left())
+ o.init(call)
+ o.call(call)
if lhs == nil || lhs.Op() != ir.ONAME || instrumenting {
- n = o.copyExpr(n)
+ return o.copyExpr(n)
}
} else {
n.SetLeft(o.expr(n.Left(), nil))
}
+ return n
case ir.OANDAND, ir.OOROR:
// ... = LHS && RHS
nif.PtrRlist().Set(gen)
}
o.out = append(o.out, nif)
- n = r
+ return r
case ir.OCALLFUNC,
ir.OCALLINTER,
if isRuneCount(n) {
// len([]rune(s)) is rewritten to runtime.countrunes(s) later.
- n.Left().SetLeft(o.expr(n.Left().Left(), nil))
+ conv := n.(*ir.UnaryExpr).Left().(*ir.ConvExpr)
+ conv.SetLeft(o.expr(conv.Left(), nil))
} else {
o.call(n)
}
if lhs == nil || lhs.Op() != ir.ONAME || instrumenting {
- n = o.copyExpr(n)
+ return o.copyExpr(n)
}
+ return n
case ir.OAPPEND:
// Check for append(x, make([]T, y)...) .
if isAppendOfMake(n) {
- n.List().SetFirst(o.expr(n.List().First(), nil)) // order x
- n.List().Second().SetLeft(o.expr(n.List().Second().Left(), nil)) // order y
+ n.List().SetFirst(o.expr(n.List().First(), nil)) // order x
+ mk := n.List().Second().(*ir.MakeExpr)
+ mk.SetLeft(o.expr(mk.Left(), nil)) // order y
} else {
o.exprList(n.List())
}
if lhs == nil || lhs.Op() != ir.ONAME && !samesafeexpr(lhs, n.List().First()) {
- n = o.copyExpr(n)
+ return o.copyExpr(n)
}
+ return n
case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
n.SetLeft(o.expr(n.Left(), nil))
max = o.cheapExpr(max)
n.SetSliceBounds(low, high, max)
if lhs == nil || lhs.Op() != ir.ONAME && !samesafeexpr(lhs, n.Left()) {
- n = o.copyExpr(n)
+ return o.copyExpr(n)
}
+ return n
case ir.OCLOSURE:
+ n := n.(*ir.ClosureExpr)
if n.Transient() && len(n.Func().ClosureVars) > 0 {
prealloc[n] = o.newTemp(closureType(n), false)
}
+ return n
- case ir.OSLICELIT, ir.OCALLPART:
+ case ir.OCALLPART:
+ n := n.(*ir.CallPartExpr)
n.SetLeft(o.expr(n.Left(), nil))
- n.SetRight(o.expr(n.Right(), nil))
+ if n.Transient() {
+ t := partialCallType(n)
+ prealloc[n] = o.newTemp(t, false)
+ }
+ return n
+
+ case ir.OSLICELIT:
o.exprList(n.List())
- o.exprList(n.Rlist())
if n.Transient() {
- var t *types.Type
- switch n.Op() {
- case ir.OSLICELIT:
- t = types.NewArray(n.Type().Elem(), ir.Int64Val(n.Right()))
- case ir.OCALLPART:
- t = partialCallType(n)
- }
+ t := types.NewArray(n.Type().Elem(), ir.Int64Val(n.Right()))
prealloc[n] = o.newTemp(t, false)
}
+ return n
case ir.ODOTTYPE, ir.ODOTTYPE2:
n.SetLeft(o.expr(n.Left(), nil))
if !isdirectiface(n.Type()) || instrumenting {
- n = o.copyExprClear(n)
+ return o.copyExprClear(n)
}
+ return n
case ir.ORECV:
n.SetLeft(o.expr(n.Left(), nil))
- n = o.copyExprClear(n)
+ return o.copyExprClear(n)
case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
n.SetLeft(o.expr(n.Left(), nil))
// buffer during conversion. String comparison does not
// memorize the strings for later use, so it is safe.
if n.Left().Op() == ir.OBYTES2STR {
- n.Left().SetOp(ir.OBYTES2STRTMP)
+ n.Left().(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
}
if n.Right().Op() == ir.OBYTES2STR {
- n.Right().SetOp(ir.OBYTES2STRTMP)
+ n.Right().(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
}
case t.IsStruct() || t.IsArray():
n.SetLeft(o.addrTemp(n.Left()))
n.SetRight(o.addrTemp(n.Right()))
}
+ return n
+
case ir.OMAPLIT:
// Order map by converting:
// map[int]int{
// See issue 26552.
entries := n.List().Slice()
statics := entries[:0]
- var dynamics []ir.Node
+ var dynamics []*ir.KeyExpr
for _, r := range entries {
- if r.Op() != ir.OKEY {
- base.Fatalf("OMAPLIT entry not OKEY: %v\n", r)
- }
+ r := r.(*ir.KeyExpr)
if !isStaticCompositeLiteral(r.Left()) || !isStaticCompositeLiteral(r.Right()) {
dynamics = append(dynamics, r)
// Recursively ordering some static entries can change them to dynamic;
// e.g., OCONVIFACE nodes. See #31777.
- r = o.expr(r, nil)
+ r = o.expr(r, nil).(*ir.KeyExpr)
if !isStaticCompositeLiteral(r.Left()) || !isStaticCompositeLiteral(r.Right()) {
dynamics = append(dynamics, r)
continue
n.PtrList().Set(statics)
if len(dynamics) == 0 {
- break
+ return n
}
// Emit the creation of the map (with all its static entries).
as := ir.Nod(ir.OAS, m, n)
typecheck(as, ctxStmt)
o.stmt(as)
- n = m
// Emit eval+insert of dynamic entries, one at a time.
for _, r := range dynamics {
- as := ir.Nod(ir.OAS, ir.Nod(ir.OINDEX, n, r.Left()), r.Right())
+ as := ir.Nod(ir.OAS, ir.Nod(ir.OINDEX, m, r.Left()), r.Right())
typecheck(as, ctxStmt) // Note: this converts the OINDEX to an OINDEXMAP
o.stmt(as)
}
+ return m
}
- base.Pos = lno
- return n
+ // No shared return here: each case above type-asserts its own node and returns for itself.
}
// as2 orders OAS2XXXX nodes. It creates temporaries to ensure left-to-right assignment.
// tmp1, tmp2, tmp3 = ...
// a, b, a = tmp1, tmp2, tmp3
// This is necessary to ensure left to right assignment order.
-func (o *Order) as2(n ir.Node) {
+func (o *Order) as2(n *ir.AssignListStmt) {
tmplist := []ir.Node{}
left := []ir.Node{}
for ni, l := range n.List().Slice() {
// okAs2 orders OAS2XXX with ok.
// Just like as2, this also adds temporaries to ensure left-to-right assignment.
-func (o *Order) okAs2(n ir.Node) {
+func (o *Order) okAs2(n *ir.AssignListStmt) {
var tmp1, tmp2 ir.Node
if !ir.IsBlank(n.List().First()) {
typ := n.Rlist().First().Type()