+++ /dev/null
-// Code generated from gen/decArgs.rules; DO NOT EDIT.
-// generated with: cd gen; go run *.go
-
-package ssa
-
-func rewriteValuedecArgs(v *Value) bool {
- switch v.Op {
- case OpArg:
- return rewriteValuedecArgs_OpArg(v)
- }
- return false
-}
-func rewriteValuedecArgs_OpArg(v *Value) bool {
- b := v.Block
- config := b.Func.Config
- fe := b.Func.fe
- typ := &b.Func.Config.Types
- // match: (Arg {n} [off])
- // cond: v.Type.IsString()
- // result: (StringMake (Arg <typ.BytePtr> {n} [off]) (Arg <typ.Int> {n} [off+int32(config.PtrSize)]))
- for {
- off := auxIntToInt32(v.AuxInt)
- n := auxToSym(v.Aux)
- if !(v.Type.IsString()) {
- break
- }
- v.reset(OpStringMake)
- v0 := b.NewValue0(v.Pos, OpArg, typ.BytePtr)
- v0.AuxInt = int32ToAuxInt(off)
- v0.Aux = symToAux(n)
- v1 := b.NewValue0(v.Pos, OpArg, typ.Int)
- v1.AuxInt = int32ToAuxInt(off + int32(config.PtrSize))
- v1.Aux = symToAux(n)
- v.AddArg2(v0, v1)
- return true
- }
- // match: (Arg {n} [off])
- // cond: v.Type.IsSlice()
- // result: (SliceMake (Arg <v.Type.Elem().PtrTo()> {n} [off]) (Arg <typ.Int> {n} [off+int32(config.PtrSize)]) (Arg <typ.Int> {n} [off+2*int32(config.PtrSize)]))
- for {
- off := auxIntToInt32(v.AuxInt)
- n := auxToSym(v.Aux)
- if !(v.Type.IsSlice()) {
- break
- }
- v.reset(OpSliceMake)
- v0 := b.NewValue0(v.Pos, OpArg, v.Type.Elem().PtrTo())
- v0.AuxInt = int32ToAuxInt(off)
- v0.Aux = symToAux(n)
- v1 := b.NewValue0(v.Pos, OpArg, typ.Int)
- v1.AuxInt = int32ToAuxInt(off + int32(config.PtrSize))
- v1.Aux = symToAux(n)
- v2 := b.NewValue0(v.Pos, OpArg, typ.Int)
- v2.AuxInt = int32ToAuxInt(off + 2*int32(config.PtrSize))
- v2.Aux = symToAux(n)
- v.AddArg3(v0, v1, v2)
- return true
- }
- // match: (Arg {n} [off])
- // cond: v.Type.IsInterface()
- // result: (IMake (Arg <typ.Uintptr> {n} [off]) (Arg <typ.BytePtr> {n} [off+int32(config.PtrSize)]))
- for {
- off := auxIntToInt32(v.AuxInt)
- n := auxToSym(v.Aux)
- if !(v.Type.IsInterface()) {
- break
- }
- v.reset(OpIMake)
- v0 := b.NewValue0(v.Pos, OpArg, typ.Uintptr)
- v0.AuxInt = int32ToAuxInt(off)
- v0.Aux = symToAux(n)
- v1 := b.NewValue0(v.Pos, OpArg, typ.BytePtr)
- v1.AuxInt = int32ToAuxInt(off + int32(config.PtrSize))
- v1.Aux = symToAux(n)
- v.AddArg2(v0, v1)
- return true
- }
- // match: (Arg {n} [off])
- // cond: v.Type.IsComplex() && v.Type.Size() == 16
- // result: (ComplexMake (Arg <typ.Float64> {n} [off]) (Arg <typ.Float64> {n} [off+8]))
- for {
- off := auxIntToInt32(v.AuxInt)
- n := auxToSym(v.Aux)
- if !(v.Type.IsComplex() && v.Type.Size() == 16) {
- break
- }
- v.reset(OpComplexMake)
- v0 := b.NewValue0(v.Pos, OpArg, typ.Float64)
- v0.AuxInt = int32ToAuxInt(off)
- v0.Aux = symToAux(n)
- v1 := b.NewValue0(v.Pos, OpArg, typ.Float64)
- v1.AuxInt = int32ToAuxInt(off + 8)
- v1.Aux = symToAux(n)
- v.AddArg2(v0, v1)
- return true
- }
- // match: (Arg {n} [off])
- // cond: v.Type.IsComplex() && v.Type.Size() == 8
- // result: (ComplexMake (Arg <typ.Float32> {n} [off]) (Arg <typ.Float32> {n} [off+4]))
- for {
- off := auxIntToInt32(v.AuxInt)
- n := auxToSym(v.Aux)
- if !(v.Type.IsComplex() && v.Type.Size() == 8) {
- break
- }
- v.reset(OpComplexMake)
- v0 := b.NewValue0(v.Pos, OpArg, typ.Float32)
- v0.AuxInt = int32ToAuxInt(off)
- v0.Aux = symToAux(n)
- v1 := b.NewValue0(v.Pos, OpArg, typ.Float32)
- v1.AuxInt = int32ToAuxInt(off + 4)
- v1.Aux = symToAux(n)
- v.AddArg2(v0, v1)
- return true
- }
- // match: (Arg <t>)
- // cond: t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t)
- // result: (StructMake0)
- for {
- t := v.Type
- if !(t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t)) {
- break
- }
- v.reset(OpStructMake0)
- return true
- }
- // match: (Arg <t> {n} [off])
- // cond: t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t)
- // result: (StructMake1 (Arg <t.FieldType(0)> {n} [off+int32(t.FieldOff(0))]))
- for {
- t := v.Type
- off := auxIntToInt32(v.AuxInt)
- n := auxToSym(v.Aux)
- if !(t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t)) {
- break
- }
- v.reset(OpStructMake1)
- v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0))
- v0.AuxInt = int32ToAuxInt(off + int32(t.FieldOff(0)))
- v0.Aux = symToAux(n)
- v.AddArg(v0)
- return true
- }
- // match: (Arg <t> {n} [off])
- // cond: t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t)
- // result: (StructMake2 (Arg <t.FieldType(0)> {n} [off+int32(t.FieldOff(0))]) (Arg <t.FieldType(1)> {n} [off+int32(t.FieldOff(1))]))
- for {
- t := v.Type
- off := auxIntToInt32(v.AuxInt)
- n := auxToSym(v.Aux)
- if !(t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t)) {
- break
- }
- v.reset(OpStructMake2)
- v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0))
- v0.AuxInt = int32ToAuxInt(off + int32(t.FieldOff(0)))
- v0.Aux = symToAux(n)
- v1 := b.NewValue0(v.Pos, OpArg, t.FieldType(1))
- v1.AuxInt = int32ToAuxInt(off + int32(t.FieldOff(1)))
- v1.Aux = symToAux(n)
- v.AddArg2(v0, v1)
- return true
- }
- // match: (Arg <t> {n} [off])
- // cond: t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t)
- // result: (StructMake3 (Arg <t.FieldType(0)> {n} [off+int32(t.FieldOff(0))]) (Arg <t.FieldType(1)> {n} [off+int32(t.FieldOff(1))]) (Arg <t.FieldType(2)> {n} [off+int32(t.FieldOff(2))]))
- for {
- t := v.Type
- off := auxIntToInt32(v.AuxInt)
- n := auxToSym(v.Aux)
- if !(t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t)) {
- break
- }
- v.reset(OpStructMake3)
- v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0))
- v0.AuxInt = int32ToAuxInt(off + int32(t.FieldOff(0)))
- v0.Aux = symToAux(n)
- v1 := b.NewValue0(v.Pos, OpArg, t.FieldType(1))
- v1.AuxInt = int32ToAuxInt(off + int32(t.FieldOff(1)))
- v1.Aux = symToAux(n)
- v2 := b.NewValue0(v.Pos, OpArg, t.FieldType(2))
- v2.AuxInt = int32ToAuxInt(off + int32(t.FieldOff(2)))
- v2.Aux = symToAux(n)
- v.AddArg3(v0, v1, v2)
- return true
- }
- // match: (Arg <t> {n} [off])
- // cond: t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t)
- // result: (StructMake4 (Arg <t.FieldType(0)> {n} [off+int32(t.FieldOff(0))]) (Arg <t.FieldType(1)> {n} [off+int32(t.FieldOff(1))]) (Arg <t.FieldType(2)> {n} [off+int32(t.FieldOff(2))]) (Arg <t.FieldType(3)> {n} [off+int32(t.FieldOff(3))]))
- for {
- t := v.Type
- off := auxIntToInt32(v.AuxInt)
- n := auxToSym(v.Aux)
- if !(t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t)) {
- break
- }
- v.reset(OpStructMake4)
- v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0))
- v0.AuxInt = int32ToAuxInt(off + int32(t.FieldOff(0)))
- v0.Aux = symToAux(n)
- v1 := b.NewValue0(v.Pos, OpArg, t.FieldType(1))
- v1.AuxInt = int32ToAuxInt(off + int32(t.FieldOff(1)))
- v1.Aux = symToAux(n)
- v2 := b.NewValue0(v.Pos, OpArg, t.FieldType(2))
- v2.AuxInt = int32ToAuxInt(off + int32(t.FieldOff(2)))
- v2.Aux = symToAux(n)
- v3 := b.NewValue0(v.Pos, OpArg, t.FieldType(3))
- v3.AuxInt = int32ToAuxInt(off + int32(t.FieldOff(3)))
- v3.Aux = symToAux(n)
- v.AddArg4(v0, v1, v2, v3)
- return true
- }
- // match: (Arg <t>)
- // cond: t.IsArray() && t.NumElem() == 0
- // result: (ArrayMake0)
- for {
- t := v.Type
- if !(t.IsArray() && t.NumElem() == 0) {
- break
- }
- v.reset(OpArrayMake0)
- return true
- }
- // match: (Arg <t> {n} [off])
- // cond: t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t)
- // result: (ArrayMake1 (Arg <t.Elem()> {n} [off]))
- for {
- t := v.Type
- off := auxIntToInt32(v.AuxInt)
- n := auxToSym(v.Aux)
- if !(t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t)) {
- break
- }
- v.reset(OpArrayMake1)
- v0 := b.NewValue0(v.Pos, OpArg, t.Elem())
- v0.AuxInt = int32ToAuxInt(off)
- v0.Aux = symToAux(n)
- v.AddArg(v0)
- return true
- }
- return false
-}
-func rewriteBlockdecArgs(b *Block) bool {
- switch b.Kind {
- }
- return false
-}
// It returns a BlockRet block that ends the control flow. Its control value
// will be set to the final memory state.
func (s *state) exit() *ssa.Block {
- lateResultLowering := s.f.DebugTest && ssa.LateCallExpansionEnabledWithin(s.f)
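+ // Late call expansion is now always on, so only the debug flag gates late result lowering here.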
+ lateResultLowering := s.f.DebugTest
if s.hasdefer {
if s.hasOpenDefers {
if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
s.lastDeferExit = deferExit
s.lastDeferCount = len(s.openDefers)
zeroval := s.constInt8(types.Types[types.TUINT8], 0)
- testLateExpansion := ssa.LateCallExpansionEnabledWithin(s.f)
// Test for and run defers in reverse order
for i := len(s.openDefers) - 1; i >= 0; i-- {
r := s.openDefers[i]
if r.rcvr != nil {
// rcvr in case of OCALLINTER
v := s.load(r.rcvr.Type.Elem(), r.rcvr)
- addr := s.constOffPtrSP(s.f.Config.Types.UintptrPtr, argStart)
ACArgs = append(ACArgs, ssa.Param{Type: types.Types[types.TUINTPTR], Offset: int32(argStart)})
- if testLateExpansion {
- callArgs = append(callArgs, v)
- } else {
- s.store(types.Types[types.TUINTPTR], addr, v)
- }
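+ // The interface receiver is passed directly as a call argument; it is no longer stored to a stack slot here.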
+ callArgs = append(callArgs, v)
}
for j, argAddrVal := range r.argVals {
f := getParam(r.n, j)
- pt := types.NewPtr(f.Type)
ACArgs = append(ACArgs, ssa.Param{Type: f.Type, Offset: int32(argStart + f.Offset)})
- if testLateExpansion {
- var a *ssa.Value
- if !TypeOK(f.Type) {
- a = s.newValue2(ssa.OpDereference, f.Type, argAddrVal, s.mem())
- } else {
- a = s.load(f.Type, argAddrVal)
- }
- callArgs = append(callArgs, a)
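+ // SSA-able defer arguments are loaded and passed by value; non-SSA-able ones are passed via OpDereference of their saved address.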
+ var a *ssa.Value
+ if !TypeOK(f.Type) {
+ a = s.newValue2(ssa.OpDereference, f.Type, argAddrVal, s.mem())
} else {
- addr := s.constOffPtrSP(pt, argStart+f.Offset)
- if !TypeOK(f.Type) {
- s.move(f.Type, addr, argAddrVal)
- } else {
- argVal := s.load(f.Type, argAddrVal)
- s.storeType(f.Type, addr, argVal, 0, false)
- }
+ a = s.load(f.Type, argAddrVal)
}
+ callArgs = append(callArgs, a)
}
var call *ssa.Value
if r.closure != nil {
s.maybeNilCheckClosure(v, callDefer)
codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
aux := ssa.ClosureAuxCall(ACArgs, ACResults)
- if testLateExpansion {
- callArgs = append(callArgs, s.mem())
- call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
- call.AddArgs(callArgs...)
- } else {
- call = s.newValue3A(ssa.OpClosureCall, types.TypeMem, aux, codeptr, v, s.mem())
- }
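+ // The memory state is appended as the final call argument and the deferred closure is called with OpClosureLECall.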
+ callArgs = append(callArgs, s.mem())
+ call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
+ call.AddArgs(callArgs...)
} else {
aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), ACArgs, ACResults)
- if testLateExpansion {
- callArgs = append(callArgs, s.mem())
- call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
- call.AddArgs(callArgs...)
- } else {
- // Do a static call if the original call was a static function or method
- call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem())
- }
+ callArgs = append(callArgs, s.mem())
+ call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
+ call.AddArgs(callArgs...)
}
call.AuxInt = stksize
- if testLateExpansion {
- s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
- } else {
- s.vars[memVar] = call
- }
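+ // The call's memory result follows its declared results, so it is selected at index len(ACResults).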
+ s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
// Make sure that the stack slots with pointers are kept live
// through the call (which is a pre-emption point). Also, we will
// use the first call of the last defer exit to compute liveness
}
}
- testLateExpansion := false
inRegisters := false
switch n.Op() {
case ir.OCALLFUNC:
- testLateExpansion = k != callDeferStack && ssa.LateCallExpansionEnabledWithin(s.f)
if k == callNormal && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
fn := fn.(*ir.Name)
callee = fn
s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
}
fn := fn.(*ir.SelectorExpr)
- testLateExpansion = k != callDeferStack && ssa.LateCallExpansionEnabledWithin(s.f)
var iclosure *ssa.Value
iclosure, rcvr = s.getClosureAndRcvr(fn)
if k == callNormal {
var call *ssa.Value
if k == callDeferStack {
- testLateExpansion = ssa.LateCallExpansionEnabledWithin(s.f)
// Make a defer struct d on the stack.
t := deferstruct(stksize)
d := typecheck.TempAt(n.Pos(), s.curfn, t)
// Call runtime.deferprocStack with pointer to _defer record.
ACArgs = append(ACArgs, ssa.Param{Type: types.Types[types.TUINTPTR], Offset: int32(base.Ctxt.FixedFrameSize())})
aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, ACArgs, ACResults)
- if testLateExpansion {
- callArgs = append(callArgs, addr, s.mem())
- call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
- call.AddArgs(callArgs...)
- } else {
- arg0 := s.constOffPtrSP(types.Types[types.TUINTPTR], base.Ctxt.FixedFrameSize())
- s.store(types.Types[types.TUINTPTR], arg0, addr)
- call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem())
- }
+ callArgs = append(callArgs, addr, s.mem())
+ call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
+ call.AddArgs(callArgs...)
if stksize < int64(types.PtrSize) {
// We need room for both the call to deferprocStack and the call to
// the deferred function.
// Write argsize and closure (args to newproc/deferproc).
argsize := s.constInt32(types.Types[types.TUINT32], int32(stksize))
ACArgs = append(ACArgs, ssa.Param{Type: types.Types[types.TUINT32], Offset: int32(argStart)})
- if testLateExpansion {
- callArgs = append(callArgs, argsize)
- } else {
- addr := s.constOffPtrSP(s.f.Config.Types.UInt32Ptr, argStart)
- s.store(types.Types[types.TUINT32], addr, argsize)
- }
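+ // The argsize and closure arguments to deferproc/newproc are passed as call arguments instead of being stored at fixed stack offsets.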
+ callArgs = append(callArgs, argsize)
ACArgs = append(ACArgs, ssa.Param{Type: types.Types[types.TUINTPTR], Offset: int32(argStart) + int32(types.PtrSize)})
- if testLateExpansion {
- callArgs = append(callArgs, closure)
- } else {
- addr := s.constOffPtrSP(s.f.Config.Types.UintptrPtr, argStart+int64(types.PtrSize))
- s.store(types.Types[types.TUINTPTR], addr, closure)
- }
+ callArgs = append(callArgs, closure)
stksize += 2 * int64(types.PtrSize)
argStart += 2 * int64(types.PtrSize)
}
// Set receiver (for interface calls).
if rcvr != nil {
- addr := s.constOffPtrSP(s.f.Config.Types.UintptrPtr, argStart)
ACArgs = append(ACArgs, ssa.Param{Type: types.Types[types.TUINTPTR], Offset: int32(argStart)})
- if testLateExpansion {
- callArgs = append(callArgs, rcvr)
- } else {
- s.store(types.Types[types.TUINTPTR], addr, rcvr)
- }
+ callArgs = append(callArgs, rcvr)
}
// Write args.
}
for i, n := range args {
f := t.Params().Field(i)
- ACArg, arg := s.putArg(n, f.Type, argStart+f.Offset, testLateExpansion)
+ ACArg, arg := s.putArg(n, f.Type, argStart+f.Offset)
ACArgs = append(ACArgs, ACArg)
callArgs = append(callArgs, arg)
}
switch {
case k == callDefer:
aux := ssa.StaticAuxCall(ir.Syms.Deferproc, ACArgs, ACResults)
- if testLateExpansion {
- call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
- call.AddArgs(callArgs...)
- } else {
- call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem())
- }
+ call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
+ call.AddArgs(callArgs...)
case k == callGo:
aux := ssa.StaticAuxCall(ir.Syms.Newproc, ACArgs, ACResults)
- if testLateExpansion {
- call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
- call.AddArgs(callArgs...)
- } else {
- call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem())
- }
+ call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
+ call.AddArgs(callArgs...)
case closure != nil:
// rawLoad because loading the code pointer from a
// closure is always safe, but IsSanitizerSafeAddr
// can't always figure that out currently, and it's
// critical that we not clobber any arguments already
// stored onto the stack.
codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
- if testLateExpansion {
- aux := ssa.ClosureAuxCall(ACArgs, ACResults)
- call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
- call.AddArgs(callArgs...)
- } else {
- call = s.newValue3A(ssa.OpClosureCall, types.TypeMem, ssa.ClosureAuxCall(ACArgs, ACResults), codeptr, closure, s.mem())
- }
+ aux := ssa.ClosureAuxCall(ACArgs, ACResults)
+ call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
+ call.AddArgs(callArgs...)
case codeptr != nil:
- if testLateExpansion {
- aux := ssa.InterfaceAuxCall(ACArgs, ACResults)
- call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
- call.AddArgs(callArgs...)
- } else {
- call = s.newValue2A(ssa.OpInterCall, types.TypeMem, ssa.InterfaceAuxCall(ACArgs, ACResults), codeptr, s.mem())
- }
+ aux := ssa.InterfaceAuxCall(ACArgs, ACResults)
+ call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
+ call.AddArgs(callArgs...)
case callee != nil:
- if testLateExpansion {
- aux := ssa.StaticAuxCall(callTargetLSym(callee, s.curfn.LSym), ACArgs, ACResults)
- call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
- call.AddArgs(callArgs...)
- } else {
- call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, ssa.StaticAuxCall(callTargetLSym(callee, s.curfn.LSym), ACArgs, ACResults), s.mem())
- }
+ aux := ssa.StaticAuxCall(callTargetLSym(callee, s.curfn.LSym), ACArgs, ACResults)
+ call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
+ call.AddArgs(callArgs...)
default:
s.Fatalf("bad call type %v %v", n.Op(), n)
}
call.AuxInt = stksize // Call operations carry the argsize of the callee along with them
}
- if testLateExpansion {
- s.prevCall = call
- s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
- } else {
- s.vars[memVar] = call
- }
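+ // Record the call and take its memory result via OpSelectN; the function results are selected from the call value below.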
+ s.prevCall = call
+ s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
// Insert OVARLIVE nodes
for _, name := range n.KeepAlive {
s.stmt(ir.NewUnaryExpr(n.Pos(), ir.OVARLIVE, name))
fp := res.Field(0)
if returnResultAddr {
pt := types.NewPtr(fp.Type)
- if testLateExpansion {
- return s.newValue1I(ssa.OpSelectNAddr, pt, 0, call)
- }
- return s.constOffPtrSP(pt, fp.Offset+base.Ctxt.FixedFrameSize())
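+ // The result address is derived from the call itself (OpSelectNAddr) rather than from a fixed offset off SP.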
+ return s.newValue1I(ssa.OpSelectNAddr, pt, 0, call)
}
- if testLateExpansion {
- return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
- }
- return s.load(n.Type(), s.constOffPtrSP(types.NewPtr(fp.Type), fp.Offset+base.Ctxt.FixedFrameSize()))
+ return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
}
// maybeNilCheckClosure checks if a nil check of a closure is needed in some
s.prevCall = nil
- // Write args to the stack
+ // Lay out the args and collect them for the call
off := base.Ctxt.FixedFrameSize()
- testLateExpansion := ssa.LateCallExpansionEnabledWithin(s.f)
var ACArgs []ssa.Param
var ACResults []ssa.Param
var callArgs []*ssa.Value
off = types.Rnd(off, t.Alignment())
size := t.Size()
ACArgs = append(ACArgs, ssa.Param{Type: t, Offset: int32(off)})
- if testLateExpansion {
- callArgs = append(callArgs, arg)
- } else {
- ptr := s.constOffPtrSP(t.PtrTo(), off)
- s.store(t, ptr, arg)
- }
+ callArgs = append(callArgs, arg)
off += size
}
off = types.Rnd(off, int64(types.RegSize))
// Issue call
var call *ssa.Value
aux := ssa.StaticAuxCall(fn, ACArgs, ACResults)
- if testLateExpansion {
callArgs = append(callArgs, s.mem())
call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
call.AddArgs(callArgs...)
s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
- } else {
- call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem())
- s.vars[memVar] = call
- }
if !returns {
// Finish block
// Load results
res := make([]*ssa.Value, len(results))
- if testLateExpansion {
- for i, t := range results {
- off = types.Rnd(off, t.Alignment())
- if TypeOK(t) {
- res[i] = s.newValue1I(ssa.OpSelectN, t, int64(i), call)
- } else {
- addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), int64(i), call)
- res[i] = s.rawLoad(t, addr)
- }
- off += t.Size()
- }
- } else {
- for i, t := range results {
- off = types.Rnd(off, t.Alignment())
- ptr := s.constOffPtrSP(types.NewPtr(t), off)
- res[i] = s.load(t, ptr)
- off += t.Size()
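+ // Runtime call results are read from the call value: OpSelectN for SSA-able types, a raw load through OpSelectNAddr otherwise.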
+ for i, t := range results {
+ off = types.Rnd(off, t.Alignment())
+ if TypeOK(t) {
+ res[i] = s.newValue1I(ssa.OpSelectN, t, int64(i), call)
+ } else {
+ addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), int64(i), call)
+ res[i] = s.rawLoad(t, addr)
}
+ off += t.Size()
}
off = types.Rnd(off, int64(types.PtrSize))
// putArg evaluates n for the purpose of passing it as an argument to a function and returns the corresponding Param for the call.
-// If forLateExpandedCall is true, it returns the argument value to pass to the call operation.
-// If forLateExpandedCall is false, then the value is stored at the specified stack offset, and the returned value is nil.
+// It returns the argument value to pass to the call operation.
-func (s *state) putArg(n ir.Node, t *types.Type, off int64, forLateExpandedCall bool) (ssa.Param, *ssa.Value) {
+func (s *state) putArg(n ir.Node, t *types.Type, off int64) (ssa.Param, *ssa.Value) {
var a *ssa.Value
- if forLateExpandedCall {
- if !TypeOK(t) {
- a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
- } else {
- a = s.expr(n)
- }
+ if !TypeOK(t) {
+ a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
} else {
- s.storeArgWithBase(n, t, s.sp, off)
+ a = s.expr(n)
}
return ssa.Param{Type: t, Offset: int32(off)}, a
}