Cypherpunks repositories - gostls13.git/commitdiff
[dev.ssa] cmd/compile/internal/ssa: compute outarg size correctly
author: Keith Randall <khr@golang.org>
Tue, 14 Jul 2015 20:20:08 +0000 (13:20 -0700)
committer: Keith Randall <khr@golang.org>
Thu, 16 Jul 2015 21:06:13 +0000 (21:06 +0000)
Keep track of the outargs size needed at each call.
Compute the size of the outargs section of the stack frame.  It's just
the max of the outargs size at all the callsites in the function.

Change-Id: I3d0640f654f01307633b1a5f75bab16e211ea6c0
Reviewed-on: https://go-review.googlesource.com/12178
Reviewed-by: Josh Bleecher Snyder <josharian@gmail.com>
src/cmd/compile/internal/gc/ssa.go
src/cmd/compile/internal/ssa/TODO
src/cmd/compile/internal/ssa/gen/AMD64.rules
src/cmd/compile/internal/ssa/rewriteAMD64.go
src/cmd/compile/internal/ssa/stackalloc.go

index 96351def6e8d87ba71ef3a71aa1acd30f64a3a03..2dad3e1a10da5ccfddd8ffc8d8818c4d5bc2d093 100644 (file)
@@ -626,6 +626,8 @@ func (s *state) expr(n *Node) *ssa.Value {
                        entry := s.newValue2(ssa.OpLoad, s.config.Uintptr, closure, s.mem())
                        call = s.newValue3(ssa.OpClosureCall, ssa.TypeMem, entry, closure, s.mem())
                }
+               dowidth(n.Left.Type)
+               call.AuxInt = n.Left.Type.Argwid // call operations carry the argsize of the callee along with them
                b := s.endBlock()
                b.Kind = ssa.BlockCall
                b.Control = call
index 340c905654461500e539cb828483349683916054..cfaf520510c830a30f2443534953a403eecf4ba1 100644 (file)
@@ -29,7 +29,6 @@ Regalloc
  - Make calls clobber all registers
 
 StackAlloc:
- - Compute size of outargs section correctly
  - Sort variables so all ptr-containing ones are first (so stack
    maps are smaller)
  - Reuse stack slots for noninterfering and type-compatible variables
index 6882621f710569ca2107fc4e58b5aa8df62411d8..47e1fb9c6a0219c202288e68b4fe5eb2390ffc7c 100644 (file)
@@ -88,8 +88,8 @@
 (If (SETB cmp) yes no) -> (ULT cmp yes no)
 (If cond yes no) && cond.Op == OpAMD64MOVBload -> (NE (TESTB <TypeFlags> cond cond) yes no)
 
-(StaticCall {target} mem) -> (CALLstatic {target} mem)
-(ClosureCall entry closure mem) -> (CALLclosure entry closure mem)
+(StaticCall [argwid] {target} mem) -> (CALLstatic [argwid] {target} mem)
+(ClosureCall [argwid] entry closure mem) -> (CALLclosure [argwid] entry closure mem)
 
 // Rules below here apply some simple optimizations after lowering.
 // TODO: Should this be a separate pass?
index 7393cd9a8974c38f492fe379a43205beab10513a..7e892c384475f5a5e76647419be3b503da7baddc 100644 (file)
@@ -460,10 +460,11 @@ func rewriteValueAMD64(v *Value, config *Config) bool {
        endf8ca12fe79290bc82b11cfa463bc9413:
                ;
        case OpClosureCall:
-               // match: (ClosureCall entry closure mem)
+               // match: (ClosureCall [argwid] entry closure mem)
                // cond:
-               // result: (CALLclosure entry closure mem)
+               // result: (CALLclosure [argwid] entry closure mem)
                {
+                       argwid := v.AuxInt
                        entry := v.Args[0]
                        closure := v.Args[1]
                        mem := v.Args[2]
@@ -471,13 +472,14 @@ func rewriteValueAMD64(v *Value, config *Config) bool {
                        v.AuxInt = 0
                        v.Aux = nil
                        v.resetArgs()
+                       v.AuxInt = argwid
                        v.AddArg(entry)
                        v.AddArg(closure)
                        v.AddArg(mem)
                        return true
                }
-               goto endee26da781e813a3c602ccb4f7ade98c7
-       endee26da781e813a3c602ccb4f7ade98c7:
+               goto endfd75d26316012d86cb71d0dd1214259b
+       endfd75d26316012d86cb71d0dd1214259b:
                ;
        case OpConst:
                // match: (Const <t> [val])
@@ -1611,22 +1613,24 @@ func rewriteValueAMD64(v *Value, config *Config) bool {
        end78e66b6fc298684ff4ac8aec5ce873c9:
                ;
        case OpStaticCall:
-               // match: (StaticCall {target} mem)
+               // match: (StaticCall [argwid] {target} mem)
                // cond:
-               // result: (CALLstatic {target} mem)
+               // result: (CALLstatic [argwid] {target} mem)
                {
+                       argwid := v.AuxInt
                        target := v.Aux
                        mem := v.Args[0]
                        v.Op = OpAMD64CALLstatic
                        v.AuxInt = 0
                        v.Aux = nil
                        v.resetArgs()
+                       v.AuxInt = argwid
                        v.Aux = target
                        v.AddArg(mem)
                        return true
                }
-               goto end1948857a7cfc2a4f905045e58d3b9ec1
-       end1948857a7cfc2a4f905045e58d3b9ec1:
+               goto end32c5cbec813d1c2ae94fc9b1090e4b2a
+       end32c5cbec813d1c2ae94fc9b1090e4b2a:
                ;
        case OpStore:
                // match: (Store ptr val mem)
index 85a55ece7c4dfdbbee81d1fdc4ea99cdf9939b33..0bd64a1a141250b145850fe7c455c75a6ae5fc90 100644 (file)
@@ -9,12 +9,16 @@ package ssa
 func stackalloc(f *Func) {
        home := f.RegAlloc
 
-       // First compute the size of the outargs section.
-       n := int64(16) //TODO: compute max of all callsites
-
-       // Include one slot for deferreturn.
-       if false && n < f.Config.ptrSize { //TODO: check for deferreturn
-               n = f.Config.ptrSize
+       // Start with space for callee arguments/returns.
+       var n int64
+       for _, b := range f.Blocks {
+               if b.Kind != BlockCall {
+                       continue
+               }
+               v := b.Control
+               if n < v.AuxInt {
+                       n = v.AuxInt
+               }
        }
 
        // TODO: group variables by ptr/nonptr, size, etc.  Emit ptr vars last