if ssafn != nil && usessa {
genssa(ssafn, ptxt, gcargs, gclocals)
+ if Curfn.Func.Endlineno != 0 {
+ lineno = Curfn.Func.Endlineno
+ }
return
}
Genlist(Curfn.Func.Enter)
// Allocate starting values
s.labels = map[string]*ssaLabel{}
s.labeledNodes = map[*Node]*ssaLabel{}
- s.startmem = s.entryNewValue0(ssa.OpArg, ssa.TypeMem)
+ s.startmem = s.entryNewValue0(ssa.OpInitMem, ssa.TypeMem)
s.sp = s.entryNewValue0(ssa.OpSP, Types[TUINTPTR]) // TODO: use generic pointer type (unsafe.Pointer?) instead
s.sb = s.entryNewValue0(ssa.OpSB, Types[TUINTPTR])
if name == &memVar {
return s.startmem
}
+ if canSSA(name) {
+ v := s.entryNewValue0A(ssa.OpArg, t, name)
+ // v starts with AuxInt == 0.
+ s.addNamedValue(name, v)
+ return v
+ }
// variable is live at the entry block. Load it.
addr := s.decladdrs[name]
if addr == nil {
// Don't track autotmp_ variables.
return
}
- if n.Class == PPARAM || n.Class == PPARAMOUT {
- // TODO: Remove this
+ if n.Class == PAUTO && (v.Type.IsString() || v.Type.IsSlice() || v.Type.IsInterface()) {
+ // TODO: can't handle auto compound objects with pointers yet.
+ // The live variable analysis barfs because we don't put VARDEF
+ // pseudos in the right place when we spill to these nodes.
return
}
if n.Class == PAUTO && n.Xoffset != 0 {
s.Fatalf("AUTO var with offset %s %d", n, n.Xoffset)
}
- values, ok := s.f.NamedValues[n]
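+ // NamedValues is keyed by LocalSlot (node, type, offset); track this value under n's whole-variable slot.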
+ loc := ssa.LocalSlot{N: n, Type: n.Type, Off: 0}
+ values, ok := s.f.NamedValues[loc]
if !ok {
- s.f.Names = append(s.f.Names, n)
+ s.f.Names = append(s.f.Names, loc)
}
- s.f.NamedValues[n] = append(values, v)
+ s.f.NamedValues[loc] = append(values, v)
}
// an unresolved branch
return
}
p := Prog(movSizeByType(v.Type))
- n := autoVar(v.Args[0])
+ n, off := autoVar(v.Args[0])
p.From.Type = obj.TYPE_MEM
- p.From.Name = obj.NAME_AUTO
p.From.Node = n
p.From.Sym = Linksym(n.Sym)
+ p.From.Offset = off
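+ // Incoming parameters live in the argument area; address them with
+ // NAME_PARAM and fold in the node's frame offset (Xoffset).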
+ if n.Class == PPARAM {
+ p.From.Name = obj.NAME_PARAM
+ p.From.Offset += n.Xoffset
+ } else {
+ p.From.Name = obj.NAME_AUTO
+ }
p.To.Type = obj.TYPE_REG
p.To.Reg = regnum(v)
p := Prog(movSizeByType(v.Type))
p.From.Type = obj.TYPE_REG
p.From.Reg = regnum(v.Args[0])
- n := autoVar(v)
+ n, off := autoVar(v)
p.To.Type = obj.TYPE_MEM
- p.To.Name = obj.NAME_AUTO
p.To.Node = n
p.To.Sym = Linksym(n.Sym)
+ p.To.Offset = off
+ if n.Class == PPARAM {
+ p.To.Name = obj.NAME_PARAM
+ p.To.Offset += n.Xoffset
+ } else {
+ p.To.Name = obj.NAME_AUTO
+ }
case ssa.OpPhi:
// just check to make sure regalloc and stackalloc did it right
if v.Type.IsMemory() {
v.Fatalf("const value %v shouldn't have a location", v)
}
- case ssa.OpArg:
+ case ssa.OpInitMem:
// memory arg needs no code
- // TODO: check that only mem arg goes here.
+ case ssa.OpArg:
+ // input args need no code
case ssa.OpAMD64LoweredGetClosurePtr:
// Output is hardwired to DX only,
// and DX contains the closure pointer on
return ssaRegToReg[reg.(*ssa.Register).Num]
}
-// autoVar returns a *Node representing the auto variable assigned to v.
-func autoVar(v *ssa.Value) *Node {
- return v.Block.Func.RegAlloc[v.ID].(*ssa.LocalSlot).N.(*Node)
+// autoVar returns a *Node and int64 representing the auto variable and offset within it
+// where v should be spilled.
+func autoVar(v *ssa.Value) (*Node, int64) {
+ loc := v.Block.Func.RegAlloc[v.ID].(ssa.LocalSlot)
+ return loc.N.(*Node), loc.Off
}
// ssaExport exports a bunch of compiler services for the ssa backend.
var passes = [...]pass{
{"phielim", phielim},
{"copyelim", copyelim},
- {"decompose", decompose},
{"early deadcode", deadcode}, // remove generated dead code to avoid doing pointless work during opt
+ {"decompose", decompose},
{"opt", opt},
{"opt deadcode", deadcode}, // remove any blocks orphaned during opt
{"generic cse", cse},
// NewFunc returns a new, empty function object
func (c *Config) NewFunc() *Func {
// TODO(khr): should this function take name, type, etc. as arguments?
- return &Func{Config: c, NamedValues: map[GCNode][]*Value{}}
+ return &Func{Config: c, NamedValues: map[LocalSlot][]*Value{}}
}
func (c *Config) Logf(msg string, args ...interface{}) { c.fe.Logf(msg, args...) }
}
f.Blocks = f.Blocks[:i]
- // Remove dead entries from namedValues map.
- for name, values := range f.NamedValues {
- i := 0
+ // Remove dead & duplicate entries from the NamedValues map.
+ s := newSparseSet(f.NumValues())
+ i = 0
+ for _, name := range f.Names {
+ j := 0
+ s.clear()
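+ // j counts the values kept for this name; s records their IDs so duplicates are dropped.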
+ values := f.NamedValues[name]
for _, v := range values {
for v.Op == OpCopy {
v = v.Args[0]
}
- if live[v.ID] {
- values[i] = v
- i++
+ if live[v.ID] && !s.contains(v.ID) {
+ values[j] = v
+ j++
+ s.add(v.ID)
}
}
- f.NamedValues[name] = values[:i]
- tail := values[i:]
- for j := range tail {
- tail[j] = nil
+ if j == 0 {
+ delete(f.NamedValues, name)
+ } else {
+ f.Names[i] = name
+ i++
+ for k := len(values) - 1; k >= j; k-- {
+ values[k] = nil
+ }
+ f.NamedValues[name] = values[:j]
}
}
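+ // Zero the dropped tail of f.Names so its nodes aren't kept reachable by the backing array.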
+ for k := len(f.Names) - 1; k >= i; k-- {
+ f.Names[k] = LocalSlot{}
+ }
+ f.Names = f.Names[:i]
// TODO: renumber Blocks and Values densely?
// TODO: save dead Values and Blocks for reuse? Or should we just let GC handle it?
c := testConfig(t)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Goto("exit")),
Bloc("exit",
Exit("mem")),
c := testConfig(t)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("deadval", OpConst64, TypeInt64, 37, nil),
Goto("exit")),
Bloc("exit",
fun := Fun(c, "entry",
Bloc("entry",
Valu("cond", OpConstBool, TypeBool, 0, nil),
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
If("cond", "then", "else")),
Bloc("then",
Goto("exit")),
c := testConfig(t)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("cond", OpConstBool, TypeBool, 0, nil),
If("cond", "b2", "b4")),
Bloc("b2",
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr", Elem_: elemType} // dummy for testing
fun := Fun(c, "entry",
Bloc("entry",
- Valu("start", OpArg, TypeMem, 0, ".mem"),
+ Valu("start", OpInitMem, TypeMem, 0, ".mem"),
Valu("sb", OpSB, TypeInvalid, 0, nil),
Valu("v", OpConstBool, TypeBool, 1, nil),
Valu("addr1", OpAddr, ptrType, 0, nil, "sb"),
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
fun := Fun(c, "entry",
Bloc("entry",
- Valu("start", OpArg, TypeMem, 0, ".mem"),
+ Valu("start", OpInitMem, TypeMem, 0, ".mem"),
Valu("sb", OpSB, TypeInvalid, 0, nil),
Valu("v", OpConstBool, TypeBool, 1, nil),
Valu("addr", OpAddr, ptrType, 0, nil, "sb"),
t2 := &TypeImpl{Size_: 4, Ptr: true, Name: "t2"}
fun := Fun(c, "entry",
Bloc("entry",
- Valu("start", OpArg, TypeMem, 0, ".mem"),
+ Valu("start", OpInitMem, TypeMem, 0, ".mem"),
Valu("sb", OpSB, TypeInvalid, 0, nil),
Valu("v", OpConstBool, TypeBool, 1, nil),
Valu("addr1", OpAddr, t1, 0, nil, "sb"),
}
}
}
- // TODO: decompose complex?
// TODO: decompose 64-bit ops on 32-bit archs?
+
+ // Split up named values into their components.
+ // NOTE: the component values we are making are dead at this point.
+ // We must do the opt pass before any deadcode elimination or we will
+ // lose the name->value correspondence.
+ for _, name := range f.Names {
+ t := name.Type
+ switch {
+ case t.IsComplex():
+ var elemType Type
+ if t.Size() == 16 {
+ elemType = f.Config.fe.TypeFloat64()
+ } else {
+ elemType = f.Config.fe.TypeFloat32()
+ }
+ rName := LocalSlot{name.N, elemType, name.Off}
+ iName := LocalSlot{name.N, elemType, name.Off + elemType.Size()}
+ f.Names = append(f.Names, rName, iName)
+ for _, v := range f.NamedValues[name] {
+ r := v.Block.NewValue1(v.Line, OpComplexReal, elemType, v)
+ i := v.Block.NewValue1(v.Line, OpComplexImag, elemType, v)
+ f.NamedValues[rName] = append(f.NamedValues[rName], r)
+ f.NamedValues[iName] = append(f.NamedValues[iName], i)
+ }
+ case t.IsString():
+ ptrType := f.Config.fe.TypeBytePtr()
+ lenType := f.Config.fe.TypeInt()
+ ptrName := LocalSlot{name.N, ptrType, name.Off}
+ lenName := LocalSlot{name.N, lenType, name.Off + f.Config.PtrSize}
+ f.Names = append(f.Names, ptrName, lenName)
+ for _, v := range f.NamedValues[name] {
+ ptr := v.Block.NewValue1(v.Line, OpStringPtr, ptrType, v)
+ len := v.Block.NewValue1(v.Line, OpStringLen, lenType, v)
+ f.NamedValues[ptrName] = append(f.NamedValues[ptrName], ptr)
+ f.NamedValues[lenName] = append(f.NamedValues[lenName], len)
+ }
+ case t.IsSlice():
+ ptrType := f.Config.fe.TypeBytePtr()
+ lenType := f.Config.fe.TypeInt()
+ ptrName := LocalSlot{name.N, ptrType, name.Off}
+ lenName := LocalSlot{name.N, lenType, name.Off + f.Config.PtrSize}
+ capName := LocalSlot{name.N, lenType, name.Off + 2*f.Config.PtrSize}
+ f.Names = append(f.Names, ptrName, lenName, capName)
+ for _, v := range f.NamedValues[name] {
+ ptr := v.Block.NewValue1(v.Line, OpSlicePtr, ptrType, v)
+ len := v.Block.NewValue1(v.Line, OpSliceLen, lenType, v)
+ cap := v.Block.NewValue1(v.Line, OpSliceCap, lenType, v)
+ f.NamedValues[ptrName] = append(f.NamedValues[ptrName], ptr)
+ f.NamedValues[lenName] = append(f.NamedValues[lenName], len)
+ f.NamedValues[capName] = append(f.NamedValues[capName], cap)
+ }
+ case t.IsInterface():
+ ptrType := f.Config.fe.TypeBytePtr()
+ typeName := LocalSlot{name.N, ptrType, name.Off}
+ dataName := LocalSlot{name.N, ptrType, name.Off + f.Config.PtrSize}
+ f.Names = append(f.Names, typeName, dataName)
+ for _, v := range f.NamedValues[name] {
+ typ := v.Block.NewValue1(v.Line, OpITab, ptrType, v)
+ data := v.Block.NewValue1(v.Line, OpIData, ptrType, v)
+ f.NamedValues[typeName] = append(f.NamedValues[typeName], typ)
+ f.NamedValues[dataName] = append(f.NamedValues[dataName], data)
+ }
+ //case t.IsStruct():
+ // TODO
+ case t.Size() > f.Config.IntSize:
+ f.Unimplementedf("undecomposed type %s", t)
+ }
+ }
}
func decomposeStringPhi(v *Value) {
var blocs []bloc
blocs = append(blocs,
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Goto(blockn(0)),
),
)
var blocs []bloc
blocs = append(blocs,
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("p", OpConstBool, TypeBool, 1, nil),
Goto(blockn(0)),
),
var blocs []bloc
blocs = append(blocs,
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("p", OpConstBool, TypeBool, 1, nil),
Goto(blockn(0)),
),
var blocs []bloc
blocs = append(blocs,
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("p", OpConstBool, TypeBool, 1, nil),
Goto(blockn(0)),
),
var blocs []bloc
blocs = append(blocs,
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("p", OpConstBool, TypeBool, 1, nil),
Goto(blockn(0)),
),
c := testConfig(t)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Exit("mem")))
doms := map[string]string{}
c := testConfig(t)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Goto("a")),
Bloc("a",
Goto("b")),
c := testConfig(t)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("p", OpConstBool, TypeBool, 1, nil),
If("p", "a", "c")),
Bloc("a",
c := testConfig(t)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("p", OpConstBool, TypeBool, 0, nil),
If("p", "b3", "b5")),
Bloc("b2", Exit("mem")),
Bloc("entry",
Goto("first")),
Bloc("first",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("p", OpConstBool, TypeBool, 1, nil),
Goto("a")),
Bloc("a",
c := testConfig(t)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("p", OpConstBool, TypeBool, 1, nil),
If("p", "a", "c")),
Bloc("a",
c := testConfig(t)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("p", OpConstBool, TypeBool, 1, nil),
If("p", "a", "c")),
Bloc("a",
// note lack of an exit block
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("p", OpConstBool, TypeBool, 1, nil),
Goto("a")),
Bloc("a",
// when register allocation is done, maps value ids to locations
RegAlloc []Location
- // map from *gc.Node to set of Values that represent that Node.
- // The Node must be an ONAME with PPARAM, PPARAMOUT, or PAUTO class.
- NamedValues map[GCNode][]*Value
+ // map from LocalSlot to set of Values that we want to store in that slot.
+ NamedValues map[LocalSlot][]*Value
// Names is a copy of NamedValues.Keys. We keep a separate list
// of keys to make iteration order deterministic.
- Names []GCNode
+ Names []LocalSlot
}
// NumBlocks returns an integer larger than the id of any Block in the Func.
//
// fun := Fun("entry",
// Bloc("entry",
-// Valu("mem", OpArg, TypeMem, 0, ".mem"),
+// Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
// Goto("exit")),
// Bloc("exit",
// Exit("mem")),
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Valu("sum", OpAdd64, TypeInt64, 0, nil, "a", "b"),
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Goto("exit")),
Bloc("exit",
Exit("mem")))
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Valu("sum", OpAdd64, TypeInt64, 0, nil, "a", "b"),
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Goto("exit")),
Bloc("exit",
Exit("mem"))),
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Valu("sum", OpAdd64, TypeInt64, 0, nil, "a", "b"),
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Goto("exit")),
Bloc("exit",
Exit("mem"))),
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Valu("sum", OpAdd64, TypeInt64, 0, nil, "a", "b"),
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Goto("exit")),
Bloc("exit",
Exit("mem"))),
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Valu("sum", OpAdd64, TypeInt64, 0, nil, "a", "b"),
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Goto("exit"))),
},
}
{
Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Goto("exit")),
Bloc("exit",
Exit("mem"))),
Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Exit("mem"))),
},
// value order changed
{
Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("b", OpConst64, TypeInt64, 26, nil),
Valu("a", OpConst64, TypeInt64, 14, nil),
Exit("mem"))),
Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Exit("mem"))),
{
Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("a", OpConst64, TypeInt64, 14, nil),
Exit("mem"))),
Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("a", OpConst64, TypeInt64, 26, nil),
Exit("mem"))),
},
{
Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("a", OpConst64, TypeInt64, 0, 14),
Exit("mem"))),
Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("a", OpConst64, TypeInt64, 0, 26),
Exit("mem"))),
},
{
Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Valu("sum", OpAdd64, TypeInt64, 0, nil, "a", "b"),
Exit("mem"))),
Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("a", OpConst64, TypeInt64, 0, nil),
Valu("b", OpConst64, TypeInt64, 14, nil),
Valu("sum", OpAdd64, TypeInt64, 0, nil, "b", "a"),
(Load <t> ptr mem) && t.IsString() ->
(StringMake
(Load <config.fe.TypeBytePtr()> ptr mem)
- (Load <config.fe.TypeUintptr()>
- (OffPtr <config.fe.TypeUintptr().PtrTo()> [config.PtrSize] ptr)
+ (Load <config.fe.TypeInt()>
+ (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] ptr)
mem))
(Store [2*config.PtrSize] dst (StringMake ptr len) mem) ->
(Store [config.PtrSize]
- (OffPtr <config.fe.TypeUintptr().PtrTo()> [config.PtrSize] dst)
+ (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] dst)
len
(Store [config.PtrSize] dst ptr mem))
(Load <t> ptr mem) && t.IsSlice() ->
(SliceMake
(Load <config.fe.TypeBytePtr()> ptr mem)
- (Load <config.fe.TypeUintptr()>
- (OffPtr <config.fe.TypeUintptr().PtrTo()> [config.PtrSize] ptr)
+ (Load <config.fe.TypeInt()>
+ (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] ptr)
mem)
- (Load <config.fe.TypeUintptr()>
- (OffPtr <config.fe.TypeUintptr().PtrTo()> [2*config.PtrSize] ptr)
+ (Load <config.fe.TypeInt()>
+ (OffPtr <config.fe.TypeInt().PtrTo()> [2*config.PtrSize] ptr)
mem))
(Store [3*config.PtrSize] dst (SliceMake ptr len cap) mem) ->
(Store [config.PtrSize]
- (OffPtr <config.fe.TypeUintptr().PtrTo()> [2*config.PtrSize] dst)
+ (OffPtr <config.fe.TypeInt().PtrTo()> [2*config.PtrSize] dst)
cap
(Store [config.PtrSize]
- (OffPtr <config.fe.TypeUintptr().PtrTo()> [config.PtrSize] dst)
+ (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] dst)
len
(Store [config.PtrSize] dst ptr mem)))
// Get rid of Convert ops for pointer arithmetic on unsafe.Pointer.
(Convert (Add64 (Convert ptr) off)) -> (Add64 ptr off)
+
+// Decompose compound argument values
+(Arg {n} [off]) && v.Type.IsString() ->
+ (StringMake
+ (Arg <config.fe.TypeBytePtr()> {n} [off])
+ (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]))
+
+(Arg {n} [off]) && v.Type.IsSlice() ->
+ (SliceMake
+ (Arg <config.fe.TypeBytePtr()> {n} [off])
+ (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize])
+ (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize]))
+
+(Arg {n} [off]) && v.Type.IsInterface() ->
+ (IMake
+ (Arg <config.fe.TypeBytePtr()> {n} [off])
+ (Arg <config.fe.TypeBytePtr()> {n} [off+config.PtrSize]))
+
+(Arg {n} [off]) && v.Type.IsComplex() && v.Type.Size() == 16 ->
+ (ComplexMake
+ (Arg <config.fe.TypeFloat64()> {n} [off])
+ (Arg <config.fe.TypeFloat64()> {n} [off+8]))
+
+(Arg {n} [off]) && v.Type.IsComplex() && v.Type.Size() == 8 ->
+ (ComplexMake
+ (Arg <config.fe.TypeFloat32()> {n} [off])
+ (Arg <config.fe.TypeFloat32()> {n} [off+4]))
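+
+// For example, with off=0 and PtrSize=8 a string-typed Arg becomes
+// (StringMake (Arg <*byte> {n} [0]) (Arg <int> {n} [8])).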
// TODO: Const32F, ...
// Constant-like things
- {name: "Arg"}, // memory input to the function.
+ {name: "InitMem"}, // memory input to the function.
+ {name: "Arg"}, // argument to the function. aux=GCNode of arg, off = offset in that arg.
// The address of a variable. arg0 is the base pointer (SB or SP, depending
// on whether it is a global or stack variable). The Aux field identifies the
func (p htmlFuncPrinter) endDepCycle() {
fmt.Fprintln(p.w, "</span>")
}
+
+func (p htmlFuncPrinter) named(n LocalSlot, vals []*Value) {
+ // TODO
+}
package ssa
+import "fmt"
+
// A place that an ssa variable can reside.
type Location interface {
Name() string // name to use in assembly templates: %rax, 16(%rsp), ...
}
// A LocalSlot is a location in the stack frame.
+// It is (possibly a subpiece of) a PPARAM, PPARAMOUT, or PAUTO ONAME node.
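+// For example, on amd64 the length word of a string variable s occupies the slot {N: s, Type: int, Off: 8}.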
type LocalSlot struct {
- N GCNode // a *gc.Node for an auto variable
+ N GCNode // an ONAME *gc.Node representing a variable on the stack
+ Type Type // type of slot
+ Off int64 // offset of slot in N
}
-func (s *LocalSlot) Name() string {
- return s.N.String()
+func (s LocalSlot) Name() string {
+ if s.Off == 0 {
+ return fmt.Sprintf("%s[%s]", s.N, s.Type)
+ }
+ return fmt.Sprintf("%s+%d[%s]", s.N, s.Off, s.Type)
}
continue // lowered
}
switch v.Op {
- case OpSP, OpSB, OpArg, OpCopy, OpPhi, OpVarDef, OpVarKill:
+ case OpSP, OpSB, OpInitMem, OpArg, OpCopy, OpPhi, OpVarDef, OpVarKill:
continue // ok not to lower
}
s := "not lowered: " + v.Op.String() + " " + v.Type.SimpleString()
var blocs []bloc
blocs = append(blocs,
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("sb", OpSB, TypeInvalid, 0, nil),
Goto(blockn(0)),
),
c := NewConfig("amd64", DummyFrontend{t}, nil)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("sb", OpSB, TypeInvalid, 0, nil),
Goto("checkPtr")),
Bloc("checkPtr",
c := NewConfig("amd64", DummyFrontend{t}, nil)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("sb", OpSB, TypeInvalid, 0, nil),
Goto("checkPtr")),
Bloc("checkPtr",
c := NewConfig("amd64", DummyFrontend{t}, nil)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("sb", OpSB, TypeInvalid, 0, nil),
Goto("checkPtr")),
Bloc("checkPtr",
c := NewConfig("amd64", DummyFrontend{t}, nil)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("sb", OpSB, TypeInvalid, 0, nil),
Goto("checkPtr")),
Bloc("checkPtr",
c := NewConfig("amd64", DummyFrontend{t}, nil)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("sb", OpSB, TypeInvalid, 0, nil),
Valu("sp", OpSP, TypeInvalid, 0, nil),
Valu("baddr", OpAddr, TypeBool, 0, "b", "sp"),
c := NewConfig("amd64", DummyFrontend{t}, nil)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("sb", OpSB, TypeInvalid, 0, nil),
Goto("checkPtr")),
Bloc("checkPtr",
c := NewConfig("amd64", DummyFrontend{t}, nil)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("sb", OpSB, TypeInvalid, 0, nil),
Goto("checkPtr")),
Bloc("checkPtr",
c := NewConfig("amd64", DummyFrontend{t}, nil)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("sb", OpSB, TypeInvalid, 0, nil),
Goto("checkPtr")),
Bloc("checkPtr",
c := NewConfig("amd64", DummyFrontend{t}, nil)
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("sb", OpSB, TypeInvalid, 0, nil),
Goto("checkPtr")),
Bloc("checkPtr",
OpConst64F
OpConstInterface
OpConstSlice
+ OpInitMem
OpArg
OpAddr
OpSP
name: "ConstSlice",
generic: true,
},
+ {
+ name: "InitMem",
+ generic: true,
+ },
{
name: "Arg",
generic: true,
value(v *Value, live bool)
startDepCycle()
endDepCycle()
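+ // named is called once per NamedValues entry, after all blocks have been printed.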
+ named(n LocalSlot, vals []*Value)
}
type stringFuncPrinter struct {
func (p stringFuncPrinter) endDepCycle() {}
+func (p stringFuncPrinter) named(n LocalSlot, vals []*Value) {
+ fmt.Fprintf(p.w, "name %s: %v\n", n.Name(), vals)
+}
+
func fprintFunc(p funcPrinter, f *Func) {
reachable, live := findlive(f)
p.header(f)
p.endBlock(b)
}
+ for name, vals := range f.NamedValues {
+ p.named(name, vals)
+ }
}
pc++
continue
}
+ if v.Op == OpArg {
+ // Args are "pre-spilled" values. We don't allocate
+ // any register here. We just point the spill record at the
+ // value itself; any later user will restore (load) it before use.
+ s.values[v.ID].spill = v
+ s.values[v.ID].spillUsed = true // use is guaranteed
+ b.Values = append(b.Values, v)
+ pc++
+ continue
+ }
s.clearUses(pc*2 - 1)
regspec := opcodeTable[v.Op].reg
if regDebug {
c := testConfig(t)
f := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("x", OpAMD64MOVBconst, TypeInt8, 0, 1),
Valu("y", OpAMD64MOVBconst, TypeInt8, 0, 2),
Valu("a", OpAMD64TESTB, TypeBool, 0, nil, "x", "y"),
return rewriteValuegeneric_OpAnd64(v, config)
case OpAnd8:
return rewriteValuegeneric_OpAnd8(v, config)
+ case OpArg:
+ return rewriteValuegeneric_OpArg(v, config)
case OpArrayIndex:
return rewriteValuegeneric_OpArrayIndex(v, config)
case OpCom16:
;
return false
}
+func rewriteValuegeneric_OpArg(v *Value, config *Config) bool {
+ b := v.Block
+ _ = b
+ // match: (Arg {n} [off])
+ // cond: v.Type.IsString()
+ // result: (StringMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]))
+ {
+ n := v.Aux
+ off := v.AuxInt
+ if !(v.Type.IsString()) {
+ goto end939d3f946bf61eb85b46b374e7afa9e9
+ }
+ v.Op = OpStringMake
+ v.AuxInt = 0
+ v.Aux = nil
+ v.resetArgs()
+ v0 := b.NewValue0(v.Line, OpArg, TypeInvalid)
+ v0.Type = config.fe.TypeBytePtr()
+ v0.Aux = n
+ v0.AuxInt = off
+ v.AddArg(v0)
+ v1 := b.NewValue0(v.Line, OpArg, TypeInvalid)
+ v1.Type = config.fe.TypeInt()
+ v1.Aux = n
+ v1.AuxInt = off + config.PtrSize
+ v.AddArg(v1)
+ return true
+ }
+ goto end939d3f946bf61eb85b46b374e7afa9e9
+end939d3f946bf61eb85b46b374e7afa9e9:
+ ;
+ // match: (Arg {n} [off])
+ // cond: v.Type.IsSlice()
+ // result: (SliceMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]) (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize]))
+ {
+ n := v.Aux
+ off := v.AuxInt
+ if !(v.Type.IsSlice()) {
+ goto endab4b93ad3b1cf55e5bf25d1fd9cd498e
+ }
+ v.Op = OpSliceMake
+ v.AuxInt = 0
+ v.Aux = nil
+ v.resetArgs()
+ v0 := b.NewValue0(v.Line, OpArg, TypeInvalid)
+ v0.Type = config.fe.TypeBytePtr()
+ v0.Aux = n
+ v0.AuxInt = off
+ v.AddArg(v0)
+ v1 := b.NewValue0(v.Line, OpArg, TypeInvalid)
+ v1.Type = config.fe.TypeInt()
+ v1.Aux = n
+ v1.AuxInt = off + config.PtrSize
+ v.AddArg(v1)
+ v2 := b.NewValue0(v.Line, OpArg, TypeInvalid)
+ v2.Type = config.fe.TypeInt()
+ v2.Aux = n
+ v2.AuxInt = off + 2*config.PtrSize
+ v.AddArg(v2)
+ return true
+ }
+ goto endab4b93ad3b1cf55e5bf25d1fd9cd498e
+endab4b93ad3b1cf55e5bf25d1fd9cd498e:
+ ;
+ // match: (Arg {n} [off])
+ // cond: v.Type.IsInterface()
+ // result: (IMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeBytePtr()> {n} [off+config.PtrSize]))
+ {
+ n := v.Aux
+ off := v.AuxInt
+ if !(v.Type.IsInterface()) {
+ goto end851de8e588a39e81b4e2aef06566bf3e
+ }
+ v.Op = OpIMake
+ v.AuxInt = 0
+ v.Aux = nil
+ v.resetArgs()
+ v0 := b.NewValue0(v.Line, OpArg, TypeInvalid)
+ v0.Type = config.fe.TypeBytePtr()
+ v0.Aux = n
+ v0.AuxInt = off
+ v.AddArg(v0)
+ v1 := b.NewValue0(v.Line, OpArg, TypeInvalid)
+ v1.Type = config.fe.TypeBytePtr()
+ v1.Aux = n
+ v1.AuxInt = off + config.PtrSize
+ v.AddArg(v1)
+ return true
+ }
+ goto end851de8e588a39e81b4e2aef06566bf3e
+end851de8e588a39e81b4e2aef06566bf3e:
+ ;
+ // match: (Arg {n} [off])
+ // cond: v.Type.IsComplex() && v.Type.Size() == 16
+ // result: (ComplexMake (Arg <config.fe.TypeFloat64()> {n} [off]) (Arg <config.fe.TypeFloat64()> {n} [off+8]))
+ {
+ n := v.Aux
+ off := v.AuxInt
+ if !(v.Type.IsComplex() && v.Type.Size() == 16) {
+ goto end0988fc6a62c810b2f4976cb6cf44387f
+ }
+ v.Op = OpComplexMake
+ v.AuxInt = 0
+ v.Aux = nil
+ v.resetArgs()
+ v0 := b.NewValue0(v.Line, OpArg, TypeInvalid)
+ v0.Type = config.fe.TypeFloat64()
+ v0.Aux = n
+ v0.AuxInt = off
+ v.AddArg(v0)
+ v1 := b.NewValue0(v.Line, OpArg, TypeInvalid)
+ v1.Type = config.fe.TypeFloat64()
+ v1.Aux = n
+ v1.AuxInt = off + 8
+ v.AddArg(v1)
+ return true
+ }
+ goto end0988fc6a62c810b2f4976cb6cf44387f
+end0988fc6a62c810b2f4976cb6cf44387f:
+ ;
+ // match: (Arg {n} [off])
+ // cond: v.Type.IsComplex() && v.Type.Size() == 8
+ // result: (ComplexMake (Arg <config.fe.TypeFloat32()> {n} [off]) (Arg <config.fe.TypeFloat32()> {n} [off+4]))
+ {
+ n := v.Aux
+ off := v.AuxInt
+ if !(v.Type.IsComplex() && v.Type.Size() == 8) {
+ goto enda348e93e0036873dd7089a2939c22e3e
+ }
+ v.Op = OpComplexMake
+ v.AuxInt = 0
+ v.Aux = nil
+ v.resetArgs()
+ v0 := b.NewValue0(v.Line, OpArg, TypeInvalid)
+ v0.Type = config.fe.TypeFloat32()
+ v0.Aux = n
+ v0.AuxInt = off
+ v.AddArg(v0)
+ v1 := b.NewValue0(v.Line, OpArg, TypeInvalid)
+ v1.Type = config.fe.TypeFloat32()
+ v1.Aux = n
+ v1.AuxInt = off + 4
+ v.AddArg(v1)
+ return true
+ }
+ goto enda348e93e0036873dd7089a2939c22e3e
+enda348e93e0036873dd7089a2939c22e3e:
+ ;
+ return false
+}
func rewriteValuegeneric_OpArrayIndex(v *Value, config *Config) bool {
b := v.Block
_ = b
;
// match: (Load <t> ptr mem)
// cond: t.IsString()
- // result: (StringMake (Load <config.fe.TypeBytePtr()> ptr mem) (Load <config.fe.TypeUintptr()> (OffPtr <config.fe.TypeUintptr().PtrTo()> [config.PtrSize] ptr) mem))
+ // result: (StringMake (Load <config.fe.TypeBytePtr()> ptr mem) (Load <config.fe.TypeInt()> (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] ptr) mem))
{
t := v.Type
ptr := v.Args[0]
mem := v.Args[1]
if !(t.IsString()) {
- goto end7c75255555bf9dd796298d9f6eaf9cf2
+ goto enddd15a6f3d53a6ce7a19d4e181dd1c13a
}
v.Op = OpStringMake
v.AuxInt = 0
v0.AddArg(mem)
v.AddArg(v0)
v1 := b.NewValue0(v.Line, OpLoad, TypeInvalid)
- v1.Type = config.fe.TypeUintptr()
+ v1.Type = config.fe.TypeInt()
v2 := b.NewValue0(v.Line, OpOffPtr, TypeInvalid)
- v2.Type = config.fe.TypeUintptr().PtrTo()
+ v2.Type = config.fe.TypeInt().PtrTo()
v2.AuxInt = config.PtrSize
v2.AddArg(ptr)
v1.AddArg(v2)
v.AddArg(v1)
return true
}
- goto end7c75255555bf9dd796298d9f6eaf9cf2
-end7c75255555bf9dd796298d9f6eaf9cf2:
+ goto enddd15a6f3d53a6ce7a19d4e181dd1c13a
+enddd15a6f3d53a6ce7a19d4e181dd1c13a:
;
// match: (Load <t> ptr mem)
// cond: t.IsSlice()
- // result: (SliceMake (Load <config.fe.TypeBytePtr()> ptr mem) (Load <config.fe.TypeUintptr()> (OffPtr <config.fe.TypeUintptr().PtrTo()> [config.PtrSize] ptr) mem) (Load <config.fe.TypeUintptr()> (OffPtr <config.fe.TypeUintptr().PtrTo()> [2*config.PtrSize] ptr) mem))
+ // result: (SliceMake (Load <config.fe.TypeBytePtr()> ptr mem) (Load <config.fe.TypeInt()> (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] ptr) mem) (Load <config.fe.TypeInt()> (OffPtr <config.fe.TypeInt().PtrTo()> [2*config.PtrSize] ptr) mem))
{
t := v.Type
ptr := v.Args[0]
mem := v.Args[1]
if !(t.IsSlice()) {
- goto end12c46556d962198680eb3238859e3016
+ goto end65e8b0055aa7491b9b6066d9fe1b2c13
}
v.Op = OpSliceMake
v.AuxInt = 0
v0.AddArg(mem)
v.AddArg(v0)
v1 := b.NewValue0(v.Line, OpLoad, TypeInvalid)
- v1.Type = config.fe.TypeUintptr()
+ v1.Type = config.fe.TypeInt()
v2 := b.NewValue0(v.Line, OpOffPtr, TypeInvalid)
- v2.Type = config.fe.TypeUintptr().PtrTo()
+ v2.Type = config.fe.TypeInt().PtrTo()
v2.AuxInt = config.PtrSize
v2.AddArg(ptr)
v1.AddArg(v2)
v1.AddArg(mem)
v.AddArg(v1)
v3 := b.NewValue0(v.Line, OpLoad, TypeInvalid)
- v3.Type = config.fe.TypeUintptr()
+ v3.Type = config.fe.TypeInt()
v4 := b.NewValue0(v.Line, OpOffPtr, TypeInvalid)
- v4.Type = config.fe.TypeUintptr().PtrTo()
+ v4.Type = config.fe.TypeInt().PtrTo()
v4.AuxInt = 2 * config.PtrSize
v4.AddArg(ptr)
v3.AddArg(v4)
v.AddArg(v3)
return true
}
- goto end12c46556d962198680eb3238859e3016
-end12c46556d962198680eb3238859e3016:
+ goto end65e8b0055aa7491b9b6066d9fe1b2c13
+end65e8b0055aa7491b9b6066d9fe1b2c13:
;
// match: (Load <t> ptr mem)
// cond: t.IsInterface()
;
// match: (Store [2*config.PtrSize] dst (StringMake ptr len) mem)
// cond:
- // result: (Store [config.PtrSize] (OffPtr <config.fe.TypeUintptr().PtrTo()> [config.PtrSize] dst) len (Store [config.PtrSize] dst ptr mem))
+ // result: (Store [config.PtrSize] (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] dst) len (Store [config.PtrSize] dst ptr mem))
{
if v.AuxInt != 2*config.PtrSize {
- goto end12abe4021d24e76ed56d64b18730bffb
+ goto endd3a6ecebdad5899570a79fe5c62f34f1
}
dst := v.Args[0]
if v.Args[1].Op != OpStringMake {
- goto end12abe4021d24e76ed56d64b18730bffb
+ goto endd3a6ecebdad5899570a79fe5c62f34f1
}
ptr := v.Args[1].Args[0]
len := v.Args[1].Args[1]
v.resetArgs()
v.AuxInt = config.PtrSize
v0 := b.NewValue0(v.Line, OpOffPtr, TypeInvalid)
- v0.Type = config.fe.TypeUintptr().PtrTo()
+ v0.Type = config.fe.TypeInt().PtrTo()
v0.AuxInt = config.PtrSize
v0.AddArg(dst)
v.AddArg(v0)
v.AddArg(v1)
return true
}
- goto end12abe4021d24e76ed56d64b18730bffb
-end12abe4021d24e76ed56d64b18730bffb:
+ goto endd3a6ecebdad5899570a79fe5c62f34f1
+endd3a6ecebdad5899570a79fe5c62f34f1:
;
// match: (Store [3*config.PtrSize] dst (SliceMake ptr len cap) mem)
// cond:
- // result: (Store [config.PtrSize] (OffPtr <config.fe.TypeUintptr().PtrTo()> [2*config.PtrSize] dst) cap (Store [config.PtrSize] (OffPtr <config.fe.TypeUintptr().PtrTo()> [config.PtrSize] dst) len (Store [config.PtrSize] dst ptr mem)))
+ // result: (Store [config.PtrSize] (OffPtr <config.fe.TypeInt().PtrTo()> [2*config.PtrSize] dst) cap (Store [config.PtrSize] (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] dst) len (Store [config.PtrSize] dst ptr mem)))
{
if v.AuxInt != 3*config.PtrSize {
- goto end7498d25e17db5398cf073a8590e35cc2
+ goto endd5cc8c3dad7d24c845b0b88fc51487ae
}
dst := v.Args[0]
if v.Args[1].Op != OpSliceMake {
- goto end7498d25e17db5398cf073a8590e35cc2
+ goto endd5cc8c3dad7d24c845b0b88fc51487ae
}
ptr := v.Args[1].Args[0]
len := v.Args[1].Args[1]
v.resetArgs()
v.AuxInt = config.PtrSize
v0 := b.NewValue0(v.Line, OpOffPtr, TypeInvalid)
- v0.Type = config.fe.TypeUintptr().PtrTo()
+ v0.Type = config.fe.TypeInt().PtrTo()
v0.AuxInt = 2 * config.PtrSize
v0.AddArg(dst)
v.AddArg(v0)
v1 := b.NewValue0(v.Line, OpStore, TypeInvalid)
v1.AuxInt = config.PtrSize
v2 := b.NewValue0(v.Line, OpOffPtr, TypeInvalid)
- v2.Type = config.fe.TypeUintptr().PtrTo()
+ v2.Type = config.fe.TypeInt().PtrTo()
v2.AuxInt = config.PtrSize
v2.AddArg(dst)
v1.AddArg(v2)
v.AddArg(v1)
return true
}
- goto end7498d25e17db5398cf073a8590e35cc2
-end7498d25e17db5398cf073a8590e35cc2:
+ goto endd5cc8c3dad7d24c845b0b88fc51487ae
+endd5cc8c3dad7d24c845b0b88fc51487ae:
;
// match: (Store [2*config.PtrSize] dst (IMake itab data) mem)
// cond:
cases := []fun{
Fun(c, "entry",
Bloc("entry",
- Valu("mem0", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem0", OpInitMem, TypeMem, 0, ".mem"),
Valu("ptr", OpConst64, TypeInt64, 0xABCD, nil),
Valu("v", OpConst64, TypeInt64, 12, nil),
Valu("mem1", OpStore, TypeMem, 8, nil, "ptr", "v", "mem0"),
ptyp := &TypeImpl{Size_: 8, Ptr: true, Name: "ptr"}
fun := Fun(c, "entry",
Bloc("entry",
- Valu("mem", OpArg, TypeMem, 0, ".mem"),
+ Valu("mem", OpInitMem, TypeMem, 0, ".mem"),
Valu("SP", OpSP, TypeUInt64, 0, nil),
Valu("argptr", OpOffPtr, ptyp, 8, nil, "SP"),
Valu("resptr", OpOffPtr, ptyp, 16, nil, "SP"),
}
case v.Op == OpLoadReg:
s.add(v.Args[0].ID)
+ case v.Op == OpArg:
+ // This is an input argument which is pre-spilled. It is kind of
+ // like a StoreReg, but we don't remove v.ID here because we want
+ // this value to appear live even before this point. Being live
+ // all the way to the start of the entry block prevents other
+ // values from being allocated to the same slot and clobbering
+ // the input value before we have a chance to load it.
}
}
}
// Build map from values to their names, if any.
// A value may be associated with more than one name (e.g. after
// the assignment i=j). This step picks one name per value arbitrarily.
- names := make([]GCNode, f.NumValues())
+ names := make([]LocalSlot, f.NumValues())
for _, name := range f.Names {
// Note: not "range f.NamedValues" above, because
// that would be nondeterministic.
}
}
+ // Allocate args to their assigned locations.
+ for _, v := range f.Entry.Values {
+ if v.Op != OpArg {
+ continue
+ }
+ f.setHome(v, LocalSlot{v.Aux.(GCNode), v.Type, v.AuxInt})
+ }
+
// For each type, we keep track of all the stack slots we
// have allocated for that type.
- locations := map[Type][]*LocalSlot{}
+ locations := map[Type][]LocalSlot{}
// Each time we assign a stack slot to a value v, we remember
// the slot we used via an index into locations[v.Type].
// If this is a named value, try to use the name as
// the spill location.
- var name GCNode
+ var name LocalSlot
if v.Op == OpStoreReg {
name = names[v.Args[0].ID]
} else {
name = names[v.ID]
}
- if name != nil && v.Type.Equal(name.Typ()) {
+ if name.N != nil && v.Type.Equal(name.Type) {
for _, id := range interfere[v.ID] {
h := f.getHome(id)
- if h != nil && h.(*LocalSlot).N == name {
+ if h != nil && h.(LocalSlot) == name {
// A variable can interfere with itself.
// It is rare, but it can happen.
goto noname
for _, a := range v.Args {
for _, id := range interfere[a.ID] {
h := f.getHome(id)
- if h != nil && h.(*LocalSlot).N == name {
+ if h != nil && h.(LocalSlot) == name {
goto noname
}
}
}
}
- loc := &LocalSlot{name}
- f.setHome(v, loc)
+ f.setHome(v, name)
if v.Op == OpPhi {
for _, a := range v.Args {
- f.setHome(a, loc)
+ f.setHome(a, name)
}
}
continue
}
// If there is no unused stack slot, allocate a new one.
if i == len(locs) {
- locs = append(locs, &LocalSlot{f.Config.fe.Auto(v.Type)})
+ locs = append(locs, LocalSlot{N: f.Config.fe.Auto(v.Type), Type: v.Type, Off: 0})
locations[v.Type] = locs
}
// Use the stack variable at that index for v.
for _, b := range f.Blocks {
for i := 0; i < len(b.Values); i++ {
v := b.Values[i]
- if v.Op == OpPhi || v.Op == OpGetClosurePtr || v.Op == OpConvert {
- // GetClosurePtr must stay in entry block.
+ if v.Op == OpPhi || v.Op == OpGetClosurePtr || v.Op == OpConvert || v.Op == OpArg {
+ // GetClosurePtr & Arg must stay in entry block.
// OpConvert must not float over call sites.
// TODO do we instead need a dependence edge of some sort for OpConvert?
// Would memory do the trick, or do we need something else that relates
"-ex", "echo END\n",
"-ex", "echo BEGIN print strvar\n",
"-ex", "print strvar",
- "-ex", "echo END\n",
- "-ex", "echo BEGIN print ptrvar\n",
- "-ex", "print ptrvar",
"-ex", "echo END\n"}
// without framepointer, gdb cannot backtrace our non-standard
t.Fatalf("print strvar failed: %s", bl)
}
- if bl := blocks["print ptrvar"]; !strVarRe.MatchString(bl) {
- t.Fatalf("print ptrvar failed: %s", bl)
- }
-
btGoroutineRe := regexp.MustCompile(`^#0\s+runtime.+at`)
if bl := blocks["goroutine 2 bt"]; canBackTrace && !btGoroutineRe.MatchString(bl) {
t.Fatalf("goroutine 2 bt failed: %s", bl)