From: Cherry Zhang
Date: Thu, 22 Oct 2020 00:15:48 +0000 (-0400)
Subject: cmd/compile: delete register maps, completely
X-Git-Tag: go1.16beta1~380
X-Git-Url: http://www.git.cypherpunks.su/?a=commitdiff_plain;h=84d7a85089009332756c18e876ec91f96b362ebf;p=gostls13.git

cmd/compile: delete register maps, completely

Remove go115ReduceLiveness feature gating flag, along with code that is
only needed when go115ReduceLiveness is false.

Change-Id: I7571913cc74cbd17b330a0ee0160fefc9eeee66e
Reviewed-on: https://go-review.googlesource.com/c/go/+/264338
Trust: Cherry Zhang
Run-TryBot: Cherry Zhang
Reviewed-by: Austin Clements
---

diff --git a/src/cmd/compile/fmtmap_test.go b/src/cmd/compile/fmtmap_test.go
index 179c60187f..0811df7f7b 100644
--- a/src/cmd/compile/fmtmap_test.go
+++ b/src/cmd/compile/fmtmap_test.go
@@ -105,10 +105,8 @@ var knownFormats = map[string]string{
 	"cmd/compile/internal/ssa.GCNode %v": "",
 	"cmd/compile/internal/ssa.ID %d": "",
 	"cmd/compile/internal/ssa.ID %v": "",
-	"cmd/compile/internal/ssa.LocPair %s": "",
 	"cmd/compile/internal/ssa.LocalSlot %s": "",
 	"cmd/compile/internal/ssa.LocalSlot %v": "",
-	"cmd/compile/internal/ssa.Location %T": "",
 	"cmd/compile/internal/ssa.Location %s": "",
 	"cmd/compile/internal/ssa.Op %s": "",
 	"cmd/compile/internal/ssa.Op %v": "",
diff --git a/src/cmd/compile/internal/gc/gsubr.go b/src/cmd/compile/internal/gc/gsubr.go
index ce5182f203..864ada1d3c 100644
--- a/src/cmd/compile/internal/gc/gsubr.go
+++ b/src/cmd/compile/internal/gc/gsubr.go
@@ -70,12 +70,8 @@ func newProgs(fn *Node, worker int) *Progs {
 	pp.pos = fn.Pos
 	pp.settext(fn)
 	// PCDATA tables implicitly start with index -1.
-	pp.prevLive = LivenessIndex{-1, -1, false}
-	if go115ReduceLiveness {
-		pp.nextLive = pp.prevLive
-	} else {
-		pp.nextLive = LivenessInvalid
-	}
+	pp.prevLive = LivenessIndex{-1, false}
+	pp.nextLive = pp.prevLive
 	return pp
 }
 
@@ -120,31 +116,15 @@ func (pp *Progs) Prog(as obj.As) *obj.Prog {
 		Addrconst(&p.From, objabi.PCDATA_StackMapIndex)
 		Addrconst(&p.To, int64(idx))
 	}
-	if !go115ReduceLiveness {
+	if pp.nextLive.isUnsafePoint != pp.prevLive.isUnsafePoint {
+		// Emit unsafe-point marker.
+		pp.prevLive.isUnsafePoint = pp.nextLive.isUnsafePoint
+		p := pp.Prog(obj.APCDATA)
+		Addrconst(&p.From, objabi.PCDATA_UnsafePoint)
 		if pp.nextLive.isUnsafePoint {
-			// Unsafe points are encoded as a special value in the
-			// register map.
-			pp.nextLive.regMapIndex = objabi.PCDATA_RegMapUnsafe
-		}
-		if pp.nextLive.regMapIndex != pp.prevLive.regMapIndex {
-			// Emit register map index change.
-			idx := pp.nextLive.regMapIndex
-			pp.prevLive.regMapIndex = idx
-			p := pp.Prog(obj.APCDATA)
-			Addrconst(&p.From, objabi.PCDATA_RegMapIndex)
-			Addrconst(&p.To, int64(idx))
-		}
-	} else {
-		if pp.nextLive.isUnsafePoint != pp.prevLive.isUnsafePoint {
-			// Emit unsafe-point marker.
-			pp.prevLive.isUnsafePoint = pp.nextLive.isUnsafePoint
-			p := pp.Prog(obj.APCDATA)
-			Addrconst(&p.From, objabi.PCDATA_UnsafePoint)
-			if pp.nextLive.isUnsafePoint {
-				Addrconst(&p.To, objabi.PCDATA_UnsafePointUnsafe)
-			} else {
-				Addrconst(&p.To, objabi.PCDATA_UnsafePointSafe)
-			}
+			Addrconst(&p.To, objabi.PCDATA_UnsafePointUnsafe)
+		} else {
+			Addrconst(&p.To, objabi.PCDATA_UnsafePointSafe)
 		}
 	}
diff --git a/src/cmd/compile/internal/gc/obj.go b/src/cmd/compile/internal/gc/obj.go
index 226eb45252..32aa7c5bb1 100644
--- a/src/cmd/compile/internal/gc/obj.go
+++ b/src/cmd/compile/internal/gc/obj.go
@@ -312,7 +312,7 @@ func addGCLocals() {
 		if fn == nil {
 			continue
 		}
-		for _, gcsym := range []*obj.LSym{fn.GCArgs, fn.GCLocals, fn.GCRegs} {
+		for _, gcsym := range []*obj.LSym{fn.GCArgs, fn.GCLocals} {
 			if gcsym != nil && !gcsym.OnList() {
 				ggloblsym(gcsym, int32(len(gcsym.P)), obj.RODATA|obj.DUPOK)
 			}
diff --git a/src/cmd/compile/internal/gc/plive.go b/src/cmd/compile/internal/gc/plive.go
index b471accb65..a48173e0d6 100644
--- a/src/cmd/compile/internal/gc/plive.go
+++ b/src/cmd/compile/internal/gc/plive.go
@@ -24,16 +24,6 @@ import (
 	"strings"
 )
 
-// go115ReduceLiveness disables register maps and only produces stack
-// maps at call sites.
-//
-// In Go 1.15, we changed debug call injection to use conservative
-// scanning instead of precise pointer maps, so these are no longer
-// necessary.
-//
-// Keep in sync with runtime/preempt.go:go115ReduceLiveness.
-const go115ReduceLiveness = true
-
 // OpVarDef is an annotation for the liveness analysis, marking a place
 // where a complete initialization (definition) of a variable begins.
 // Since the liveness analysis can see initialization of single-word
@@ -96,15 +86,15 @@ type BlockEffects struct {
 	//
 	// uevar: upward exposed variables (used before set in block)
 	// varkill: killed variables (set in block)
-	uevar   varRegVec
-	varkill varRegVec
+	uevar   bvec
+	varkill bvec
 
 	// Computed during Liveness.solve using control flow information:
 	//
 	// livein: variables live at block entry
 	// liveout: variables live at block exit
-	livein  varRegVec
-	liveout varRegVec
+	livein  bvec
+	liveout bvec
 }
 
 // A collection of global state used by liveness analysis.
@@ -128,16 +118,14 @@ type Liveness struct {
 	// current Block during Liveness.epilogue. Indexed in Value
 	// order for that block. Additionally, for the entry block
 	// livevars[0] is the entry bitmap. Liveness.compact moves
-	// these to stackMaps and regMaps.
-	livevars []varRegVec
+	// these to stackMaps.
+	livevars []bvec
 
 	// livenessMap maps from safe points (i.e., CALLs) to their
 	// liveness map indexes.
 	livenessMap LivenessMap
 	stackMapSet bvecSet
 	stackMaps   []bvec
-	regMapSet   map[liveRegMask]int
-	regMaps     []liveRegMask
 
 	cache progeffectscache
 }
@@ -158,7 +146,7 @@ func (m *LivenessMap) reset() {
 			delete(m.vals, k)
 		}
 	}
-	m.deferreturn = LivenessInvalid
+	m.deferreturn = LivenessDontCare
 }
 
 func (m *LivenessMap) set(v *ssa.Value, i LivenessIndex) {
@@ -166,27 +154,17 @@ func (m LivenessMap) Get(v *ssa.Value) LivenessIndex {
-	if !go115ReduceLiveness {
-		// All safe-points are in the map, so if v isn't in
-		// the map, it's an unsafe-point.
-		if idx, ok := m.vals[v.ID]; ok {
-			return idx
-		}
-		return LivenessInvalid
-	}
-
 	// If v isn't in the map, then it's a "don't care" and not an
 	// unsafe-point.
 	if idx, ok := m.vals[v.ID]; ok {
 		return idx
 	}
-	return LivenessIndex{StackMapDontCare, StackMapDontCare, false}
+	return LivenessIndex{StackMapDontCare, false}
 }
 
 // LivenessIndex stores the liveness map information for a Value.
 type LivenessIndex struct {
 	stackMapIndex int
-	regMapIndex   int // only for !go115ReduceLiveness
 
 	// isUnsafePoint indicates that this is an unsafe-point.
 	//
@@ -197,8 +175,10 @@ type LivenessIndex struct {
 	isUnsafePoint bool
 }
 
-// LivenessInvalid indicates an unsafe point with no stack map.
-var LivenessInvalid = LivenessIndex{StackMapDontCare, StackMapDontCare, true} // only for !go115ReduceLiveness
+// LivenessDontCare indicates that the liveness information doesn't
+// matter. Currently it is used in deferreturn liveness when we don't
+// actually need it. It should never be emitted to the PCDATA stream.
+var LivenessDontCare = LivenessIndex{StackMapDontCare, true}
 
 // StackMapDontCare indicates that the stack map index at a Value
 // doesn't matter.
@@ -212,46 +192,12 @@ func (idx LivenessIndex) StackMapValid() bool {
 	return idx.stackMapIndex != StackMapDontCare
 }
 
-func (idx LivenessIndex) RegMapValid() bool {
-	return idx.regMapIndex != StackMapDontCare
-}
-
 type progeffectscache struct {
 	retuevar    []int32
 	tailuevar   []int32
 	initialized bool
 }
 
-// varRegVec contains liveness bitmaps for variables and registers.
-type varRegVec struct {
-	vars bvec
-	regs liveRegMask
-}
-
-func (v *varRegVec) Eq(v2 varRegVec) bool {
-	return v.vars.Eq(v2.vars) && v.regs == v2.regs
-}
-
-func (v *varRegVec) Copy(v2 varRegVec) {
-	v.vars.Copy(v2.vars)
-	v.regs = v2.regs
-}
-
-func (v *varRegVec) Clear() {
-	v.vars.Clear()
-	v.regs = 0
-}
-
-func (v *varRegVec) Or(v1, v2 varRegVec) {
-	v.vars.Or(v1.vars, v2.vars)
-	v.regs = v1.regs | v2.regs
-}
-
-func (v *varRegVec) AndNot(v1, v2 varRegVec) {
-	v.vars.AndNot(v1.vars, v2.vars)
-	v.regs = v1.regs &^ v2.regs
-}
-
 // livenessShouldTrack reports whether the liveness analysis
 // should track the variable n.
 // We don't care about variables that have no pointers,
@@ -400,110 +346,6 @@ func affectedNode(v *ssa.Value) (*Node, ssa.SymEffect) {
 	}
 }
 
-// regEffects returns the registers affected by v.
-func (lv *Liveness) regEffects(v *ssa.Value) (uevar, kill liveRegMask) {
-	if go115ReduceLiveness {
-		return 0, 0
-	}
-	if v.Op == ssa.OpPhi {
-		// All phi node arguments must come from the same
-		// register and the result must also go to that
-		// register, so there's no overall effect.
-		return 0, 0
-	}
-	addLocs := func(mask liveRegMask, v *ssa.Value, ptrOnly bool) liveRegMask {
-		if int(v.ID) >= len(lv.f.RegAlloc) {
-			// v has no allocated registers.
-			return mask
-		}
-		loc := lv.f.RegAlloc[v.ID]
-		if loc == nil {
-			// v has no allocated registers.
-			return mask
-		}
-		if v.Op == ssa.OpGetG {
-			// GetG represents the G register, which is a
-			// pointer, but not a valid GC register. The
-			// current G is always reachable, so it's okay
-			// to ignore this register.
-			return mask
-		}
-
-		// Collect registers and types from v's location.
-		var regs [2]*ssa.Register
-		nreg := 0
-		switch loc := loc.(type) {
-		case ssa.LocalSlot:
-			return mask
-		case *ssa.Register:
-			if ptrOnly && !v.Type.HasPointers() {
-				return mask
-			}
-			regs[0] = loc
-			nreg = 1
-		case ssa.LocPair:
-			// The value will have TTUPLE type, and the
-			// children are nil or *ssa.Register.
-			if v.Type.Etype != types.TTUPLE {
-				v.Fatalf("location pair %s has non-tuple type %v", loc, v.Type)
-			}
-			for i, loc1 := range &loc {
-				if loc1 == nil {
-					continue
-				}
-				if ptrOnly && !v.Type.FieldType(i).HasPointers() {
-					continue
-				}
-				regs[nreg] = loc1.(*ssa.Register)
-				nreg++
-			}
-		default:
-			v.Fatalf("weird RegAlloc location: %s (%T)", loc, loc)
-		}
-
-		// Add register locations to vars.
-		for _, reg := range regs[:nreg] {
-			if reg.GCNum() == -1 {
-				if ptrOnly {
-					v.Fatalf("pointer in non-pointer register %v", reg)
-				} else {
-					continue
-				}
-			}
-			mask |= 1 << uint(reg.GCNum())
-		}
-		return mask
-	}
-
-	// v clobbers all registers it writes to (whether or not the
-	// write is pointer-typed).
-	kill = addLocs(0, v, false)
-	for _, arg := range v.Args {
-		// v uses all registers is reads from, but we only
-		// care about marking those containing pointers.
-		uevar = addLocs(uevar, arg, true)
-	}
-	return uevar, kill
-}
-
-type liveRegMask uint32 // only if !go115ReduceLiveness
-
-func (m liveRegMask) niceString(config *ssa.Config) string {
-	if m == 0 {
-		return ""
-	}
-	str := ""
-	for i, reg := range config.GCRegMap {
-		if m&(1<= f.NumBlocks() {
 			lv.be = lc.be[:f.NumBlocks()]
 		}
-		lv.livenessMap = LivenessMap{vals: lc.livenessMap.vals, deferreturn: LivenessInvalid}
+		lv.livenessMap = LivenessMap{vals: lc.livenessMap.vals, deferreturn: LivenessDontCare}
 		lc.livenessMap.vals = nil
 	}
 	if lv.be == nil {
@@ -546,10 +386,10 @@ func newliveness(fn *Node, f *ssa.Func, vars []*Node, idx map[*Node]int32, stkpt
 	for _, b := range f.Blocks {
 		be := lv.blockEffects(b)
-		be.uevar = varRegVec{vars: bulk.next()}
-		be.varkill = varRegVec{vars: bulk.next()}
-		be.livein = varRegVec{vars: bulk.next()}
-		be.liveout = varRegVec{vars: bulk.next()}
+		be.uevar = bulk.next()
+		be.varkill = bulk.next()
+		be.livein = bulk.next()
+		be.liveout = bulk.next()
 	}
 
 	lv.livenessMap.reset()
@@ -637,20 +477,6 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) {
 	}
 }
 
-// usedRegs returns the maximum width of the live register map.
-func (lv *Liveness) usedRegs() int32 {
-	var any liveRegMask
-	for _, live := range lv.regMaps {
-		any |= live
-	}
-	i := int32(0)
-	for any != 0 {
-		any >>= 1
-		i++
-	}
-	return i
-}
-
 // Generates live pointer value maps for arguments and local variables. The
 // this argument and the in arguments are always assumed live. The vars
 // argument is a slice of *Nodes.
@@ -851,31 +677,16 @@ func (lv *Liveness) markUnsafePoints() {
 // particular, call Values can have a stack map in case the callee
 // grows the stack, but not themselves be a safe-point.
 func (lv *Liveness) hasStackMap(v *ssa.Value) bool {
-	// The runtime only has safe-points in function prologues, so
-	// we only need stack maps at call sites. go:nosplit functions
-	// are similar.
-	if go115ReduceLiveness || compiling_runtime || lv.f.NoSplit {
-		if !v.Op.IsCall() {
-			return false
-		}
-		// typedmemclr and typedmemmove are write barriers and
-		// deeply non-preemptible. They are unsafe points and
-		// hence should not have liveness maps.
-		if sym, ok := v.Aux.(*ssa.AuxCall); ok && (sym.Fn == typedmemclr || sym.Fn == typedmemmove) {
-			return false
-		}
-		return true
+	if !v.Op.IsCall() {
+		return false
 	}
-
-	switch v.Op {
-	case ssa.OpInitMem, ssa.OpArg, ssa.OpSP, ssa.OpSB,
-		ssa.OpSelect0, ssa.OpSelect1, ssa.OpGetG,
-		ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive,
-		ssa.OpPhi:
-		// These don't produce code (see genssa).
+	// typedmemclr and typedmemmove are write barriers and
+	// deeply non-preemptible. They are unsafe points and
+	// hence should not have liveness maps.
+	if sym, ok := v.Aux.(*ssa.AuxCall); ok && (sym.Fn == typedmemclr || sym.Fn == typedmemmove) {
 		return false
 	}
-	return !lv.unsafePoints.Get(int32(v.ID))
+	return true
 }
 
 // Initializes the sets for solving the live variables. Visits all the
@@ -891,17 +702,13 @@ func (lv *Liveness) prologue() {
 		// effects with the each prog effects.
 		for j := len(b.Values) - 1; j >= 0; j-- {
 			pos, e := lv.valueEffects(b.Values[j])
-			regUevar, regKill := lv.regEffects(b.Values[j])
 			if e&varkill != 0 {
-				be.varkill.vars.Set(pos)
-				be.uevar.vars.Unset(pos)
+				be.varkill.Set(pos)
+				be.uevar.Unset(pos)
 			}
-			be.varkill.regs |= regKill
-			be.uevar.regs &^= regKill
 			if e&uevar != 0 {
-				be.uevar.vars.Set(pos)
+				be.uevar.Set(pos)
 			}
-			be.uevar.regs |= regUevar
 		}
 	}
 }
@@ -911,8 +718,8 @@ func (lv *Liveness) solve() {
 	// These temporary bitvectors exist to avoid successive allocations and
 	// frees within the loop.
 	nvars := int32(len(lv.vars))
-	newlivein := varRegVec{vars: bvalloc(nvars)}
-	newliveout := varRegVec{vars: bvalloc(nvars)}
+	newlivein := bvalloc(nvars)
+	newliveout := bvalloc(nvars)
 
 	// Walk blocks in postorder ordering. This improves convergence.
 	po := lv.f.Postorder()
@@ -930,11 +737,11 @@ func (lv *Liveness) solve() {
 		switch b.Kind {
 		case ssa.BlockRet:
 			for _, pos := range lv.cache.retuevar {
-				newliveout.vars.Set(pos)
+				newliveout.Set(pos)
 			}
 		case ssa.BlockRetJmp:
 			for _, pos := range lv.cache.tailuevar {
-				newliveout.vars.Set(pos)
+				newliveout.Set(pos)
 			}
 		case ssa.BlockExit:
 			// panic exit - nothing to do
@@ -969,7 +776,7 @@ func (lv *Liveness) solve() {
 // variables at each safe point locations.
 func (lv *Liveness) epilogue() {
 	nvars := int32(len(lv.vars))
-	liveout := varRegVec{vars: bvalloc(nvars)}
+	liveout := bvalloc(nvars)
 	livedefer := bvalloc(nvars) // always-live variables
 
 	// If there is a defer (that could recover), then all output
@@ -1025,12 +832,11 @@ func (lv *Liveness) epilogue() {
 	{
 		// Reserve an entry for function entry.
 		live := bvalloc(nvars)
-		lv.livevars = append(lv.livevars, varRegVec{vars: live})
+		lv.livevars = append(lv.livevars, live)
 	}
 
 	for _, b := range lv.f.Blocks {
 		be := lv.blockEffects(b)
-		firstBitmapIndex := len(lv.livevars)
 
 		// Walk forward through the basic block instructions and
 		// allocate liveness maps for those instructions that need them.
@@ -1040,7 +846,7 @@ func (lv *Liveness) epilogue() {
 			}
 
 			live := bvalloc(nvars)
-			lv.livevars = append(lv.livevars, varRegVec{vars: live})
+			lv.livevars = append(lv.livevars, live)
 		}
 
 		// walk backward, construct maps at each safe point
@@ -1056,21 +862,18 @@ func (lv *Liveness) epilogue() {
 
 				live := &lv.livevars[index]
 				live.Or(*live, liveout)
-				live.vars.Or(live.vars, livedefer) // only for non-entry safe points
+				live.Or(*live, livedefer) // only for non-entry safe points
 				index--
 			}
 
 			// Update liveness information.
 			pos, e := lv.valueEffects(v)
-			regUevar, regKill := lv.regEffects(v)
 			if e&varkill != 0 {
-				liveout.vars.Unset(pos)
+				liveout.Unset(pos)
 			}
-			liveout.regs &^= regKill
 			if e&uevar != 0 {
-				liveout.vars.Set(pos)
+				liveout.Set(pos)
 			}
-			liveout.regs |= regUevar
 		}
 
 		if b == lv.f.Entry {
@@ -1080,7 +883,7 @@ func (lv *Liveness) epilogue() {
 
 			// Check to make sure only input variables are live.
 			for i, n := range lv.vars {
-				if !liveout.vars.Get(int32(i)) {
+				if !liveout.Get(int32(i)) {
 					continue
 				}
 				if n.Class() == PPARAM {
@@ -1094,32 +897,16 @@ func (lv *Liveness) epilogue() {
 			live.Or(*live, liveout)
 		}
 
-		// Check that no registers are live across calls.
-		// For closure calls, the CALLclosure is the last use
-		// of the context register, so it's dead after the call.
-		index = int32(firstBitmapIndex)
-		for _, v := range b.Values {
-			if lv.hasStackMap(v) {
-				live := lv.livevars[index]
-				if v.Op.IsCall() && live.regs != 0 {
-					lv.printDebug()
-					v.Fatalf("%v register %s recorded as live at call", lv.fn.Func.Nname, live.regs.niceString(lv.f.Config))
-				}
-				index++
-			}
-		}
-
 		// The liveness maps for this block are now complete. Compact them.
 		lv.compact(b)
 	}
 
 	// If we have an open-coded deferreturn call, make a liveness map for it.
 	if lv.fn.Func.OpenCodedDeferDisallowed() {
-		lv.livenessMap.deferreturn = LivenessInvalid
+		lv.livenessMap.deferreturn = LivenessDontCare
 	} else {
 		lv.livenessMap.deferreturn = LivenessIndex{
 			stackMapIndex: lv.stackMapSet.add(livedefer),
-			regMapIndex:   0, // entry regMap, containing no live registers
 			isUnsafePoint: false,
 		}
 	}
@@ -1136,20 +923,10 @@ func (lv *Liveness) epilogue() {
 			lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Func.Nname, n)
 		}
 	}
-	if !go115ReduceLiveness {
-		// Check that no registers are live at function entry.
-		// The context register, if any, comes from a
-		// LoweredGetClosurePtr operation first thing in the function,
-		// so it doesn't appear live at entry.
-		if regs := lv.regMaps[0]; regs != 0 {
-			lv.printDebug()
-			lv.f.Fatalf("%v register %s recorded as live on entry", lv.fn.Func.Nname, regs.niceString(lv.f.Config))
-		}
-	}
 }
 
 // Compact coalesces identical bitmaps from lv.livevars into the sets
-// lv.stackMapSet and lv.regMaps.
+// lv.stackMapSet.
 //
 // Compact clears lv.livevars.
 //
@@ -1165,45 +942,23 @@ func (lv *Liveness) epilogue() {
 // PCDATA tables cost about 100k. So for now we keep using a single index for
 // both bitmap lists.
 func (lv *Liveness) compact(b *ssa.Block) {
-	add := func(live varRegVec, isUnsafePoint bool) LivenessIndex { // only if !go115ReduceLiveness
-		// Deduplicate the stack map.
-		stackIndex := lv.stackMapSet.add(live.vars)
-		// Deduplicate the register map.
-		regIndex, ok := lv.regMapSet[live.regs]
-		if !ok {
-			regIndex = len(lv.regMapSet)
-			lv.regMapSet[live.regs] = regIndex
-			lv.regMaps = append(lv.regMaps, live.regs)
-		}
-		return LivenessIndex{stackIndex, regIndex, isUnsafePoint}
-	}
 	pos := 0
 	if b == lv.f.Entry {
 		// Handle entry stack map.
-		if !go115ReduceLiveness {
-			add(lv.livevars[0], false)
-		} else {
-			lv.stackMapSet.add(lv.livevars[0].vars)
-		}
+		lv.stackMapSet.add(lv.livevars[0])
 		pos++
 	}
 	for _, v := range b.Values {
-		if go115ReduceLiveness {
-			hasStackMap := lv.hasStackMap(v)
-			isUnsafePoint := lv.allUnsafe || lv.unsafePoints.Get(int32(v.ID))
-			idx := LivenessIndex{StackMapDontCare, StackMapDontCare, isUnsafePoint}
-			if hasStackMap {
-				idx.stackMapIndex = lv.stackMapSet.add(lv.livevars[pos].vars)
-				pos++
-			}
-			if hasStackMap || isUnsafePoint {
-				lv.livenessMap.set(v, idx)
-			}
-		} else if lv.hasStackMap(v) {
-			isUnsafePoint := lv.allUnsafe || lv.unsafePoints.Get(int32(v.ID))
-			lv.livenessMap.set(v, add(lv.livevars[pos], isUnsafePoint))
+		hasStackMap := lv.hasStackMap(v)
+		isUnsafePoint := lv.allUnsafe || lv.unsafePoints.Get(int32(v.ID))
+		idx := LivenessIndex{StackMapDontCare, isUnsafePoint}
+		if hasStackMap {
+			idx.stackMapIndex = lv.stackMapSet.add(lv.livevars[pos])
 			pos++
 		}
+		if hasStackMap || isUnsafePoint {
+			lv.livenessMap.set(v, idx)
+		}
 	}
 
 	// Reset livevars.
@@ -1250,8 +1005,8 @@ func (lv *Liveness) showlive(v *ssa.Value, live bvec) {
 	Warnl(pos, s)
 }
 
-func (lv *Liveness) printbvec(printed bool, name string, live varRegVec) bool {
-	if live.vars.IsEmpty() && live.regs == 0 {
+func (lv *Liveness) printbvec(printed bool, name string, live bvec) bool {
+	if live.IsEmpty() {
 		return printed
 	}
 
@@ -1264,19 +1019,18 @@ func (lv *Liveness) printbvec(printed bool, name string, live varRegVec) bool {
 	comma := ""
 	for i, n := range lv.vars {
-		if !live.vars.Get(int32(i)) {
+		if !live.Get(int32(i)) {
 			continue
 		}
 		fmt.Printf("%s%s", comma, n.Sym.Name)
 		comma = ","
 	}
-	fmt.Printf("%s%s", comma, live.regs.niceString(lv.f.Config))
 	return true
 }
 
-// printeffect is like printbvec, but for valueEffects and regEffects.
-func (lv *Liveness) printeffect(printed bool, name string, pos int32, x bool, regMask liveRegMask) bool {
-	if !x && regMask == 0 {
+// printeffect is like printbvec, but for valueEffects.
+func (lv *Liveness) printeffect(printed bool, name string, pos int32, x bool) bool {
+	if !x {
 		return printed
 	}
 	if !printed {
@@ -1288,15 +1042,7 @@ func (lv *Liveness) printeffect(printed bool, name string, pos int32, x bool, re
 	if x {
 		fmt.Printf("%s", lv.vars[pos].Sym.Name)
 	}
-	for j, reg := range lv.f.Config.GCRegMap {
-		if regMask&(1< 32 {
-			// Our uint32 conversion below won't work.
-			Fatalf("GP registers overflow uint32")
-		}
-
-		if regs.n > 0 {
-			for _, live := range lv.regMaps {
-				regs.Clear()
-				regs.b[0] = uint32(live)
-				roff = dbvec(&regsSymTmp, roff, regs)
-			}
-		}
-	}
-
 	// Give these LSyms content-addressable names,
 	// so that they can be de-duplicated.
 	// This provides significant binary size savings.
@@ -1502,11 +1219,7 @@ func (lv *Liveness) emit() (argsSym, liveSym, regsSym *obj.LSym) {
 			lsym.Set(obj.AttrContentAddressable, true)
 		})
 	}
-	if !go115ReduceLiveness {
-		return makeSym(&argsSymTmp), makeSym(&liveSymTmp), makeSym(&regsSymTmp)
-	}
-	// TODO(go115ReduceLiveness): Remove regsSym result
-	return makeSym(&argsSymTmp), makeSym(&liveSymTmp), nil
+	return makeSym(&argsSymTmp), makeSym(&liveSymTmp)
 }
 
 // Entry pointer for liveness analysis. Solves for the liveness of
@@ -1553,7 +1266,7 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap {
 	// Emit the live pointer map data structures
 	ls := e.curfn.Func.lsym
 	fninfo := ls.Func()
-	fninfo.GCArgs, fninfo.GCLocals, fninfo.GCRegs = lv.emit()
+	fninfo.GCArgs, fninfo.GCLocals = lv.emit()
 
 	p := pp.Prog(obj.AFUNCDATA)
 	Addrconst(&p.From, objabi.FUNCDATA_ArgsPointerMaps)
@@ -1567,14 +1280,6 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap {
 	p.To.Name = obj.NAME_EXTERN
 	p.To.Sym = fninfo.GCLocals
 
-	if !go115ReduceLiveness {
-		p = pp.Prog(obj.AFUNCDATA)
-		Addrconst(&p.From, objabi.FUNCDATA_RegPointerMaps)
-		p.To.Type = obj.TYPE_MEM
-		p.To.Name = obj.NAME_EXTERN
-		p.To.Sym = fninfo.GCRegs
-	}
-
 	return lv.livenessMap
 }
diff --git a/src/cmd/compile/internal/gc/ssa.go b/src/cmd/compile/internal/gc/ssa.go
index 7388e4e3e8..67484904a9 100644
--- a/src/cmd/compile/internal/gc/ssa.go
+++ b/src/cmd/compile/internal/gc/ssa.go
@@ -6265,7 +6265,7 @@ func genssa(f *ssa.Func, pp *Progs) {
 		// instruction. We won't use the actual liveness map on a
 		// control instruction. Just mark it something that is
 		// preemptible, unless this function is "all unsafe".
-		s.pp.nextLive = LivenessIndex{-1, -1, allUnsafe(f)}
+		s.pp.nextLive = LivenessIndex{-1, allUnsafe(f)}
 
 		// Emit values in block
 		thearch.SSAMarkMoves(&s, b)
diff --git a/src/cmd/internal/obj/link.go b/src/cmd/internal/obj/link.go
index c652e3adbb..8c8ff587ff 100644
--- a/src/cmd/internal/obj/link.go
+++ b/src/cmd/internal/obj/link.go
@@ -460,7 +460,6 @@ type FuncInfo struct {
 	GCArgs             *LSym
 	GCLocals           *LSym
-	GCRegs             *LSym // Only if !go115ReduceLiveness
 	StackObjects       *LSym
 	OpenCodedDeferInfo *LSym
 
diff --git a/src/cmd/internal/obj/plist.go b/src/cmd/internal/obj/plist.go
index eb54c67f6a..2b096996f7 100644
--- a/src/cmd/internal/obj/plist.go
+++ b/src/cmd/internal/obj/plist.go
@@ -178,7 +178,7 @@ func (ctxt *Link) Globl(s *LSym, size int64, flag int) {
 // Prog generated.
 func (ctxt *Link) EmitEntryLiveness(s *LSym, p *Prog, newprog ProgAlloc) *Prog {
 	pcdata := ctxt.EmitEntryStackMap(s, p, newprog)
-	pcdata = ctxt.EmitEntryRegMap(s, pcdata, newprog)
+	pcdata = ctxt.EmitEntryUnsafePoint(s, pcdata, newprog)
 	return pcdata
 }
 
@@ -195,13 +195,13 @@ func (ctxt *Link) EmitEntryStackMap(s *LSym, p *Prog, newprog ProgAlloc) *Prog {
 	return pcdata
 }
 
-// Similar to EmitEntryLiveness, but just emit register map.
-func (ctxt *Link) EmitEntryRegMap(s *LSym, p *Prog, newprog ProgAlloc) *Prog {
+// Similar to EmitEntryLiveness, but just emit unsafe point map.
+func (ctxt *Link) EmitEntryUnsafePoint(s *LSym, p *Prog, newprog ProgAlloc) *Prog {
 	pcdata := Appendp(p, newprog)
 	pcdata.Pos = s.Func().Text.Pos
 	pcdata.As = APCDATA
 	pcdata.From.Type = TYPE_CONST
-	pcdata.From.Offset = objabi.PCDATA_RegMapIndex
+	pcdata.From.Offset = objabi.PCDATA_UnsafePoint
 	pcdata.To.Type = TYPE_CONST
 	pcdata.To.Offset = -1
 
@@ -216,9 +216,9 @@ func (ctxt *Link) StartUnsafePoint(p *Prog, newprog ProgAlloc) *Prog {
 	pcdata := Appendp(p, newprog)
 	pcdata.As = APCDATA
 	pcdata.From.Type = TYPE_CONST
-	pcdata.From.Offset = objabi.PCDATA_RegMapIndex
+	pcdata.From.Offset = objabi.PCDATA_UnsafePoint
 	pcdata.To.Type = TYPE_CONST
-	pcdata.To.Offset = objabi.PCDATA_RegMapUnsafe
+	pcdata.To.Offset = objabi.PCDATA_UnsafePointUnsafe
 
 	return pcdata
 }
@@ -231,7 +231,7 @@ func (ctxt *Link) EndUnsafePoint(p *Prog, newprog ProgAlloc, oldval int64) *Prog
 	pcdata := Appendp(p, newprog)
 	pcdata.As = APCDATA
 	pcdata.From.Type = TYPE_CONST
-	pcdata.From.Offset = objabi.PCDATA_RegMapIndex
+	pcdata.From.Offset = objabi.PCDATA_UnsafePoint
 	pcdata.To.Type = TYPE_CONST
 	pcdata.To.Offset = oldval
 
@@ -257,11 +257,11 @@ func MarkUnsafePoints(ctxt *Link, p0 *Prog, newprog ProgAlloc, isUnsafePoint, is
 	prevPcdata := int64(-1) // entry PC data value
 	prevRestart := int64(0)
 	for p := prev.Link; p != nil; p, prev = p.Link, p {
-		if p.As == APCDATA && p.From.Offset == objabi.PCDATA_RegMapIndex {
+		if p.As == APCDATA && p.From.Offset == objabi.PCDATA_UnsafePoint {
 			prevPcdata = p.To.Offset
 			continue
 		}
-		if prevPcdata == objabi.PCDATA_RegMapUnsafe {
+		if prevPcdata == objabi.PCDATA_UnsafePointUnsafe {
 			continue // already unsafe
 		}
 		if isUnsafePoint(p) {
@@ -288,7 +288,7 @@ func MarkUnsafePoints(ctxt *Link, p0 *Prog, newprog ProgAlloc, isUnsafePoint, is
 			q := Appendp(prev, newprog)
 			q.As = APCDATA
 			q.From.Type = TYPE_CONST
-			q.From.Offset = objabi.PCDATA_RegMapIndex
+			q.From.Offset = objabi.PCDATA_UnsafePoint
 			q.To.Type = TYPE_CONST
 			q.To.Offset = val
 			q.Pc = p.Pc
@@ -305,7 +305,7 @@ func MarkUnsafePoints(ctxt *Link, p0 *Prog, newprog ProgAlloc, isUnsafePoint, is
 			p = Appendp(p, newprog)
 			p.As = APCDATA
 			p.From.Type = TYPE_CONST
-			p.From.Offset = objabi.PCDATA_RegMapIndex
+			p.From.Offset = objabi.PCDATA_UnsafePoint
 			p.To.Type = TYPE_CONST
 			p.To.Offset = prevPcdata
 			p.Pc = p.Link.Pc
diff --git a/src/cmd/internal/objabi/funcdata.go b/src/cmd/internal/objabi/funcdata.go
index c9480bf2f0..1c5e5e1c8c 100644
--- a/src/cmd/internal/objabi/funcdata.go
+++ b/src/cmd/internal/objabi/funcdata.go
@@ -11,14 +11,12 @@ package objabi
 // ../../../runtime/symtab.go.
 
 const (
-	PCDATA_RegMapIndex   = 0 // if !go115ReduceLiveness
-	PCDATA_UnsafePoint   = 0 // if go115ReduceLiveness
+	PCDATA_UnsafePoint   = 0
 	PCDATA_StackMapIndex = 1
 	PCDATA_InlTreeIndex  = 2
 
 	FUNCDATA_ArgsPointerMaps   = 0
 	FUNCDATA_LocalsPointerMaps = 1
-	FUNCDATA_RegPointerMaps    = 2 // if !go115ReduceLiveness
 	FUNCDATA_StackObjects      = 3
 	FUNCDATA_InlTree           = 4
 	FUNCDATA_OpenCodedDeferInfo = 5
@@ -32,11 +30,6 @@ const (
 // Special PCDATA values.
 const (
-	// PCDATA_RegMapIndex values.
-	//
-	// Only if !go115ReduceLiveness.
-	PCDATA_RegMapUnsafe = PCDATA_UnsafePointUnsafe // Unsafe for async preemption
-
 	// PCDATA_UnsafePoint values.
 	PCDATA_UnsafePointSafe   = -1 // Safe for async preemption
 	PCDATA_UnsafePointUnsafe = -2 // Unsafe for async preemption
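
Below is a minimal, self-contained Go sketch of the encoding this patch settles on. It is not part of the CL: the constant values mirror funcdata.go above, but the type and function names are simplified stand-ins for the compiler's internal LivenessIndex and the Progs.Prog logic in gsubr.go. With register maps gone, only two PCDATA streams remain per instruction: a stack-map index that changes at call sites, and an unsafe-point marker that toggles between PCDATA_UnsafePointSafe (-1) and PCDATA_UnsafePointUnsafe (-2).

package main

import "fmt"

const (
	pcdataUnsafePoint   = 0  // objabi.PCDATA_UnsafePoint
	pcdataStackMapIndex = 1  // objabi.PCDATA_StackMapIndex
	unsafePointSafe     = -1 // objabi.PCDATA_UnsafePointSafe
	unsafePointUnsafe   = -2 // objabi.PCDATA_UnsafePointUnsafe
	stackMapDontCare    = -1 // analogous to StackMapDontCare in plive.go
)

// livenessIndex mirrors the reduced LivenessIndex: a stack map index plus an
// unsafe-point flag; the register map index field no longer exists.
type livenessIndex struct {
	stackMapIndex int
	isUnsafePoint bool
}

// emitPCDATA prints the PCDATA records an instruction would need, in the
// spirit of Progs.Prog after this change: one record when the stack map
// index changes, and one when the unsafe-point state flips.
func emitPCDATA(prev *livenessIndex, next livenessIndex) {
	if next.stackMapIndex != stackMapDontCare && next.stackMapIndex != prev.stackMapIndex {
		prev.stackMapIndex = next.stackMapIndex
		fmt.Printf("PCDATA\t$%d, $%d\n", pcdataStackMapIndex, next.stackMapIndex)
	}
	if next.isUnsafePoint != prev.isUnsafePoint {
		prev.isUnsafePoint = next.isUnsafePoint
		val := unsafePointSafe
		if next.isUnsafePoint {
			val = unsafePointUnsafe
		}
		fmt.Printf("PCDATA\t$%d, $%d\n", pcdataUnsafePoint, val)
	}
}

func main() {
	// PCDATA tables implicitly start with index -1 and "safe".
	prev := &livenessIndex{stackMapIndex: -1, isUnsafePoint: false}
	emitPCDATA(prev, livenessIndex{stackMapIndex: 0, isUnsafePoint: false})                // call site with a stack map
	emitPCDATA(prev, livenessIndex{stackMapIndex: stackMapDontCare, isUnsafePoint: true})  // enter an unsafe region
	emitPCDATA(prev, livenessIndex{stackMapIndex: stackMapDontCare, isUnsafePoint: false}) // leave it again
}

The design point the sketch illustrates: since debug call injection switched to conservative stack scanning (per the removed go115ReduceLiveness comment), the register pointer maps (FUNCDATA_RegPointerMaps and the PCDATA_RegMapIndex stream) carry no information the runtime still needs, so the only per-PC state left besides the stack map index is the single safe/unsafe bit written to the PCDATA_UnsafePoint stream.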