Based on gri@'s suggestion in CL 308971. "Rnd" is a bit random.
Change-Id: I4aad8b7992b31dfd26d20b3c332bc6e1e90f67db
Reviewed-on: https://go-review.googlesource.com/c/go/+/422036
Run-TryBot: Cuong Manh Le <cuong.manhle.vn@gmail.com>
Auto-Submit: Cuong Manh Le <cuong.manhle.vn@gmail.com>
Reviewed-by: Than McIntosh <thanm@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
result.inparams = append(result.inparams,
s.assignParamOrReturn(t, nil, false))
}
- s.stackOffset = types.Rnd(s.stackOffset, int64(types.RegSize))
+ s.stackOffset = types.RoundUp(s.stackOffset, int64(types.RegSize))
result.inRegistersUsed = s.rUsed.intRegs + s.rUsed.floatRegs
// Outputs
result.inparams = append(result.inparams,
s.assignParamOrReturn(f.Type, f.Nname, false))
}
- s.stackOffset = types.Rnd(s.stackOffset, int64(types.RegSize))
+ s.stackOffset = types.RoundUp(s.stackOffset, int64(types.RegSize))
result.inRegistersUsed = s.rUsed.intRegs + s.rUsed.floatRegs
// Outputs
if t == 0 {
return a
}
- return types.Rnd(a, int64(t))
+ return types.RoundUp(a, int64(t))
}
// stackSlot returns a stack offset for a param or result of the
// can register allocate, FALSE otherwise (and updates state
// accordingly).
func (state *assignState) regassignIntegral(t *types.Type) bool {
- regsNeeded := int(types.Rnd(t.Size(), int64(types.PtrSize)) / int64(types.PtrSize))
+ regsNeeded := int(types.RoundUp(t.Size(), int64(types.PtrSize)) / int64(types.PtrSize))
if t.IsComplex() {
regsNeeded = 2
}
for i, f := range fields {
typ := f.Type()
a := s.Alignof(typ)
- o = types.Rnd(o, a)
+ o = types.RoundUp(o, a)
offsets[i] = o
o += s.Sizeof(typ)
}
}
// gc: Size includes alignment padding.
- return types.Rnd(offsets[n-1]+last, s.Alignof(t))
+ return types.RoundUp(offsets[n-1]+last, s.Alignof(t))
case *types2.Interface:
return int64(types.PtrSize) * 2
case *types2.Chan, *types2.Map, *types2.Pointer, *types2.Signature:
}
// SymPtr writes a pointer to x+xoff into symbol s at the next
// pointer-aligned offset at or after off, and returns the offset
// immediately past the written pointer.
func SymPtr(s *obj.LSym, off int, x *obj.LSym, xoff int) int {
	aligned := types.RoundUp(int64(off), int64(types.PtrSize))
	s.WriteAddr(base.Ctxt, aligned, types.PtrSize, x, int64(xoff))
	return int(aligned) + types.PtrSize
}
func SymPtrWeak(s *obj.LSym, off int, x *obj.LSym, xoff int) int {
- off = int(types.Rnd(int64(off), int64(types.PtrSize)))
+ off = int(types.RoundUp(int64(off), int64(types.PtrSize)))
s.WriteWeakAddr(base.Ctxt, int64(off), types.PtrSize, x, int64(xoff))
off += types.PtrSize
return off
if t.Sym() == nil && len(m) == 0 {
return ot
}
- noff := int(types.Rnd(int64(ot), int64(types.PtrSize)))
+ noff := int(types.RoundUp(int64(ot), int64(types.PtrSize)))
if noff != ot {
base.Fatalf("unexpected alignment in dextratype for %v", t)
}
w = 1
}
s.stksize += w
- s.stksize = types.Rnd(s.stksize, n.Type().Alignment())
+ s.stksize = types.RoundUp(s.stksize, n.Type().Alignment())
if n.Type().HasPointers() {
s.stkptrsize = s.stksize
lastHasPtr = true
n.SetFrameOffset(-s.stksize)
}
- s.stksize = types.Rnd(s.stksize, int64(types.RegSize))
- s.stkptrsize = types.Rnd(s.stkptrsize, int64(types.RegSize))
+ s.stksize = types.RoundUp(s.stksize, int64(types.RegSize))
+ s.stkptrsize = types.RoundUp(s.stkptrsize, int64(types.RegSize))
}
const maxStackSize = 1 << 30
typ = types.NewPtr(typ)
}
- offset = types.Rnd(offset, typ.Alignment())
+ offset = types.RoundUp(offset, typ.Alignment())
ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)
offset += typ.Size()
for _, arg := range args {
t := arg.Type
- off = types.Rnd(off, t.Alignment())
+ off = types.RoundUp(off, t.Alignment())
size := t.Size()
callArgs = append(callArgs, arg)
callArgTypes = append(callArgTypes, t)
off += size
}
- off = types.Rnd(off, int64(types.RegSize))
+ off = types.RoundUp(off, int64(types.RegSize))
// Issue call
var call *ssa.Value
// Load results
res := make([]*ssa.Value, len(results))
for i, t := range results {
- off = types.Rnd(off, t.Alignment())
+ off = types.RoundUp(off, t.Alignment())
res[i] = s.resultOfCall(call, int64(i), t)
off += t.Size()
}
- off = types.Rnd(off, int64(types.PtrSize))
+ off = types.RoundUp(off, int64(types.PtrSize))
// Remember how much callee stack space we needed.
call.AuxInt = off
func defframe(s *State, e *ssafn, f *ssa.Func) {
pp := s.pp
- frame := types.Rnd(s.maxarg+e.stksize, int64(types.RegSize))
+ frame := types.RoundUp(s.maxarg+e.stksize, int64(types.RegSize))
if Arch.PadFrame != nil {
frame = Arch.PadFrame(frame)
}
// Fill in argument and frame size.
pp.Text.To.Type = obj.TYPE_TEXTSIZE
- pp.Text.To.Val = int32(types.Rnd(f.OwnAux.ArgWidth(), int64(types.RegSize)))
+ pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
pp.Text.To.Offset = frame
p := pp.Text
// the size of a pointer, set in gc.Main (see ../gc/main.go).
var defercalc int
-func Rnd(o int64, r int64) int64 {
+// RoundUp rounds o to a multiple of r, r is a power of 2.
+func RoundUp(o int64, r int64) int64 {
if r < 1 || r > 8 || r&(r-1) != 0 {
- base.Fatalf("rnd %d", r)
+ base.Fatalf("Round %d", r)
}
return (o + r - 1) &^ (r - 1)
}
maxalign = int32(f.Type.align)
}
if f.Type.align > 0 {
- o = Rnd(o, int64(f.Type.align))
+ o = RoundUp(o, int64(f.Type.align))
}
if isStruct { // For receiver/args/results, do not set, it depends on ABI
f.Offset = o
// final width is rounded
if flag != 0 {
- o = Rnd(o, int64(maxalign))
+ o = RoundUp(o, int64(maxalign))
}
t.align = uint8(maxalign)
}
SlicePtrOffset = 0
- SliceLenOffset = Rnd(SlicePtrOffset+int64(PtrSize), int64(PtrSize))
- SliceCapOffset = Rnd(SliceLenOffset+int64(PtrSize), int64(PtrSize))
- SliceSize = Rnd(SliceCapOffset+int64(PtrSize), int64(PtrSize))
+ SliceLenOffset = RoundUp(SlicePtrOffset+int64(PtrSize), int64(PtrSize))
+ SliceCapOffset = RoundUp(SliceLenOffset+int64(PtrSize), int64(PtrSize))
+ SliceSize = RoundUp(SliceCapOffset+int64(PtrSize), int64(PtrSize))
// string is same as slice without the cap
- StringSize = Rnd(SliceLenOffset+int64(PtrSize), int64(PtrSize))
+ StringSize = RoundUp(SliceLenOffset+int64(PtrSize), int64(PtrSize))
for et := Kind(0); et < NTYPE; et++ {
SimType[et] = et