continue
}
if n.Class != gc.PAUTO {
- gc.Fatal("needzero class %d", n.Class)
+ gc.Fatalf("needzero class %d", n.Class)
}
if n.Type.Width%int64(gc.Widthptr) != 0 || n.Xoffset%int64(gc.Widthptr) != 0 || n.Type.Width == 0 {
- gc.Fatal("var %v has size %d offset %d", gc.Nconv(n, obj.FmtLong), int(n.Type.Width), int(n.Xoffset))
+ gc.Fatalf("var %v has size %d offset %d", gc.Nconv(n, obj.FmtLong), int(n.Type.Width), int(n.Xoffset))
}
if lo != hi && n.Xoffset+n.Type.Width >= lo-int64(2*gc.Widthreg) {
if cnt%int64(gc.Widthreg) != 0 {
// should only happen with nacl
if cnt%int64(gc.Widthptr) != 0 {
- gc.Fatal("zerorange count not a multiple of widthptr %d", cnt)
+ gc.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
}
p = appendpp(p, x86.AMOVL, obj.TYPE_REG, x86.REG_AX, 0, obj.TYPE_MEM, x86.REG_SP, frame+lo)
lo += int64(gc.Widthptr)
switch uint32(ft)<<16 | uint32(tt) {
default:
- gc.Fatal("gmove %v -> %v", gc.Tconv(f.Type, obj.FmtLong), gc.Tconv(t.Type, obj.FmtLong))
+ gc.Fatalf("gmove %v -> %v", gc.Tconv(f.Type, obj.FmtLong), gc.Tconv(t.Type, obj.FmtLong))
/*
* integer copy and truncate
case x86.ALEAQ:
if f != nil && gc.Isconst(f, gc.CTNIL) {
- gc.Fatal("gins LEAQ nil %v", f.Type)
+ gc.Fatalf("gins LEAQ nil %v", f.Type)
}
}
if w != 0 && ((f != nil && p.From.Width < int64(w)) || (t != nil && p.To.Width > int64(w))) {
gc.Dump("f", f)
gc.Dump("t", t)
- gc.Fatal("bad width: %v (%d, %d)\n", p, p.From.Width, p.To.Width)
+ gc.Fatalf("bad width: %v (%d, %d)\n", p, p.From.Width, p.To.Width)
}
if p.To.Type == obj.TYPE_ADDR && w > 0 {
- gc.Fatal("bad use of addr: %v", p)
+ gc.Fatalf("bad use of addr: %v", p)
}
return p
*/
func optoas(op int, t *gc.Type) int {
if t == nil {
- gc.Fatal("optoas: t is nil")
+ gc.Fatalf("optoas: t is nil")
}
a := obj.AXXX
switch uint32(op)<<16 | uint32(gc.Simtype[t.Etype]) {
default:
- gc.Fatal("optoas: no entry %v-%v", gc.Oconv(int(op), 0), t)
+ gc.Fatalf("optoas: no entry %v-%v", gc.Oconv(int(op), 0), t)
case gc.OADDR<<16 | gc.TPTR32:
a = x86.ALEAL
case x86.AJPS:
return x86.ASETPS
}
- gc.Fatal("jmptoset: no entry for %v", gc.Oconv(jmp, 0))
+ gc.Fatalf("jmptoset: no entry for %v", gc.Oconv(jmp, 0))
panic("unreachable")
}
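optoas dispatches on an (op, type) pair by packing both small enums into a single switch key: the op goes in the high 16 bits, the simplified type in the low 16. A minimal standalone sketch of the idiom, with illustrative constants rather than the compiler's:

	const (
		opAdd = iota + 1
		opSub
	)

	const (
		tInt32 = iota + 1
		tInt64
	)

	// pick mirrors optoas: one flat switch over the packed (op, type) key.
	func pick(op, t int) string {
		switch uint32(op)<<16 | uint32(t) {
		case opAdd<<16 | tInt32:
			return "ADDL"
		case opAdd<<16 | tInt64:
			return "ADDQ"
		case opSub<<16 | tInt32:
			return "SUBL"
		}
		return "unknown"
	}

Packing works because both values fit comfortably in 16 bits, and a single flat switch keeps the op/type dispatch in one table instead of nested branches.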
for i := 1; i < o; i++ {
if oary[i] >= 0 {
- gc.Fatal("can't happen")
+ gc.Fatalf("can't happen")
}
gins(movptr, &n1, reg)
gc.Cgen_checknil(reg)
*/
func copyas(a *obj.Addr, v *obj.Addr) bool {
if x86.REG_AL <= a.Reg && a.Reg <= x86.REG_R15B {
- gc.Fatal("use of byte register")
+ gc.Fatalf("use of byte register")
}
if x86.REG_AL <= v.Reg && v.Reg <= x86.REG_R15B {
- gc.Fatal("use of byte register")
+ gc.Fatalf("use of byte register")
}
if a.Type != v.Type || a.Name != v.Name || a.Reg != v.Reg {
info := &p.Info
*info = progtable[p.As]
if info.Flags == 0 {
- gc.Fatal("unknown instruction %v", p)
+ gc.Fatalf("unknown instruction %v", p)
}
if (info.Flags&gc.ShiftCX != 0) && p.From.Type != obj.TYPE_CONST {
var op int
switch align {
default:
- gc.Fatal("sgen: invalid alignment %d for %v", align, n.Type)
+ gc.Fatalf("sgen: invalid alignment %d for %v", align, n.Type)
case 1:
op = arm.AMOVB
}
if w%int64(align) != 0 {
- gc.Fatal("sgen: unaligned size %d (align=%d) for %v", w, align, n.Type)
+ gc.Fatalf("sgen: unaligned size %d (align=%d) for %v", w, align, n.Type)
}
c := int32(w / int64(align))
if osrc%int64(align) != 0 || odst%int64(align) != 0 {
- gc.Fatal("sgen: unaligned offset src %d or dst %d (align %d)", osrc, odst, align)
+ gc.Fatalf("sgen: unaligned offset src %d or dst %d (align %d)", osrc, odst, align)
}
// if we are copying forward on the stack and
if res.Op != gc.OINDREG && res.Op != gc.ONAME {
gc.Dump("n", n)
gc.Dump("res", res)
- gc.Fatal("cgen64 %v of %v", gc.Oconv(int(n.Op), 0), gc.Oconv(int(res.Op), 0))
+ gc.Fatalf("cgen64 %v of %v", gc.Oconv(int(n.Op), 0), gc.Oconv(int(res.Op), 0))
}
l := n.Left
split64(l, &lo1, &hi1)
switch n.Op {
default:
- gc.Fatal("cgen64 %v", gc.Oconv(int(n.Op), 0))
+ gc.Fatalf("cgen64 %v", gc.Oconv(int(n.Op), 0))
case gc.OMINUS:
var lo2 gc.Node
// Do op. Leave result in ah:al.
switch n.Op {
default:
- gc.Fatal("cgen64: not implemented: %v\n", n)
+ gc.Fatalf("cgen64: not implemented: %v\n", n)
// TODO: Constants
case gc.OADD:
var br *obj.Prog
switch op {
default:
- gc.Fatal("cmp64 %v %v", gc.Oconv(int(op), 0), t)
+ gc.Fatalf("cmp64 %v %v", gc.Oconv(int(op), 0), t)
// cmp hi
// bne L
continue
}
if n.Class != gc.PAUTO {
- gc.Fatal("needzero class %d", n.Class)
+ gc.Fatalf("needzero class %d", n.Class)
}
if n.Type.Width%int64(gc.Widthptr) != 0 || n.Xoffset%int64(gc.Widthptr) != 0 || n.Type.Width == 0 {
- gc.Fatal("var %v has size %d offset %d", gc.Nconv(n, obj.FmtLong), int(n.Type.Width), int(n.Xoffset))
+ gc.Fatalf("var %v has size %d offset %d", gc.Nconv(n, obj.FmtLong), int(n.Type.Width), int(n.Xoffset))
}
if lo != hi && n.Xoffset+n.Type.Width >= lo-int64(2*gc.Widthptr) {
// merge with range we already have
p.To.Offset = int64(n2.Reg)
default:
- gc.Fatal("cgen_hmul %v", t)
+ gc.Fatalf("cgen_hmul %v", t)
}
gc.Cgen(&n1, res)
*/
func cgen_shift(op int, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node) {
if nl.Type.Width > 4 {
- gc.Fatal("cgen_shift %v", nl.Type)
+ gc.Fatalf("cgen_shift %v", nl.Type)
}
w := int(nl.Type.Width * 8)
gc.Warnl(int(p.Lineno), "generated nil check")
}
if p.From.Type != obj.TYPE_REG {
- gc.Fatal("invalid nil check %v", p)
+ gc.Fatalf("invalid nil check %v", p)
}
reg = int(p.From.Reg)
*/
func split64(n *gc.Node, lo *gc.Node, hi *gc.Node) {
if !gc.Is64(n.Type) {
- gc.Fatal("split64 %v", n.Type)
+ gc.Fatalf("split64 %v", n.Type)
}
if nsclean >= len(sclean) {
- gc.Fatal("split64 clean")
+ gc.Fatalf("split64 clean")
}
sclean[nsclean].Op = gc.OEMPTY
nsclean++
func splitclean() {
if nsclean <= 0 {
- gc.Fatal("splitclean")
+ gc.Fatalf("splitclean")
}
nsclean--
if sclean[nsclean].Op != gc.OEMPTY {
switch uint32(ft)<<16 | uint32(tt) {
default:
// should not happen
- gc.Fatal("gmove %v -> %v", f, t)
+ gc.Fatalf("gmove %v -> %v", f, t)
return
/*
case gc.TUINT64<<16 | gc.TFLOAT32,
gc.TUINT64<<16 | gc.TFLOAT64:
- gc.Fatal("gmove UINT64, TFLOAT not implemented")
+ gc.Fatalf("gmove UINT64, TFLOAT not implemented")
return
/*
// int32 v;
if f != nil && f.Op == gc.OINDEX {
- gc.Fatal("gins OINDEX not implemented")
+ gc.Fatalf("gins OINDEX not implemented")
}
// gc.Regalloc(&nod, &regnode, Z);
// idx.reg = nod.reg;
// gc.Regfree(&nod);
if t != nil && t.Op == gc.OINDEX {
- gc.Fatal("gins OINDEX not implemented")
+ gc.Fatalf("gins OINDEX not implemented")
}
// gc.Regalloc(&nod, &regnode, Z);
/* generate a comparison
TODO(kaib): one of the args can actually be a small constant. relax the constraint and fix call sites.
*/
- gc.Fatal("bad operands to gcmp")
+ gc.Fatalf("bad operands to gcmp")
}
p.From = p.To
p.To = obj.Addr{}
case arm.AMULU:
if f != nil && f.Op != gc.OREGISTER {
- gc.Fatal("bad operands to mul")
+ gc.Fatalf("bad operands to mul")
}
case arm.AMOVW:
if (p.From.Type == obj.TYPE_MEM || p.From.Type == obj.TYPE_ADDR || p.From.Type == obj.TYPE_CONST) && (p.To.Type == obj.TYPE_MEM || p.To.Type == obj.TYPE_ADDR) {
- gc.Fatal("gins double memory")
+ gc.Fatalf("gins double memory")
}
case arm.AADD:
if p.To.Type == obj.TYPE_MEM {
- gc.Fatal("gins arith to mem")
+ gc.Fatalf("gins arith to mem")
}
case arm.ARSB:
if p.From.Type == obj.TYPE_NONE {
- gc.Fatal("rsb with no from")
+ gc.Fatalf("rsb with no from")
}
}
gc.Naddr(&a, n)
if a.Type != obj.TYPE_REG {
if n != nil {
- gc.Fatal("bad in raddr: %v", gc.Oconv(int(n.Op), 0))
+ gc.Fatalf("bad in raddr: %v", gc.Oconv(int(n.Op), 0))
} else {
- gc.Fatal("bad in raddr: <null>")
+ gc.Fatalf("bad in raddr: <null>")
}
p.Reg = 0
} else {
*/
func gshift(as int, lhs *gc.Node, stype int32, sval int32, rhs *gc.Node) *obj.Prog {
if sval <= 0 || sval > 32 {
- gc.Fatal("bad shift value: %d", sval)
+ gc.Fatalf("bad shift value: %d", sval)
}
sval = sval & 0x1f
*/
func optoas(op int, t *gc.Type) int {
if t == nil {
- gc.Fatal("optoas: t is nil")
+ gc.Fatalf("optoas: t is nil")
}
a := obj.AXXX
switch uint32(op)<<16 | uint32(gc.Simtype[t.Etype]) {
default:
- gc.Fatal("optoas: no entry %v-%v etype %v simtype %v", gc.Oconv(int(op), 0), t, gc.Types[t.Etype], gc.Types[gc.Simtype[t.Etype]])
+ gc.Fatalf("optoas: no entry %v-%v etype %v simtype %v", gc.Oconv(int(op), 0), t, gc.Types[t.Etype], gc.Types[gc.Simtype[t.Etype]])
/* case CASE(OADDR, TPTR32):
a = ALEAL;
for i := 1; i < o; i++ {
if oary[i] >= 0 {
- gc.Fatal("can't happen")
+ gc.Fatalf("can't happen")
}
gins(arm.AMOVW, &n1, reg)
gc.Cgen_checknil(reg)
info := &p.Info
*info = progtable[p.As]
if info.Flags == 0 {
- gc.Fatal("unknown instruction %v", p)
+ gc.Fatalf("unknown instruction %v", p)
}
if p.From.Type == obj.TYPE_ADDR && p.From.Sym != nil && (info.Flags&gc.LeftRead != 0) {
var op int
switch align {
default:
- gc.Fatal("sgen: invalid alignment %d for %v", align, n.Type)
+ gc.Fatalf("sgen: invalid alignment %d for %v", align, n.Type)
case 1:
op = arm64.AMOVB
}
if w%int64(align) != 0 {
- gc.Fatal("sgen: unaligned size %d (align=%d) for %v", w, align, n.Type)
+ gc.Fatalf("sgen: unaligned size %d (align=%d) for %v", w, align, n.Type)
}
c := int32(w / int64(align))
if osrc%int64(align) != 0 || odst%int64(align) != 0 {
- gc.Fatal("sgen: unaligned offset src %d or dst %d (align %d)", osrc, odst, align)
+ gc.Fatalf("sgen: unaligned offset src %d or dst %d (align %d)", osrc, odst, align)
}
// if we are copying forward on the stack and
continue
}
if n.Class != gc.PAUTO {
- gc.Fatal("needzero class %d", n.Class)
+ gc.Fatalf("needzero class %d", n.Class)
}
if n.Type.Width%int64(gc.Widthptr) != 0 || n.Xoffset%int64(gc.Widthptr) != 0 || n.Type.Width == 0 {
- gc.Fatal("var %v has size %d offset %d", gc.Nconv(n, obj.FmtLong), int(n.Type.Width), int(n.Xoffset))
+ gc.Fatalf("var %v has size %d offset %d", gc.Nconv(n, obj.FmtLong), int(n.Type.Width), int(n.Xoffset))
}
if lo != hi && n.Xoffset+n.Type.Width >= lo-int64(2*gc.Widthreg) {
}
default:
- gc.Fatal("cgen_hmul %v", t)
+ gc.Fatalf("cgen_hmul %v", t)
}
gc.Cgen(&n1, res)
gc.Warnl(int(p.Lineno), "generated nil check")
}
if p.From.Type != obj.TYPE_REG {
- gc.Fatal("invalid nil check %v\n", p)
+ gc.Fatalf("invalid nil check %v\n", p)
}
// check is
switch as {
default:
- gc.Fatal("ginscon2")
+ gc.Fatalf("ginscon2")
case arm64.ACMP:
if -arm64.BIG <= c && c <= arm64.BIG {
switch uint32(ft)<<16 | uint32(tt) {
default:
- gc.Fatal("gmove %v -> %v", gc.Tconv(f.Type, obj.FmtLong), gc.Tconv(t.Type, obj.FmtLong))
+ gc.Fatalf("gmove %v -> %v", gc.Tconv(f.Type, obj.FmtLong), gc.Tconv(t.Type, obj.FmtLong))
/*
* integer copy and truncate
case arm64.ACMP, arm64.AFCMPS, arm64.AFCMPD:
if t != nil {
if f.Op != gc.OREGISTER {
- gc.Fatal("bad operands to gcmp")
+ gc.Fatalf("bad operands to gcmp")
}
p.From = p.To
p.To = obj.Addr{}
case arm64.AAND, arm64.AMUL:
if p.From.Type == obj.TYPE_CONST {
gc.Debug['h'] = 1
- gc.Fatal("bad inst: %v", p)
+ gc.Fatalf("bad inst: %v", p)
}
case arm64.ACMP:
if p.From.Type == obj.TYPE_MEM || p.To.Type == obj.TYPE_MEM {
gc.Debug['h'] = 1
- gc.Fatal("bad inst: %v", p)
+ gc.Fatalf("bad inst: %v", p)
}
}
if w != 0 && ((f != nil && p.From.Width < int64(w)) || (t != nil && p.To.Type != obj.TYPE_REG && p.To.Width > int64(w))) {
gc.Dump("f", f)
gc.Dump("t", t)
- gc.Fatal("bad width: %v (%d, %d)\n", p, p.From.Width, p.To.Width)
+ gc.Fatalf("bad width: %v (%d, %d)\n", p, p.From.Width, p.To.Width)
}
return p
gc.Naddr(&a, n)
if a.Type != obj.TYPE_REG {
if n != nil {
- gc.Fatal("bad in raddr: %v", gc.Oconv(int(n.Op), 0))
+ gc.Fatalf("bad in raddr: %v", gc.Oconv(int(n.Op), 0))
} else {
- gc.Fatal("bad in raddr: <null>")
+ gc.Fatalf("bad in raddr: <null>")
}
p.Reg = 0
} else {
func gcmp(as int, lhs *gc.Node, rhs *gc.Node) *obj.Prog {
if lhs.Op != gc.OREGISTER {
- gc.Fatal("bad operands to gcmp: %v %v", gc.Oconv(int(lhs.Op), 0), gc.Oconv(int(rhs.Op), 0))
+ gc.Fatalf("bad operands to gcmp: %v %v", gc.Oconv(int(lhs.Op), 0), gc.Oconv(int(rhs.Op), 0))
}
p := rawgins(as, rhs, nil)
*/
func optoas(op int, t *gc.Type) int {
if t == nil {
- gc.Fatal("optoas: t is nil")
+ gc.Fatalf("optoas: t is nil")
}
a := int(obj.AXXX)
switch uint32(op)<<16 | uint32(gc.Simtype[t.Etype]) {
default:
- gc.Fatal("optoas: no entry for op=%v type=%v", gc.Oconv(int(op), 0), t)
+ gc.Fatalf("optoas: no entry for op=%v type=%v", gc.Oconv(int(op), 0), t)
case gc.OEQ<<16 | gc.TBOOL,
gc.OEQ<<16 | gc.TINT8,
info := &p.Info
*info = progtable[p.As]
if info.Flags == 0 {
- gc.Fatal("proginfo: unknown instruction %v", p)
+ gc.Fatalf("proginfo: unknown instruction %v", p)
}
if (info.Flags&gc.RegRead != 0) && p.Reg == 0 {
func Rnd(o int64, r int64) int64 {
if r < 1 || r > 8 || r&(r-1) != 0 {
- Fatal("rnd %d", r)
+ Fatalf("rnd %d", r)
}
return (o + r - 1) &^ (r - 1)
}
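Rnd rounds o up to the next multiple of r, relying on r being a power of two: add r-1, then clear the low bits with &^ (AND NOT). A minimal sketch of the same computation without the range check (hypothetical helper, not part of this CL):

	// roundUp rounds o up to the next multiple of r; r must be a power of two.
	func roundUp(o, r int64) int64 {
		return (o + r - 1) &^ (r - 1)
	}

For example, roundUp(13, 8) == 16, roundUp(16, 8) == 16, and roundUp(0, 4) == 0.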
o := int32(0)
for f := t.Type; f != nil; f = f.Down {
if f.Etype != TFIELD {
- Fatal("offmod: not TFIELD: %v", Tconv(f, obj.FmtLong))
+ Fatalf("offmod: not TFIELD: %v", Tconv(f, obj.FmtLong))
}
f.Width = int64(o)
o += int32(Widthptr)
var w int64
for f := t.Type; f != nil; f = f.Down {
if f.Etype != TFIELD {
- Fatal("widstruct: not TFIELD: %v", Tconv(f, obj.FmtLong))
+ Fatalf("widstruct: not TFIELD: %v", Tconv(f, obj.FmtLong))
}
if f.Type == nil {
// broken field, just skip it so that other valid fields
maxalign = int32(f.Type.Align)
}
if f.Type.Width < 0 {
- Fatal("invalid width %d", f.Type.Width)
+ Fatalf("invalid width %d", f.Type.Width)
}
w = f.Type.Width
if f.Type.Align > 0 {
func dowidth(t *Type) {
if Widthptr == 0 {
- Fatal("dowidth without betypeinit")
+ Fatalf("dowidth without betypeinit")
}
if t == nil {
if t.Width > 0 {
if t.Align == 0 {
// See issue 11354
- Fatal("zero alignment with nonzero size %v", t)
+ Fatalf("zero alignment with nonzero size %v", t)
}
return
}
w := int64(0)
switch et {
default:
- Fatal("dowidth: unknown type: %v", t)
+ Fatalf("dowidth: unknown type: %v", t)
/* compiler-specific stuff */
case TINT8, TUINT8, TBOOL:
// dummy type; should be replaced before use.
case TANY:
if Debug['A'] == 0 {
- Fatal("dowidth any")
+ Fatalf("dowidth any")
}
w = 1 // anything will do
case TSTRING:
if sizeof_String == 0 {
- Fatal("early dowidth string")
+ Fatalf("early dowidth string")
}
w = int64(sizeof_String)
t.Align = uint8(Widthptr)
t.Broke = 1
}
} else {
- Fatal("dowidth %v", t) // probably [...]T
+ Fatalf("dowidth %v", t) // probably [...]T
}
case TSTRUCT:
if t.Funarg != 0 {
- Fatal("dowidth fn struct %v", t)
+ Fatalf("dowidth fn struct %v", t)
}
w = widstruct(t, t, 0, 1)
t.Width = w
if t.Align == 0 {
if w > 8 || w&(w-1) != 0 {
- Fatal("invalid alignment for %v", t)
+ Fatalf("invalid alignment for %v", t)
}
t.Align = uint8(w)
}
// function arg structs should not be checked
// outside of the enclosing function.
if t.Funarg != 0 {
- Fatal("checkwidth %v", t)
+ Fatalf("checkwidth %v", t)
}
if defercalc == 0 {
func defercheckwidth() {
// we get out of sync on syntax errors, so don't be pedantic.
if defercalc != 0 && nerrors == 0 {
- Fatal("defercheckwidth")
+ Fatalf("defercheckwidth")
}
defercalc = 1
}
func resumecheckwidth() {
if defercalc == 0 {
- Fatal("resumecheckwidth")
+ Fatalf("resumecheckwidth")
}
for l := tlq; l != nil; l = tlq {
l.t.Deferwidth = 0
func typeinit() {
if Widthptr == 0 {
- Fatal("typeinit before betypeinit")
+ Fatalf("typeinit before betypeinit")
}
for i := 0; i < NTYPE; i++ {
etype = Thearch.Typedefs[i].Etype
if etype < 0 || etype >= len(Types) {
- Fatal("typeinit: %s bad etype", s.Name)
+ Fatalf("typeinit: %s bad etype", s.Name)
}
sameas = Thearch.Typedefs[i].Sameas
if sameas < 0 || sameas >= len(Types) {
- Fatal("typeinit: %s bad sameas", s.Name)
+ Fatalf("typeinit: %s bad sameas", s.Name)
}
Simtype[etype] = uint8(sameas)
minfltval[etype] = minfltval[sameas]
t = Types[etype]
if t != nil {
- Fatal("typeinit: %s already defined", s.Name)
+ Fatalf("typeinit: %s already defined", s.Name)
}
t = typ(etype)
w = (w + int64(Widthptr) - 1) &^ (int64(Widthptr) - 1)
if int64(int(w)) != w {
- Fatal("argsize too big")
+ Fatalf("argsize too big")
}
return int(w)
}
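The int64(int(w)) != w comparison just above is a portable overflow check: on a 32-bit host int is 32 bits wide, so any w outside its range changes value on the round-trip through int and the Fatalf fires; on a 64-bit host the conversion is lossless and the check never triggers.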
func bvcmp(bv1 Bvec, bv2 Bvec) int {
if bv1.n != bv2.n {
- Fatal("bvequal: lengths %d and %d are not equal", bv1.n, bv2.n)
+ Fatalf("bvequal: lengths %d and %d are not equal", bv1.n, bv2.n)
}
for i, x := range bv1.b {
if x != bv2.b[i] {
func bvget(bv Bvec, i int32) int {
if i < 0 || i >= bv.n {
- Fatal("bvget: index %d is out of bounds with length %d\n", i, bv.n)
+ Fatalf("bvget: index %d is out of bounds with length %d\n", i, bv.n)
}
return int((bv.b[i>>WORDSHIFT] >> uint(i&WORDMASK)) & 1)
}
func bvreset(bv Bvec, i int32) {
if i < 0 || i >= bv.n {
- Fatal("bvreset: index %d is out of bounds with length %d\n", i, bv.n)
+ Fatalf("bvreset: index %d is out of bounds with length %d\n", i, bv.n)
}
mask := uint32(^(1 << uint(i%WORDBITS)))
bv.b[i/WORDBITS] &= mask
func bvset(bv Bvec, i int32) {
if i < 0 || i >= bv.n {
- Fatal("bvset: index %d is out of bounds with length %d\n", i, bv.n)
+ Fatalf("bvset: index %d is out of bounds with length %d\n", i, bv.n)
}
mask := uint32(1 << uint(i%WORDBITS))
bv.b[i/WORDBITS] |= mask
}
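bvget, bvreset, and bvset all locate bit i the same way: word index i>>WORDSHIFT (equivalently i/WORDBITS) and in-word position i&WORDMASK (equivalently i%WORDBITS), interchangeable because the word size is a power of two. A minimal sketch of the indexing, assuming the usual 32-bit words:

	const (
		wordBits  = 32
		wordShift = 5            // log2(wordBits)
		wordMask  = wordBits - 1 // selects the in-word bit position
	)

	// set turns on bit i: word i>>wordShift, position i&wordMask.
	func set(words []uint32, i int32) {
		words[i>>wordShift] |= 1 << uint(i&wordMask)
	}

	// get reads bit i back out as 0 or 1.
	func get(words []uint32, i int32) int {
		return int((words[i>>wordShift] >> uint(i&wordMask)) & 1)
	}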
if res == nil || res.Type == nil {
- Fatal("cgen: res nil")
+ Fatalf("cgen: res nil")
}
for n.Op == OCONVNOP {
if n.Ullman >= UINF {
if n.Op == OINDREG {
- Fatal("cgen: this is going to miscompile")
+ Fatalf("cgen: this is going to miscompile")
}
if res.Ullman >= UINF {
var n1 Node
if Isfat(n.Type) {
if n.Type.Width < 0 {
- Fatal("forgot to compute width for %v", n.Type)
+ Fatalf("forgot to compute width for %v", n.Type)
}
sgen_wb(n, res, n.Type.Width, wb)
return
if n1.Ullman > res.Ullman {
Dump("n1", &n1)
Dump("res", res)
- Fatal("loop in cgen")
+ Fatalf("loop in cgen")
}
cgen_wb(&n1, res, wb)
if wb {
if int(Simtype[res.Type.Etype]) != Tptr {
- Fatal("cgen_wb of type %v", res.Type)
+ Fatalf("cgen_wb of type %v", res.Type)
}
if n.Ullman >= UINF {
var n1 Node
default:
Dump("cgen", n)
Dump("cgen-res", res)
- Fatal("cgen: unknown op %v", Nconv(n, obj.FmtShort|obj.FmtSign))
+ Fatalf("cgen: unknown op %v", Nconv(n, obj.FmtShort|obj.FmtSign))
case OOROR, OANDAND,
OEQ, ONE,
break
}
- Fatal("cgen: OLEN: unknown type %v", Tconv(nl.Type, obj.FmtLong))
+ Fatalf("cgen: OLEN: unknown type %v", Tconv(nl.Type, obj.FmtLong))
case OCAP:
if Istype(nl.Type, TCHAN) {
break
}
- Fatal("cgen: OCAP: unknown type %v", Tconv(nl.Type, obj.FmtLong))
+ Fatalf("cgen: OCAP: unknown type %v", Tconv(nl.Type, obj.FmtLong))
case OADDR:
if n.Bounded { // let race detector avoid nil checks
}
if Isfat(n.Type) {
- Fatal("cgenr on fat node")
+ Fatalf("cgenr on fat node")
}
if n.Addable {
// constant index
if Isconst(nr, CTINT) {
if Isconst(nl, CTSTR) {
- Fatal("constant string constant index")
+ Fatalf("constant string constant index")
}
v := uint64(Mpgetfix(nr.Val().U.(*Mpint)))
var n2 Node
// constant index
if Isconst(nr, CTINT) {
if Isconst(nl, CTSTR) {
- Fatal("constant string constant index") // front end should handle
+ Fatalf("constant string constant index") // front end should handle
}
v := uint64(Mpgetfix(nr.Val().U.(*Mpint)))
if Isslice(nl.Type) || nl.Type.Etype == TSTRING {
index:
if Isconst(nr, CTINT) {
if Isconst(nl, CTSTR) {
- Fatal("constant string constant index") // front end should handle
+ Fatalf("constant string constant index") // front end should handle
}
v := uint64(Mpgetfix(nr.Val().U.(*Mpint)))
if Isslice(nl.Type) || nl.Type.Etype == TSTRING {
if n.Addable {
if n.Op == OREGISTER {
- Fatal("agen OREGISTER")
+ Fatalf("agen OREGISTER")
}
var n1 Node
n1.Op = OADDR
switch n.Op {
default:
- Fatal("agen: unknown op %v", Nconv(n, obj.FmtShort|obj.FmtSign))
+ Fatalf("agen: unknown op %v", Nconv(n, obj.FmtShort|obj.FmtSign))
case OCALLMETH:
cgen_callmeth(n, 0)
// should only get here with names in this func.
if n.Name.Funcdepth > 0 && n.Name.Funcdepth != Funcdepth {
Dump("bad agen", n)
- Fatal("agen: bad ONAME funcdepth %d != %d", n.Name.Funcdepth, Funcdepth)
+ Fatalf("agen: bad ONAME funcdepth %d != %d", n.Name.Funcdepth, Funcdepth)
}
// should only get here for heap vars or paramref
if n.Class&PHEAP == 0 && n.Class != PPARAMREF {
Dump("bad agen", n)
- Fatal("agen: bad ONAME class %#x", n.Class)
+ Fatalf("agen: bad ONAME class %#x", n.Class)
}
Cgen(n.Name.Heapaddr, res)
}
if n.Type.Etype != TBOOL {
- Fatal("bgen: bad type %v for %v", n.Type, Oconv(int(n.Op), 0))
+ Fatalf("bgen: bad type %v for %v", n.Type, Oconv(int(n.Op), 0))
}
for n.Op == OCONVNOP {
// We can fix that as we go.
switch Ctxt.Arch.Thechar {
case '5', '7', '9':
- Fatal("genval 5g, 7g, 9g ONAMES not fully implemented")
+ Fatalf("genval 5g, 7g, 9g ONAMES not fully implemented")
}
Cgen(n, res)
if !wantTrue {
case OLITERAL:
// n is a constant.
if !Isconst(n, CTBOOL) {
- Fatal("bgen: non-bool const %v\n", Nconv(n, obj.FmtLong))
+ Fatalf("bgen: non-bool const %v\n", Nconv(n, obj.FmtLong))
}
if genval {
Cgen(Nodbool(wantTrue == n.Val().U.(bool)), res)
switch Ctxt.Arch.Thechar {
case '5':
if genval {
- Fatal("genval 5g Isfloat special cases not implemented")
+ Fatalf("genval 5g Isfloat special cases not implemented")
}
switch n.Op {
case ONE:
}
case '7', '9':
if genval {
- Fatal("genval 7g, 9g Isfloat special cases not implemented")
+ Fatalf("genval 7g, 9g Isfloat special cases not implemented")
}
switch n.Op {
// On arm64 and ppc64, <= and >= mishandle NaN. Must decompose into < or > and =.
}
if n.Ullman >= UINF && ns.Ullman >= UINF {
- Fatal("sgen UINF")
+ Fatalf("sgen UINF")
}
if w < 0 {
- Fatal("sgen copy %d", w)
+ Fatalf("sgen copy %d", w)
}
// If copying .args, that's all the results, so record definition sites
switch proc {
default:
- Fatal("Ginscall: bad proc %d", proc)
+ Fatalf("Ginscall: bad proc %d", proc)
case 0, // normal call
-1: // normal call but no return
Ginscall(Newproc, 0)
} else {
if Hasdefer == 0 {
- Fatal("hasdefer=0 but has defer")
+ Fatalf("hasdefer=0 but has defer")
}
Ginscall(Deferproc, 0)
}
func cgen_callinter(n *Node, res *Node, proc int) {
i := n.Left
if i.Op != ODOTINTER {
- Fatal("cgen_callinter: not ODOTINTER %v", Oconv(int(i.Op), 0))
+ Fatalf("cgen_callinter: not ODOTINTER %v", Oconv(int(i.Op), 0))
}
f := i.Right // field
if f.Op != ONAME {
- Fatal("cgen_callinter: not ONAME %v", Oconv(int(f.Op), 0))
+ Fatalf("cgen_callinter: not ONAME %v", Oconv(int(f.Op), 0))
}
i = i.Left // interface
var nodr Node
Regalloc(&nodr, Types[Tptr], &nodo)
if n.Left.Xoffset == BADWIDTH {
- Fatal("cgen_callinter: badwidth")
+ Fatalf("cgen_callinter: badwidth")
}
Cgen_checknil(&nodo) // in case offset is huge
nodo.Op = OINDREG
var flist Iter
fp := Structfirst(&flist, Getoutarg(t))
if fp == nil {
- Fatal("cgen_callret: nil")
+ Fatalf("cgen_callret: nil")
}
var nod Node
var flist Iter
fp := Structfirst(&flist, Getoutarg(t))
if fp == nil {
- Fatal("cgen_aret: nil")
+ Fatalf("cgen_aret: nil")
}
var nod1 Node
if res.Op != ONAME && !samesafeexpr(res, n.List.N) {
Dump("cgen_append-n", n)
Dump("cgen_append-res", res)
- Fatal("append not lowered")
+ Fatalf("append not lowered")
}
for l := n.List; l != nil; l = l.Next {
if l.N.Ullman >= UINF {
- Fatal("append with function call arguments")
+ Fatalf("append with function call arguments")
}
}
// but it will be represented in 32 bits.
if Ctxt.Arch.Regsize == 4 && Is64(n1.Type) {
if mpcmpfixc(n1.Val().U.(*Mpint), 1<<31) >= 0 {
- Fatal("missed slice out of bounds check")
+ Fatalf("missed slice out of bounds check")
}
var tmp Node
Nodconst(&tmp, indexRegType, Mpgetfix(n1.Val().U.(*Mpint)))
if !n.Name.Captured {
n.Name.Captured = true
if n.Name.Decldepth == 0 {
- Fatal("typecheckclosure: var %v does not have decldepth assigned", Nconv(n, obj.FmtShort))
+ Fatalf("typecheckclosure: var %v does not have decldepth assigned", Nconv(n, obj.FmtShort))
}
// Ignore assignments to the variable in straightline code
n.Func.Outerfunc.Func.Closgen++
gen = n.Func.Outerfunc.Func.Closgen
} else {
- Fatal("closurename called for %v", Nconv(n, obj.FmtShort))
+ Fatalf("closurename called for %v", Nconv(n, obj.FmtShort))
}
n.Sym = Lookupf("%s.%s%d", outer, prefix, gen)
return n.Sym
xfunc.Nbody = func_.Nbody
xfunc.Func.Dcl = concat(func_.Func.Dcl, xfunc.Func.Dcl)
if xfunc.Nbody == nil {
- Fatal("empty body - won't generate any code")
+ Fatalf("empty body - won't generate any code")
}
typecheck(&xfunc, Etop)
// Recalculate param offsets.
if f.Type.Width > 0 {
- Fatal("transformclosure: width is already calculated")
+ Fatalf("transformclosure: width is already calculated")
}
dowidth(f.Type)
xfunc.Type = f.Type // update type of ODCLFUNC
break
default:
- Fatal("invalid typecheckpartialcall")
+ Fatalf("invalid typecheckpartialcall")
}
// Create top-level function.
basetype = basetype.Type
}
if basetype.Etype != TINTER && basetype.Sym == nil {
- Fatal("missing base type for %v", rcvrtype)
+ Fatalf("missing base type for %v", rcvrtype)
}
var spkg *Pkg
// n must be an integer constant.
func (n *Node) Int() int64 {
if !Isconst(n, CTINT) {
- Fatal("Int(%v)", n)
+ Fatalf("Int(%v)", n)
}
return Mpgetfix(n.Val().U.(*Mpint))
}
// n must be an integer constant.
func (n *Node) SetInt(i int64) {
if !Isconst(n, CTINT) {
- Fatal("SetInt(%v)", n)
+ Fatalf("SetInt(%v)", n)
}
Mpmovecfix(n.Val().U.(*Mpint), i)
}
// n must be an integer constant.
func (n *Node) SetBigInt(x *big.Int) {
if !Isconst(n, CTINT) {
- Fatal("SetBigInt(%v)", n)
+ Fatalf("SetBigInt(%v)", n)
}
n.Val().U.(*Mpint).Val.Set(x)
}
// n must be a boolean constant.
func (n *Node) Bool() bool {
if !Isconst(n, CTBOOL) {
- Fatal("Int(%v)", n)
+ Fatalf("Int(%v)", n)
}
return n.Val().U.(bool)
}
switch v.Ctype() {
case CTINT, CTRUNE:
if !Isint[t.Etype] {
- Fatal("overflow: %v integer constant", t)
+ Fatalf("overflow: %v integer constant", t)
}
if Mpcmpfixfix(v.U.(*Mpint), Minintval[t.Etype]) < 0 || Mpcmpfixfix(v.U.(*Mpint), Maxintval[t.Etype]) > 0 {
return true
case CTFLT:
if !Isfloat[t.Etype] {
- Fatal("overflow: %v floating-point constant", t)
+ Fatalf("overflow: %v floating-point constant", t)
}
if mpcmpfltflt(v.U.(*Mpflt), minfltval[t.Etype]) <= 0 || mpcmpfltflt(v.U.(*Mpflt), maxfltval[t.Etype]) >= 0 {
return true
case CTCPLX:
if !Iscomplex[t.Etype] {
- Fatal("overflow: %v complex constant", t)
+ Fatalf("overflow: %v complex constant", t)
}
if mpcmpfltflt(&v.U.(*Mpcplx).Real, minfltval[t.Etype]) <= 0 || mpcmpfltflt(&v.U.(*Mpcplx).Real, maxfltval[t.Etype]) >= 0 || mpcmpfltflt(&v.U.(*Mpcplx).Imag, minfltval[t.Etype]) <= 0 || mpcmpfltflt(&v.U.(*Mpcplx).Imag, maxfltval[t.Etype]) >= 0 {
return true
if (v.Ctype() == 0 || rv.Ctype() == 0) && nerrors > 0 {
return
}
- Fatal("constant type mismatch %v(%d) %v(%d)", nl.Type, v.Ctype(), nr.Type, rv.Ctype())
+ Fatalf("constant type mismatch %v(%d) %v(%d)", nl.Type, v.Ctype(), nr.Type, rv.Ctype())
}
// run op
n.SetVal(v)
switch v.Ctype() {
default:
- Fatal("nodlit ctype %d", v.Ctype())
+ Fatalf("nodlit ctype %d", v.Ctype())
case CTSTR:
n.Type = idealstring
n.SetVal(Val{c})
if r.Ctype() != CTFLT || i.Ctype() != CTFLT {
- Fatal("nodcplxlit ctype %d/%d", r.Ctype(), i.Ctype())
+ Fatalf("nodcplxlit ctype %d/%d", r.Ctype(), i.Ctype())
}
mpmovefltflt(&c.Real, r.U.(*Mpflt))
Yyerror("defaultlit: unknown literal: %v", n)
case CTxxx:
- Fatal("defaultlit: idealkind is CTxxx: %v", Nconv(n, obj.FmtSign))
+ Fatalf("defaultlit: idealkind is CTxxx: %v", Nconv(n, obj.FmtSign))
case CTBOOL:
t1 := Types[TBOOL]
var i int64
switch n.Val().Ctype() {
default:
- Fatal("convconst ctype=%d %v", n.Val().Ctype(), Tconv(t, obj.FmtLong))
+ Fatalf("convconst ctype=%d %v", n.Val().Ctype(), Tconv(t, obj.FmtLong))
case CTINT, CTRUNE:
i = Mpgetfix(n.Val().U.(*Mpint))
if Isfloat[tt] {
con.SetVal(toflt(con.Val()))
if con.Val().Ctype() != CTFLT {
- Fatal("convconst ctype=%d %v", con.Val().Ctype(), t)
+ Fatalf("convconst ctype=%d %v", con.Val().Ctype(), t)
}
if tt == TFLOAT32 {
con.SetVal(Val{truncfltlit(con.Val().U.(*Mpflt), t)})
return
}
- Fatal("convconst %v constant", Tconv(t, obj.FmtLong))
+ Fatalf("convconst %v constant", Tconv(t, obj.FmtLong))
}
// complex multiply v *= rv
// break addable nc-complex into nr-real and ni-imaginary
func subnode(nr *Node, ni *Node, nc *Node) {
if !nc.Addable {
- Fatal("subnode not addable")
+ Fatalf("subnode not addable")
}
tc := Simsimtype(nc.Type)
n.Type = t
if !Isfloat[t.Etype] {
- Fatal("nodfconst: bad type %v", t)
+ Fatalf("nodfconst: bad type %v", t)
}
}
}
if !t.Addable {
- Fatal("complexmove: to not addable")
+ Fatalf("complexmove: to not addable")
}
ft := Simsimtype(f.Type)
tt := Simsimtype(t.Type)
switch uint32(ft)<<16 | uint32(tt) {
default:
- Fatal("complexmove: unknown conversion: %v -> %v\n", f.Type, t.Type)
+ Fatalf("complexmove: unknown conversion: %v -> %v\n", f.Type, t.Type)
// complex to complex move/convert.
// make f addable.
switch n.Op {
default:
Dump("complexgen: unknown op", n)
- Fatal("complexgen: unknown op %v", Oconv(int(n.Op), 0))
+ Fatalf("complexgen: unknown op %v", Oconv(int(n.Op), 0))
case ODOT,
ODOTPTR,
switch n.Op {
default:
- Fatal("complexgen: unknown op %v", Oconv(int(n.Op), 0))
+ Fatalf("complexgen: unknown op %v", Oconv(int(n.Op), 0))
case OCONV:
Complexmove(nl, res)
}
if d == nil {
- Fatal("popdcl: no mark")
+ Fatalf("popdcl: no mark")
}
dclstack = d.Link
block = d.Block
}
} else {
if Curfn == nil && ctxt == PAUTO {
- Fatal("automatic outside function")
+ Fatalf("automatic outside function")
}
if Curfn != nil {
Curfn.Func.Dcl = list(Curfn.Func.Dcl, n)
func addvar(n *Node, t *Type, ctxt uint8) {
if n == nil || n.Sym == nil || (n.Op != ONAME && n.Op != ONONAME) || t == nil {
- Fatal("addvar: n=%v t=%v nil", n, t)
+ Fatalf("addvar: n=%v t=%v nil", n, t)
}
n.Op = ONAME
*/
func newname(s *Sym) *Node {
if s == nil {
- Fatal("newname nil")
+ Fatalf("newname nil")
}
n := Nod(ONAME, nil, nil)
*/
func ifacedcl(n *Node) {
if n.Op != ODCLFIELD || n.Right == nil {
- Fatal("ifacedcl")
+ Fatalf("ifacedcl")
}
if isblank(n.Left) {
func funchdr(n *Node) {
// change the declaration context from extern to auto
if Funcdepth == 0 && dclcontext != PEXTERN {
- Fatal("funchdr: dclcontext")
+ Fatalf("funchdr: dclcontext")
}
if importpkg == nil && n.Func.Nname != nil {
func funcargs(nt *Node) {
if nt.Op != OTFUNC {
- Fatal("funcargs %v", Oconv(int(nt.Op), 0))
+ Fatalf("funcargs %v", Oconv(int(nt.Op), 0))
}
// re-start the variable generation number
if nt.Left != nil {
n := nt.Left
if n.Op != ODCLFIELD {
- Fatal("funcargs receiver %v", Oconv(int(n.Op), 0))
+ Fatalf("funcargs receiver %v", Oconv(int(n.Op), 0))
}
if n.Left != nil {
n.Left.Op = ONAME
for l := nt.List; l != nil; l = l.Next {
n = l.N
if n.Op != ODCLFIELD {
- Fatal("funcargs in %v", Oconv(int(n.Op), 0))
+ Fatalf("funcargs in %v", Oconv(int(n.Op), 0))
}
if n.Left != nil {
n.Left.Op = ONAME
n = l.N
if n.Op != ODCLFIELD {
- Fatal("funcargs out %v", Oconv(int(n.Op), 0))
+ Fatalf("funcargs out %v", Oconv(int(n.Op), 0))
}
if n.Left == nil {
*/
func funcargs2(t *Type) {
if t.Etype != TFUNC {
- Fatal("funcargs2 %v", t)
+ Fatalf("funcargs2 %v", t)
}
if t.Thistuple != 0 {
func funcbody(n *Node) {
// change the declaration context from auto to extern
if dclcontext != PAUTO {
- Fatal("funcbody: dclcontext")
+ Fatalf("funcbody: dclcontext")
}
popdcl()
Funcdepth--
lineno = n.Lineno
if n.Op != ODCLFIELD {
- Fatal("structfield: oops %v\n", n)
+ Fatalf("structfield: oops %v\n", n)
}
f := typ(TFIELD)
lineno = n.Lineno
if n.Op != ODCLFIELD {
- Fatal("interfacefield: oops %v\n", n)
+ Fatalf("interfacefield: oops %v\n", n)
}
if n.Val().Ctype() != CTxxx {
func addmethod(sf *Sym, t *Type, local bool, nointerface bool) {
// get field sym
if sf == nil {
- Fatal("no method symbol")
+ Fatalf("no method symbol")
}
// get parent type sym
for f := pa.Method; f != nil; f = f.Down {
d = f
if f.Etype != TFIELD {
- Fatal("addmethod: not TFIELD: %v", Tconv(f, obj.FmtLong))
+ Fatalf("addmethod: not TFIELD: %v", Tconv(f, obj.FmtLong))
}
if sf.Name != f.Sym.Name {
continue
// during import unexported method names should be in the type's package
if importpkg != nil && f.Sym != nil && !exportname(f.Sym.Name) && f.Sym.Pkg != structpkg {
- Fatal("imported method name %v in wrong package %s\n", Sconv(f.Sym, obj.FmtSign), structpkg.Name)
+ Fatalf("imported method name %v in wrong package %s\n", Sconv(f.Sym, obj.FmtSign), structpkg.Name)
}
if d == nil {
if n.Type == nil {
if nerrors == 0 {
- Fatal("funccompile missing type")
+ Fatalf("funccompile missing type")
}
return
}
checkwidth(n.Type)
if Curfn != nil {
- Fatal("funccompile %v inside %v", n.Func.Nname.Sym, Curfn.Func.Nname.Sym)
+ Fatalf("funccompile %v inside %v", n.Func.Nname.Sym, Curfn.Func.Nname.Sym)
}
Stksize = 0
return nE
}
if n.Opt() != nil {
- Fatal("nodeEscState: opt in use (%T)", n.Opt())
+ Fatalf("nodeEscState: opt in use (%T)", n.Opt())
}
nE := new(NodeEscState)
nE.Curfn = Curfn
func (e *EscState) track(n *Node) {
if Curfn == nil {
- Fatal("EscState.track: Curfn nil")
+ Fatalf("EscState.track: Curfn nil")
}
n.Esc = EscNone // until proven otherwise
nE := e.nodeEscState(n)
if e&EscMask >= EscScope {
// normalize
if e&^EscMask != 0 {
- Fatal("Escape information had unexpected return encoding bits (w/ EscScope, EscHeap, EscNever), e&EscMask=%v", e&EscMask)
+ Fatalf("Escape information had unexpected return encoding bits (w/ EscScope, EscHeap, EscNever), e&EscMask=%v", e&EscMask)
}
}
if e&EscMask > etype {
func escfunc(e *EscState, func_ *Node) {
// print("escfunc %N %s\n", func->nname, e->recursive?"(recursive)":"");
if func_.Esc != 1 {
- Fatal("repeat escfunc %v", func_.Func.Nname)
+ Fatalf("repeat escfunc %v", func_.Func.Nname)
}
func_.Esc = EscFuncStarted
switch n.Op {
case OLABEL:
if n.Left == nil || n.Left.Sym == nil {
- Fatal("esc:label without label: %v", Nconv(n, obj.FmtSign))
+ Fatalf("esc:label without label: %v", Nconv(n, obj.FmtSign))
}
// Walk will complain about this label being already defined, but that's not until
case OGOTO:
if n.Left == nil || n.Left.Sym == nil {
- Fatal("esc:goto without label: %v", Nconv(n, obj.FmtSign))
+ Fatalf("esc:goto without label: %v", Nconv(n, obj.FmtSign))
}
// If we come past one that's uninitialized, this must be a (harmless) forward jump
escassign(e, ll.N, lr.N)
}
if lr != nil || ll != nil {
- Fatal("esc oas2func")
+ Fatalf("esc oas2func")
}
case ORETURN:
}
if ll != nil {
- Fatal("esc return list")
+ Fatalf("esc return list")
}
// Argument could leak through recover.
switch dst.Op {
default:
Dump("dst", dst)
- Fatal("escassign: unexpected dst")
+ Fatalf("escassign: unexpected dst")
case OARRAYLIT,
OCLOSURE,
break
default:
- Fatal("escape mktag")
+ Fatalf("escape mktag")
}
if mask < len(tags) && tags[mask] != "" {
// so there is no need to check here.
if em != 0 && dsts == nil {
- Fatal("corrupt esc tag %q or messed up escretval list\n", note)
+ Fatalf("corrupt esc tag %q or messed up escretval list\n", note)
}
return em0
}
var fn *Node
switch n.Op {
default:
- Fatal("esccall")
+ Fatalf("esccall")
case OCALLFUNC:
fn = n.Left
// function in same mutually recursive group. Incorporate into flow graph.
// print("esc local fn: %N\n", fn->ntype);
if fn.Name.Defn.Esc == EscFuncUnknown || nE.Escretval != nil {
- Fatal("graph inconsistency")
+ Fatalf("graph inconsistency")
}
// set up out list on this call node
// Imported or completely analyzed function. Use the escape tags.
if nE.Escretval != nil {
- Fatal("esc already decorated call %v\n", Nconv(n, obj.FmtSign))
+ Fatalf("esc already decorated call %v\n", Nconv(n, obj.FmtSign))
}
if Debug['m'] > 2 {
n := s.Def
typecheck(&n, Erv)
if n == nil || n.Op != OLITERAL {
- Fatal("dumpexportconst: oconst nil: %v", s)
+ Fatalf("dumpexportconst: oconst nil: %v", s)
}
t := n.Type // may or may not be specified
b, err := obj.Bopenw(asmhdr)
if err != nil {
- Fatal("%v", err)
+ Fatalf("%v", err)
}
fmt.Fprintf(b, "// generated by %cg -asmhdr from package %s\n\n", Thearch.Thechar, localpkg.Name)
var n *Node
case FExp:
if s.Name != "" && s.Name[0] == '.' {
- Fatal("exporting synthetic symbol %s", s.Name)
+ Fatalf("exporting synthetic symbol %s", s.Name)
}
if s.Pkg != builtinpkg {
return fmt.Sprintf("@%q.%s", s.Pkg.Path, s.Name)
}
if fmtmode == FExp {
- Fatal("missing %v case during export", Econv(int(t.Etype), 0))
+ Fatalf("missing %v case during export", Econv(int(t.Etype), 0))
}
// Don't know how to handle - fall back to detailed prints.
dumpdepth--
default:
- Fatal("unhandled %%N mode")
+ Fatalf("unhandled %%N mode")
}
flag = sf
n.Name.Param.Stackparam.Type = n.Type
n.Name.Param.Stackparam.Addable = true
if n.Xoffset == BADWIDTH {
- Fatal("addrescapes before param assignment")
+ Fatalf("addrescapes before param assignment")
}
n.Name.Param.Stackparam.Xoffset = n.Xoffset
fallthrough
func cgen_proc(n *Node, proc int) {
switch n.Left.Op {
default:
- Fatal("cgen_proc: unknown call %v", Oconv(int(n.Left.Op), 0))
+ Fatalf("cgen_proc: unknown call %v", Oconv(int(n.Left.Op), 0))
case OCALLMETH:
cgen_callmeth(n.Left, proc)
}
if n.Op != ONAME {
Dump("cgen_dcl", n)
- Fatal("cgen_dcl")
+ Fatalf("cgen_dcl")
}
if n.Class&PHEAP == 0 {
Mpmovecfix(z.Val().U.(*Mpint), 0)
default:
- Fatal("clearslim called on type %v", n.Type)
+ Fatalf("clearslim called on type %v", n.Type)
}
ullmancalc(&z)
case ODOT:
if n.Xoffset == BADWIDTH {
Dump("bad width in dotoffset", n)
- Fatal("bad width in dotoffset")
+ Fatalf("bad width in dotoffset")
}
i = Dotoffset(n.Left, oary, nn)
case ODOTPTR:
if n.Xoffset == BADWIDTH {
Dump("bad width in dotoffset", n)
- Fatal("bad width in dotoffset")
+ Fatalf("bad width in dotoffset")
}
i = Dotoffset(n.Left, oary, nn)
*/
func Tempname(nn *Node, t *Type) {
if Curfn == nil {
- Fatal("no curfn for tempname")
+ Fatalf("no curfn for tempname")
}
if t == nil {
switch n.Op {
default:
- Fatal("gen: unknown op %v", Nconv(n, obj.FmtShort|obj.FmtSign))
+ Fatalf("gen: unknown op %v", Nconv(n, obj.FmtShort|obj.FmtSign))
case OCASE,
OFALL,
ret:
if Anyregalloc() != wasregalloc {
Dump("node", n)
- Fatal("registers left allocated")
+ Fatalf("registers left allocated")
}
lineno = lno
l := n.Left
if l.Op != ODOTMETH {
- Fatal("cgen_callmeth: not dotmethod: %v", l)
+ Fatalf("cgen_callmeth: not dotmethod: %v", l)
}
n2 := *n
}
if lab.Gotopc != nil {
- Fatal("label %v never resolved", lab.Sym)
+ Fatalf("label %v never resolved", lab.Sym)
}
for l = lab.Use; l != nil; l = l.Next {
checkgoto(l.N, lab.Def)
nodr = *nr
if !cadable(nr) {
if nr.Ullman >= UINF && nodl.Op == OINDREG {
- Fatal("miscompile")
+ Fatalf("miscompile")
}
Igen(nr, &nodr, nil)
defer Regfree(&nodr)
visitComponents(nl.Type, 0, func(t *Type, offset int64) bool {
if wb && int(Simtype[t.Etype]) == Tptr && t != itable {
if ptrType != nil {
- Fatal("componentgen_wb %v", Tconv(nl.Type, 0))
+ Fatalf("componentgen_wb %v", Tconv(nl.Type, 0))
}
ptrType = t
ptrOffset = offset
// NOTE: Assuming little endian (signed top half at offset 4).
// We don't have any 32-bit big-endian systems.
if Thearch.Thechar != '5' && Thearch.Thechar != '8' {
- Fatal("unknown 32-bit architecture")
+ Fatalf("unknown 32-bit architecture")
}
return f(Types[TUINT32], startOffset) &&
f(Types[TINT32], startOffset+4)
// in code introduced in CL 6932045 to fix issue #4518.
// But the test case in issue 4518 does not trigger this anymore,
// so maybe this complication is no longer needed.
- Fatal("struct not at offset 0")
+ Fatalf("struct not at offset 0")
}
for field := t.Type; field != nil; field = field.Down {
if field.Etype != TFIELD {
- Fatal("bad struct")
+ Fatalf("bad struct")
}
if !visitComponents(field.Type, startOffset+field.Width, f) {
return false
func (v Val) Ctype() int {
switch x := v.U.(type) {
default:
- Fatal("unexpected Ctype for %T", v.U)
+ Fatalf("unexpected Ctype for %T", v.U)
panic("not reached")
case nil:
return 0
if as == obj.ADATA || as == obj.AGLOBL {
if ddumped != 0 {
- Fatal("already dumped data")
+ Fatalf("already dumped data")
}
if dpc == nil {
dpc = Ctxt.NewProg()
func Nodreg(n *Node, t *Type, r int) {
if t == nil {
- Fatal("nodreg: t nil")
+ Fatalf("nodreg: t nil")
}
*n = Node{}
a := a // copy to let escape into Ctxt.Dconv
Debug['h'] = 1
Dump("naddr", n)
- Fatal("naddr: bad %v %v", Oconv(int(n.Op), 0), Ctxt.Dconv(a))
+ Fatalf("naddr: bad %v %v", Oconv(int(n.Op), 0), Ctxt.Dconv(a))
case OREGISTER:
a.Type = obj.TYPE_REG
case OCLOSUREVAR:
if !Curfn.Func.Needctxt {
- Fatal("closurevar without needctxt")
+ Fatalf("closurevar without needctxt")
}
a.Type = obj.TYPE_MEM
a.Reg = int16(Thearch.REGCTXT)
a.Type = obj.TYPE_MEM
switch n.Class {
default:
- Fatal("naddr: ONAME class %v %d\n", n.Sym, n.Class)
+ Fatalf("naddr: ONAME class %v %d\n", n.Sym, n.Class)
case PEXTERN:
a.Name = obj.NAME_EXTERN
}
switch n.Val().Ctype() {
default:
- Fatal("naddr: const %v", Tconv(n.Type, obj.FmtLong))
+ Fatalf("naddr: const %v", Tconv(n.Type, obj.FmtLong))
case CTFLT:
a.Type = obj.TYPE_FCONST
}
if a.Type != obj.TYPE_MEM {
a := a // copy to let escape into Ctxt.Dconv
- Fatal("naddr: OADDR %v (from %v)", Ctxt.Dconv(a), Oconv(int(n.Left.Op), 0))
+ Fatalf("naddr: OADDR %v (from %v)", Ctxt.Dconv(a), Oconv(int(n.Left.Op), 0))
}
a.Type = obj.TYPE_ADDR
var savet Iter
first := Structfirst(&savet, &t)
if first == nil {
- Fatal("nodarg: bad struct")
+ Fatalf("nodarg: bad struct")
}
if first.Width == BADWIDTH {
- Fatal("nodarg: offset not computed for %v", t)
+ Fatalf("nodarg: offset not computed for %v", t)
}
n.Xoffset = first.Width
n.Addable = true
}
if t.Etype != TFIELD {
- Fatal("nodarg: not field %v", t)
+ Fatalf("nodarg: not field %v", t)
}
if fp == 1 {
n.Sym = t.Sym
if t.Width == BADWIDTH {
- Fatal("nodarg: offset not computed for %v", t)
+ Fatalf("nodarg: offset not computed for %v", t)
}
n.Xoffset = t.Width
n.Addable = true
n.Class = PPARAM
case 2: // offset output arg
- Fatal("shouldn't be used")
+ Fatalf("shouldn't be used")
}
n.Typecheck = 1
func Patch(p *obj.Prog, to *obj.Prog) {
if p.To.Type != obj.TYPE_BRANCH {
- Fatal("patch: not a branch")
+ Fatalf("patch: not a branch")
}
p.To.Val = to
p.To.Offset = to.Pc
func unpatch(p *obj.Prog) *obj.Prog {
if p.To.Type != obj.TYPE_BRANCH {
- Fatal("unpatch: not a branch")
+ Fatalf("unpatch: not a branch")
}
q, _ := p.To.Val.(*obj.Prog)
p.To.Val = nil
*/
func Regalloc(n *Node, t *Type, o *Node) {
if t == nil {
- Fatal("regalloc: t nil")
+ Fatalf("regalloc: t nil")
}
et := int(Simtype[t.Etype])
if Ctxt.Arch.Regsize == 4 && (et == TINT64 || et == TUINT64) {
- Fatal("regalloc 64bit")
+ Fatalf("regalloc 64bit")
}
var i int
Switch:
switch et {
default:
- Fatal("regalloc: unknown type %v", t)
+ Fatalf("regalloc: unknown type %v", t)
case TINT8, TUINT8, TINT16, TUINT16, TINT32, TUINT32, TINT64, TUINT64, TPTR32, TPTR64, TBOOL:
if o != nil && o.Op == OREGISTER {
}
Flusherrors()
Regdump()
- Fatal("out of fixed registers")
+ Fatalf("out of fixed registers")
case TFLOAT32, TFLOAT64:
if Thearch.Use387 {
}
Flusherrors()
Regdump()
- Fatal("out of floating registers")
+ Fatalf("out of floating registers")
case TCOMPLEX64, TCOMPLEX128:
Tempname(n, t)
return
}
if n.Op != OREGISTER && n.Op != OINDREG {
- Fatal("regfree: not a register")
+ Fatalf("regfree: not a register")
}
i := int(n.Reg)
if i == Thearch.REGSP {
Thearch.FREGMIN <= i && i <= Thearch.FREGMAX:
// ok
default:
- Fatal("regfree: reg out of range")
+ Fatalf("regfree: reg out of range")
}
i -= Thearch.REGMIN
if reg[i] <= 0 {
- Fatal("regfree: reg not allocated")
+ Fatalf("regfree: reg not allocated")
}
reg[i]--
if reg[i] == 0 {
Thearch.FREGMIN <= r && r <= Thearch.FREGMAX:
// ok
default:
- Fatal("reginuse: reg out of range")
+ Fatalf("reginuse: reg out of range")
}
return reg[r-Thearch.REGMIN] > 0
// so that a register can be given up but then reclaimed.
func Regrealloc(n *Node) {
if n.Op != OREGISTER && n.Op != OINDREG {
- Fatal("regrealloc: not a register")
+ Fatalf("regrealloc: not a register")
}
i := int(n.Reg)
if i == Thearch.REGSP {
Thearch.FREGMIN <= i && i <= Thearch.FREGMAX:
// ok
default:
- Fatal("regrealloc: reg out of range")
+ Fatalf("regrealloc: reg out of range")
}
i -= Thearch.REGMIN
rcvr = rcvr.Type
}
if rcvr.Sym == nil {
- Fatal("receiver with no sym: [%v] %v (%v)", fn.Sym, Nconv(fn, obj.FmtLong), rcvr)
+ Fatalf("receiver with no sym: [%v] %v (%v)", fn.Sym, Nconv(fn, obj.FmtLong), rcvr)
}
return rcvr.Sym.Pkg
}
// fn and ->nbody will already have been typechecked.
func caninl(fn *Node) {
if fn.Op != ODCLFUNC {
- Fatal("caninl %v", fn)
+ Fatalf("caninl %v", fn)
}
if fn.Func.Nname == nil {
- Fatal("caninl no nname %v", Nconv(fn, obj.FmtSign))
+ Fatalf("caninl no nname %v", Nconv(fn, obj.FmtSign))
}
// If fn has no body (is defined outside of Go), cannot inline it.
}
if fn.Typecheck == 0 {
- Fatal("caninl on non-typechecked function %v", fn)
+ Fatalf("caninl on non-typechecked function %v", fn)
}
// can't handle ... args yet
// Call is okay if inlinable and we have the budget for the body.
case OCALLMETH:
if n.Left.Type == nil {
- Fatal("no function type for [%p] %v\n", n.Left, Nconv(n.Left, obj.FmtSign))
+ Fatalf("no function type for [%p] %v\n", n.Left, Nconv(n.Left, obj.FmtSign))
}
if n.Left.Type.Nname == nil {
- Fatal("no function definition for [%p] %v\n", n.Left.Type, Tconv(n.Left.Type, obj.FmtSign))
+ Fatalf("no function definition for [%p] %v\n", n.Left.Type, Tconv(n.Left.Type, obj.FmtSign))
}
if n.Left.Type.Nname.Func.Inl != nil {
*budget -= int(n.Left.Type.Nname.Func.InlCost)
Curfn = fn
inlnode(&fn)
if fn != Curfn {
- Fatal("inlnode replaced curfn")
+ Fatalf("inlnode replaced curfn")
}
Curfn = savefn
}
// statements.
func inlconv2list(n *Node) *NodeList {
if n.Op != OINLCALL || n.Rlist == nil {
- Fatal("inlconv2list %v\n", Nconv(n, obj.FmtSign))
+ Fatalf("inlconv2list %v\n", Nconv(n, obj.FmtSign))
}
l := n.Rlist
// typecheck should have resolved ODOTMETH->type, whose nname points to the actual function.
if n.Left.Type == nil {
- Fatal("no function type for [%p] %v\n", n.Left, Nconv(n.Left, obj.FmtSign))
+ Fatalf("no function type for [%p] %v\n", n.Left, Nconv(n.Left, obj.FmtSign))
}
if n.Left.Type.Nname == nil {
- Fatal("no function definition for [%p] %v\n", n.Left.Type, Tconv(n.Left.Type, obj.FmtSign))
+ Fatalf("no function definition for [%p] %v\n", n.Left.Type, Tconv(n.Left.Type, obj.FmtSign))
}
mkinlcall(np, n.Left.Type.Nname, n.Isddd)
func tinlvar(t *Type) *Node {
if t.Nname != nil && !isblank(t.Nname) {
if t.Nname.Name.Inlvar == nil {
- Fatal("missing inlvar for %v\n", t.Nname)
+ Fatalf("missing inlvar for %v\n", t.Nname)
}
return t.Nname.Name.Inlvar
}
t := getthisx(fn.Type).Type
if t != nil && t.Nname != nil && !isblank(t.Nname) && t.Nname.Name.Inlvar == nil {
- Fatal("missing inlvar for %v\n", t.Nname)
+ Fatalf("missing inlvar for %v\n", t.Nname)
}
if n.Left.Left == nil {
- Fatal("method call without receiver: %v", Nconv(n, obj.FmtSign))
+ Fatalf("method call without receiver: %v", Nconv(n, obj.FmtSign))
}
if t == nil {
- Fatal("method call unknown receiver type: %v", Nconv(n, obj.FmtSign))
+ Fatalf("method call unknown receiver type: %v", Nconv(n, obj.FmtSign))
}
as = Nod(OAS, tinlvar(t), n.Left.Left)
if as != nil {
if fn.Type.Thistuple != 0 && n.Left.Op != ODOTMETH {
// non-method call to method
if n.List == nil {
- Fatal("non-method call to method without first arg: %v", Nconv(n, obj.FmtSign))
+ Fatalf("non-method call to method without first arg: %v", Nconv(n, obj.FmtSign))
}
// append receiver inlvar to LHS.
t := getthisx(fn.Type).Type
if t != nil && t.Nname != nil && !isblank(t.Nname) && t.Nname.Name.Inlvar == nil {
- Fatal("missing inlvar for %v\n", t.Nname)
+ Fatalf("missing inlvar for %v\n", t.Nname)
}
if t == nil {
- Fatal("method call unknown receiver type: %v", Nconv(n, obj.FmtSign))
+ Fatalf("method call unknown receiver type: %v", Nconv(n, obj.FmtSign))
}
as.List = list(as.List, tinlvar(t))
ll = ll.Next // track argument count.
}
if ll != nil || t != nil {
- Fatal("arg count mismatch: %v vs %v\n", Tconv(getinargx(fn.Type), obj.FmtSharp), Hconv(n.List, obj.FmtComma))
+ Fatalf("arg count mismatch: %v vs %v\n", Tconv(getinargx(fn.Type), obj.FmtSharp), Hconv(n.List, obj.FmtComma))
}
}
m.Ninit = nil
if n.Op == OCLOSURE {
- Fatal("cannot inline function containing closure: %v", Nconv(n, obj.FmtSign))
+ Fatalf("cannot inline function containing closure: %v", Nconv(n, obj.FmtSign))
}
m.Left = inlsubst(n.Left)
Thearch.Betypeinit()
if Widthptr == 0 {
- Fatal("betypeinit failed")
+ Fatalf("betypeinit failed")
}
lexinit()
etype = syms[i].etype
if etype != Txxx {
if etype < 0 || etype >= len(Types) {
- Fatal("lexinit: %s bad etype", s.Name)
+ Fatalf("lexinit: %s bad etype", s.Name)
}
s1 = Pkglookup(syms[i].name, builtinpkg)
t = Types[etype]
}
if n.Type == nil {
- Fatal("external %v nil type\n", n)
+ Fatalf("external %v nil type\n", n)
}
if n.Class == PFUNC {
continue
ggloblsym(sym, int32(off), obj.NOPTR|obj.LOCAL)
if nam.Op != ONAME {
- Fatal("slicebytes %v", nam)
+ Fatalf("slicebytes %v", nam)
}
off = int(nam.Xoffset)
off = dsymptr(nam.Sym, off, sym, 0)
return a
}
- Fatal("ordersafeexpr %v", Oconv(int(n.Op), 0))
+ Fatalf("ordersafeexpr %v", Oconv(int(n.Op), 0))
return nil // not reached
}
// and then returns the list t1, t2, ....
func copyret(n *Node, order *Order) *NodeList {
if n.Type.Etype != TSTRUCT || n.Type.Funarg == 0 {
- Fatal("copyret %v %d", n.Type, n.Left.Type.Outtuple)
+ Fatalf("copyret %v %d", n.Type, n.Left.Type.Outtuple)
}
var l1 *NodeList
func ordermapassign(n *Node, order *Order) {
switch n.Op {
default:
- Fatal("ordermapassign %v", Oconv(int(n.Op), 0))
+ Fatalf("ordermapassign %v", Oconv(int(n.Op), 0))
case OAS:
order.out = list(order.out, n)
switch n.Op {
default:
- Fatal("orderstmt %v", Oconv(int(n.Op), 0))
+ Fatalf("orderstmt %v", Oconv(int(n.Op), 0))
case OVARKILL:
order.out = list(order.out, n)
orderexpr(&n.Right, order, nil)
switch n.Type.Etype {
default:
- Fatal("orderstmt range %v", n.Type)
+ Fatalf("orderstmt range %v", n.Type)
// Mark []byte(str) range expression to reuse string backing storage.
// It is safe because the storage cannot be mutated.
var r *Node
for l := n.List; l != nil; l = l.Next {
if l.N.Op != OXCASE {
- Fatal("order select case %v", Oconv(int(l.N.Op), 0))
+ Fatalf("order select case %v", Oconv(int(l.N.Op), 0))
}
r = l.N.Left
setlineno(l.N)
// Append any new body prologue to ninit.
// The next loop will insert ninit into nbody.
if l.N.Ninit != nil {
- Fatal("order select ninit")
+ Fatalf("order select ninit")
}
if r != nil {
switch r.Op {
orderexpr(&n.Left, order, nil)
for l := n.List; l != nil; l = l.Next {
if l.N.Op != OXCASE {
- Fatal("order switch case %v", Oconv(int(l.N.Op), 0))
+ Fatalf("order switch case %v", Oconv(int(l.N.Op), 0))
}
orderexprlistinplace(l.N.List, order)
orderblock(&l.N.Nbody)
func gvardefx(n *Node, as int) {
if n == nil {
- Fatal("gvardef nil")
+ Fatalf("gvardef nil")
}
if n.Op != ONAME {
Yyerror("gvardef %v; %v", Oconv(int(n.Op), obj.FmtSharp), n)
func gcsymdup(s *Sym) {
ls := Linksym(s)
if len(ls.R) > 0 {
- Fatal("cannot rosymdup %s with relocations", ls.Name)
+ Fatalf("cannot rosymdup %s with relocations", ls.Name)
}
ls.Name = fmt.Sprintf("gclocals·%x", md5.Sum(ls.P))
ls.Dupok = 1
dowidth(n.Type)
w = n.Type.Width
if w >= Thearch.MAXWIDTH || w < 0 {
- Fatal("bad width")
+ Fatalf("bad width")
}
Stksize += w
Stksize = Rnd(Stksize, int64(n.Type.Align))
// Ideally we wouldn't see any integer types here, but we do.
if n.Type == nil || (!Isptr[n.Type.Etype] && !Isint[n.Type.Etype] && n.Type.Etype != TUNSAFEPTR) {
Dump("checknil", n)
- Fatal("bad checknil")
+ Fatalf("bad checknil")
}
if ((Thearch.Thechar == '5' || Thearch.Thechar == '7' || Thearch.Thechar == '9') && n.Op != OREGISTER) || !n.Addable || n.Op == OLITERAL {
func xmalloc(size uint32) interface{} {
result := (interface{})(make([]byte, size))
if result == nil {
- Fatal("malloc failed")
+ Fatalf("malloc failed")
}
return result
}
// Constructs a new basic block containing a single instruction.
func newblock(prog *obj.Prog) *BasicBlock {
if prog == nil {
- Fatal("newblock: prog cannot be nil")
+ Fatalf("newblock: prog cannot be nil")
}
result := new(BasicBlock)
result.rpo = -1
// Frees a basic block and all of its leaf data structures.
func freeblock(bb *BasicBlock) {
if bb == nil {
- Fatal("freeblock: cannot free nil")
+ Fatalf("freeblock: cannot free nil")
}
}
// to a successor of from.
func addedge(from *BasicBlock, to *BasicBlock) {
if from == nil {
- Fatal("addedge: from is nil")
+ Fatalf("addedge: from is nil")
}
if to == nil {
- Fatal("addedge: to is nil")
+ Fatalf("addedge: to is nil")
}
from.succ = append(from.succ, to)
to.pred = append(to.pred, from)
// is a call to a specific package qualified function name.
func iscall(prog *obj.Prog, name *obj.LSym) bool {
if prog == nil {
- Fatal("iscall: prog is nil")
+ Fatalf("iscall: prog is nil")
}
if name == nil {
- Fatal("iscall: function name is nil")
+ Fatalf("iscall: function name is nil")
}
if prog.As != obj.ACALL {
return false
pred := selectgo
for {
if len(pred.pred) == 0 {
- Fatal("selectgo does not have a newselect")
+ Fatalf("selectgo does not have a newselect")
}
pred = pred.pred[0]
if blockany(pred, isselectcommcasecall) {
// A select comm case block should have exactly one
// successor.
if len(pred.succ) != 1 {
- Fatal("select comm case has too many successors")
+ Fatalf("select comm case has too many successors")
}
succ = pred.succ[0]
// and the branch should lead to the select case
// statements block.
if len(succ.succ) != 2 {
- Fatal("select comm case successor has too many successors")
+ Fatalf("select comm case successor has too many successors")
}
// Add the block as a successor of the selectgo block.
Thearch.Proginfo(p)
if p.To.Type == obj.TYPE_BRANCH {
if p.To.Val == nil {
- Fatal("prog branch to nil")
+ Fatalf("prog branch to nil")
}
if p.To.Val.(*obj.Prog).Opt == nil {
p.To.Val.(*obj.Prog).Opt = newblock(p.To.Val.(*obj.Prog))
if bb.rpo == -1 {
fmt.Printf("newcfg: unreachable basic block for %v\n", bb.last)
printcfg(cfg)
- Fatal("newcfg: invalid control flow graph")
+ Fatalf("newcfg: invalid control flow graph")
}
return cfg
goto Next
}
if pos >= int32(len(vars)) || vars[pos] != from.Node {
- Fatal("bad bookkeeping in liveness %v %d", Nconv(from.Node.(*Node), 0), pos)
+ Fatalf("bad bookkeeping in liveness %v %d", Nconv(from.Node.(*Node), 0), pos)
}
if ((from.Node).(*Node)).Addrtaken {
bvset(avarinit, pos)
return
}
if pos >= int32(len(vars)) || vars[pos] != to.Node {
- Fatal("bad bookkeeping in liveness %v %d", Nconv(to.Node.(*Node), 0), pos)
+ Fatalf("bad bookkeeping in liveness %v %d", Nconv(to.Node.(*Node), 0), pos)
}
if ((to.Node).(*Node)).Addrtaken {
if prog.As != obj.AVARKILL {
// Frees the liveness structure and all of its leaf data structures.
func freeliveness(lv *Liveness) {
if lv == nil {
- Fatal("freeliveness: cannot free nil")
+ Fatalf("freeliveness: cannot free nil")
}
}
// accounts for 40% of the 6g execution time.
func onebitwalktype1(t *Type, xoffset *int64, bv Bvec) {
if t.Align > 0 && *xoffset&int64(t.Align-1) != 0 {
- Fatal("onebitwalktype1: invalid initial alignment, %v", t)
+ Fatalf("onebitwalktype1: invalid initial alignment, %v", t)
}
switch t.Etype {
TCHAN,
TMAP:
if *xoffset&int64(Widthptr-1) != 0 {
- Fatal("onebitwalktype1: invalid alignment, %v", t)
+ Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
bvset(bv, int32(*xoffset/int64(Widthptr))) // pointer
*xoffset += t.Width
case TSTRING:
// struct { byte *str; intgo len; }
if *xoffset&int64(Widthptr-1) != 0 {
- Fatal("onebitwalktype1: invalid alignment, %v", t)
+ Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
bvset(bv, int32(*xoffset/int64(Widthptr))) // pointer in first slot
*xoffset += t.Width
// or, when isnilinter(t)==true:
// struct { Type *type; void *data; }
if *xoffset&int64(Widthptr-1) != 0 {
- Fatal("onebitwalktype1: invalid alignment, %v", t)
+ Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
bvset(bv, int32(*xoffset/int64(Widthptr))) // pointer in first slot
bvset(bv, int32(*xoffset/int64(Widthptr)+1)) // pointer in second slot
// The value of t->bound is -1 for slice types and >=0
// for fixed array types. All other values are invalid.
if t.Bound < -1 {
- Fatal("onebitwalktype1: invalid bound, %v", t)
+ Fatalf("onebitwalktype1: invalid bound, %v", t)
}
if Isslice(t) {
// struct { byte *array; uintgo len; uintgo cap; }
if *xoffset&int64(Widthptr-1) != 0 {
- Fatal("onebitwalktype1: invalid TARRAY alignment, %v", t)
+ Fatalf("onebitwalktype1: invalid TARRAY alignment, %v", t)
}
bvset(bv, int32(*xoffset/int64(Widthptr))) // pointer in first slot (BitsPointer)
*xoffset += t.Width
*xoffset += t.Width - o
default:
- Fatal("onebitwalktype1: unexpected type, %v", t)
+ Fatalf("onebitwalktype1: unexpected type, %v", t)
}
}
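onebitwalktype1 records one bit per pointer-sized word of t: bit *xoffset/Widthptr is set for each word that holds a pointer. As a worked example (assuming Widthptr is 8): a string at offset 16 sets bit 2 for its data pointer and leaves its length word clear, while an interface at offset 0 sets bits 0 and 1, matching the two-slot struct layouts sketched in the comments above.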
if pos < 0 {
// the first block we encounter should have the ATEXT so
// at no point should pos ever be less than zero.
- Fatal("livenessepilogue")
+ Fatalf("livenessepilogue")
}
bvcopy(livein, bb.liveout)
if p.To.Type == obj.TYPE_BRANCH {
if p.To.Val == nil {
- Fatal("pnil %v", p)
+ Fatalf("pnil %v", p)
}
f1 = p.To.Val.(*obj.Prog).Opt.(*Flow)
if f1 == nil {
- Fatal("fnil %v / %v", p, p.To.Val.(*obj.Prog))
+ Fatalf("fnil %v / %v", p, p.To.Val.(*obj.Prog))
}
if f1 == f {
//fatal("self loop %v", p);
for rpo1 < rpo2 {
t = idom[rpo2]
if t >= rpo2 {
- Fatal("bad idom")
+ Fatalf("bad idom")
}
rpo2 = t
}
d := postorder(g.Start, rpo2r, 0)
nr := int32(g.Num)
if d > nr {
- Fatal("too many reg nodes %d %d", d, nr)
+ Fatalf("too many reg nodes %d %d", d, nr)
}
nr = d
var r1 *Flow
for f := g.Start; f != nil; f = f.Link {
p := f.Prog
if p.From.Node != nil && ((p.From.Node).(*Node)).Opt() != nil && p.To.Node != nil && ((p.To.Node).(*Node)).Opt() != nil {
- Fatal("double node %v", p)
+ Fatalf("double node %v", p)
}
v = nil
n, _ = p.From.Node.(*Node)
fmt.Printf("drop write-only %v\n", v.node.Sym)
}
} else {
- Fatal("temp used and not set: %v", p)
+ Fatalf("temp used and not set: %v", p)
}
nkill++
continue
}
setlineno(n)
if init == nil {
- Fatal("racewalk: bad init list")
+ Fatalf("racewalk: bad init list")
}
if init == &n.Ninit {
// If init == &n->ninit and n->ninit is non-nil,
switch n.Op {
default:
- Fatal("racewalk: unknown node type %v", Oconv(int(n.Op), 0))
+ Fatalf("racewalk: unknown node type %v", Oconv(int(n.Op), 0))
case OAS, OASWB, OAS2FUNC:
racewalknode(&n.Left, init, 1, 0)
var init *NodeList
switch t.Etype {
default:
- Fatal("walkrange")
+ Fatalf("walkrange")
// Lower n into runtime·memclr if possible, for
// fast zeroing of slices and arrays (issue 5373).
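As a user-level illustration of the memclr lowering mentioned above (my example, not compiler code), a range-clear loop of exactly this shape is what the rewrite targets:

	// The whole loop can be lowered to a single runtime memclr
	// call instead of a per-element store (issue 5373).
	func clear(b []byte) {
		for i := range b {
			b[i] = 0
		}
	}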
var method *Sym
for f := mt.Xmethod; f != nil; f = f.Down {
if f.Etype != TFIELD {
- Fatal("methods: not field %v", f)
+ Fatalf("methods: not field %v", f)
}
if f.Type.Etype != TFUNC || f.Type.Thistuple == 0 {
- Fatal("non-method on %v method %v %v\n", mt, f.Sym, f)
+ Fatalf("non-method on %v method %v %v\n", mt, f.Sym, f)
}
if getthisx(f.Type).Type == nil {
- Fatal("receiver with no type on %v method %v %v\n", mt, f.Sym, f)
+ Fatalf("receiver with no type on %v method %v %v\n", mt, f.Sym, f)
}
if f.Nointerface {
continue
a.name = method.Name
if !exportname(method.Name) {
if method.Pkg == nil {
- Fatal("methods: missing package")
+ Fatalf("methods: missing package")
}
a.pkg = method.Pkg
}
var last *Sig
for f := t.Type; f != nil; f = f.Down {
if f.Etype != TFIELD {
- Fatal("imethods: not field")
+ Fatalf("imethods: not field")
}
if f.Type.Etype != TFUNC || f.Sym == nil {
continue
a.name = method.Name
if !exportname(method.Name) {
if method.Pkg == nil {
- Fatal("imethods: missing package")
+ Fatalf("imethods: missing package")
}
a.pkg = method.Pkg
}
a.type_ = methodfunc(f.Type, nil)
if last != nil && sigcmp(last, a) >= 0 {
- Fatal("sigcmp vs sortinter %s %s", last.name, a.name)
+ Fatalf("sigcmp vs sortinter %s %s", last.name, a.name)
}
if last == nil {
all = a
ret = true
case TFIELD:
- Fatal("haspointers: unexpected type, %v", t)
+ Fatalf("haspointers: unexpected type, %v", t)
}
t.Haspointers = 1 + uint8(obj.Bool2int(ret))
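// (Haspointers caches the answer: 0 = not yet computed, 1 = no pointers, 2 = has pointers.)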
return lastPtrField.Width + typeptrdata(lastPtrField.Type)
default:
- Fatal("typeptrdata: unexpected type, %v", t)
+ Fatalf("typeptrdata: unexpected type, %v", t)
return 0
}
}
func dcommontype(s *Sym, ot int, t *Type) int {
if ot != 0 {
- Fatal("dcommontype %d", ot)
+ Fatalf("dcommontype %d", ot)
}
sizeofAlg := 2 * Widthptr
i = 1
}
if i&(i-1) != 0 {
- Fatal("invalid alignment %d for %v", t.Align, t)
+ Fatalf("invalid alignment %d for %v", t.Align, t)
}
ot = duint8(s, ot, t.Align) // align
ot = duint8(s, ot, t.Align) // fieldAlign
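The i&(i-1) test in the hunk above is the usual power-of-two check; in isolation it looks like this (a standalone sketch):

	// isPow2 reports whether i is a power of two: a power of two
	// has exactly one bit set, so clearing its lowest set bit
	// (i & (i-1)) must leave zero.
	func isPow2(i uint8) bool {
		return i != 0 && i&(i-1) == 0
	}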
func typenamesym(t *Type) *Sym {
if t == nil || (Isptr[t.Etype] && t.Type == nil) || isideal(t) {
- Fatal("typename %v", t)
+ Fatalf("typename %v", t)
}
s := typesym(t)
if s.Def == nil {
case TARRAY:
if Isslice(t) {
- Fatal("slice can't be a map key: %v", t)
+ Fatalf("slice can't be a map key: %v", t)
}
return isreflexive(t.Type)
return true
default:
- Fatal("bad type for map key: %v", t)
+ Fatalf("bad type for map key: %v", t)
return false
}
}
}
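The reflexivity checked by isreflexive above matters because map lookup compares keys with ==. A hedged user-level illustration of a non-reflexive key:

	// A float64 key is non-reflexive when it is NaN: NaN != NaN,
	// so an inserted NaN key can never be looked up again.
	func nanKey() bool {
		k := math.NaN() // requires "math"
		m := map[float64]int{k: 1}
		_, ok := m[k]
		return ok // false: the key is unfindable
	}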
if isideal(t) {
- Fatal("dtypesym %v", t)
+ Fatalf("dtypesym %v", t)
}
s := typesym(t)
func dgcprog(t *Type) (*Sym, int64) {
dowidth(t)
if t.Width == BADWIDTH {
- Fatal("dgcprog: %v badwidth", t)
+ Fatalf("dgcprog: %v badwidth", t)
}
sym := typesymprefix(".gcprog", t)
var p GCProg
offset := p.w.BitIndex() * int64(Widthptr)
p.end()
if ptrdata := typeptrdata(t); offset < ptrdata || offset > t.Width {
- Fatal("dgcprog: %v: offset=%d but ptrdata=%d size=%d", t, offset, ptrdata, t.Width)
+ Fatalf("dgcprog: %v: offset=%d but ptrdata=%d size=%d", t, offset, ptrdata, t.Width)
}
return sym, offset
}
}
switch t.Etype {
default:
- Fatal("GCProg.emit: unexpected type %v", t)
+ Fatalf("GCProg.emit: unexpected type %v", t)
case TSTRING:
p.w.Ptr(offset / int64(Widthptr))
}
if t.Bound == 0 {
// should have been handled by haspointers check above
- Fatal("GCProg.emit: empty array")
+ Fatalf("GCProg.emit: empty array")
}
// Flatten array-of-array-of-array to just a big array by multiplying counts.
}
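The flattening the comment above describes, isolated from the GCProg writer itself (a minimal sketch): [2][3][4]T is treated as the element type T repeated 2*3*4 times.

	// flatCount multiplies the bounds of nested array types into
	// one repeat count for the innermost element type.
	func flatCount(bounds []int64) int64 {
		count := int64(1)
		for _, b := range bounds {
			count *= b
		}
		return count
	}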
node = node.Orig
if node.Orig != node {
- Fatal("%v: bad node", Ctxt.Dconv(a))
+ Fatalf("%v: bad node", Ctxt.Dconv(a))
}
if node.Sym == nil || node.Sym.Name[0] == '.' {
return zbits
o := a.Offset
w := a.Width
if w < 0 {
- Fatal("bad width %d for %v", w, Ctxt.Dconv(a))
+ Fatalf("bad width %d for %v", w, Ctxt.Dconv(a))
}
flag := 0
if nvar >= NVAR {
if Debug['w'] > 1 && node != nil {
- Fatal("variable not optimized: %v", Nconv(node, obj.FmtSharp))
+ Fatalf("variable not optimized: %v", Nconv(node, obj.FmtSharp))
}
if Debug['v'] > 0 {
Warn("variable not optimized: %v", Nconv(node, obj.FmtSharp))
r.regno = 0
switch v.etype {
default:
- Fatal("unknown etype %d/%v", Bitno(b), Econv(int(v.etype), 0))
+ Fatalf("unknown etype %d/%v", Bitno(b), Econv(int(v.etype), 0))
case TINT8,
TUINT8,
// Currently we never generate three-register forms.
// If we do, this will need to change.
if p.From3Type() != obj.TYPE_NONE {
- Fatal("regopt not implemented for from3")
+ Fatalf("regopt not implemented for from3")
}
bit = mkvar(f, &p.To)
}
}
- Fatal("bad in bnum")
+ Fatalf("bad in bnum")
return 0
}
}
// Bitno reports the lowest index of a 1 bit in b.
-// It calls Fatal if there is no 1 bit.
+// It calls Fatalf if there is no 1 bit.
func Bitno(b uint64) int {
if b == 0 {
- Fatal("bad in bitno")
+ Fatalf("bad in bitno")
}
n := 0
if b&(1<<32-1) == 0 {
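The fragment above shows only the first halving step of Bitno's binary-search scan; a complete sketch of the technique (my reconstruction, not the source) tests ever-smaller low halves of b:

	// bitno returns the index of the lowest 1 bit in b.
	func bitno(b uint64) int {
		if b == 0 {
			panic("bitno: no 1 bit")
		}
		n := 0
		if b&(1<<32-1) == 0 { // low 32 bits all zero?
			n += 32
			b >>= 32
		}
		if b&(1<<16-1) == 0 {
			n += 16
			b >>= 16
		}
		if b&(1<<8-1) == 0 {
			n += 8
			b >>= 8
		}
		for b&1 == 0 {
			n++
			b >>= 1
		}
		return n
	}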
ncase = l.N
setlineno(ncase)
if ncase.Op != OXCASE {
- Fatal("typecheckselect %v", Oconv(int(ncase.Op), 0))
+ Fatalf("typecheckselect %v", Oconv(int(ncase.Op), 0))
}
if ncase.List == nil {
func walkselect(sel *Node) {
if sel.List == nil && sel.Xoffset != 0 {
- Fatal("double walkselect") // already rewrote
+ Fatalf("double walkselect") // already rewrote
}
lno := int(setlineno(sel))
var ch *Node
switch n.Op {
default:
- Fatal("select %v", Oconv(int(n.Op), 0))
+ Fatalf("select %v", Oconv(int(n.Op), 0))
// ok already
case OSEND:
r.Ninit = cas.Ninit
switch n.Op {
default:
- Fatal("select %v", Oconv(int(n.Op), 0))
+ Fatalf("select %v", Oconv(int(n.Op), 0))
// if selectnbsend(c, v) { body } else { default body }
case OSEND:
} else {
switch n.Op {
default:
- Fatal("select %v", Oconv(int(n.Op), 0))
+ Fatalf("select %v", Oconv(int(n.Op), 0))
// selectsend(sel *byte, hchan *chan any, elem *any) (selected bool);
case OSEND:
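The selectnbsend rewrite sketched in the comments above implements ordinary Go semantics; as a user-level illustration (not compiler output), a send with a default clause never blocks:

	// Exactly the "if selectnbsend(c, v) { body } else { default body }"
	// shape that walkselect produces.
	func trySend(c chan int, v int) bool {
		select {
		case c <- v:
			return true // send succeeded immediately
		default:
			return false // would have blocked; take the default
		}
	}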
l = initlist
initlist = l.Next
if l.N != n {
- Fatal("bad initlist")
+ Fatalf("bad initlist")
}
n.Initorder = InitDone
bad:
Dump("defn", n.Name.Defn)
- Fatal("init1: bad defn")
+ Fatalf("init1: bad defn")
}
// recurse over n, doing init1 everywhere.
}
if n.Op == ONAME && n.Ninit != nil {
- Fatal("name %v with ninit: %v\n", n.Sym, Nconv(n, obj.FmtSign))
+ Fatalf("name %v with ninit: %v\n", n.Sym, Nconv(n, obj.FmtSign))
}
init1(n, out)
*/
func staticinit(n *Node, out **NodeList) bool {
if n.Op != ONAME || n.Class != PEXTERN || n.Name.Defn == nil || n.Name.Defn.Op != OAS {
- Fatal("staticinit")
+ Fatalf("staticinit")
}
lineno = n.Lineno
for nl := n.List; nl != nil; nl = nl.Next {
r = nl.N
if r.Op != OKEY {
- Fatal("structlit: rhs not OKEY: %v", r)
+ Fatalf("structlit: rhs not OKEY: %v", r)
}
index = r.Left
value = r.Right
if pass == 1 {
walkexpr(&a, init) // add any assignments in r to top
if a.Op != OAS {
- Fatal("structlit: not as")
+ Fatalf("structlit: not as")
}
a.Dodata = 2
} else {
for l := n.List; l != nil; l = l.Next {
r = l.N
if r.Op != OKEY {
- Fatal("arraylit: rhs not OKEY: %v", r)
+ Fatalf("arraylit: rhs not OKEY: %v", r)
}
index = r.Left
value = r.Right
if pass == 1 {
walkexpr(&a, init)
if a.Op != OAS {
- Fatal("arraylit: not as")
+ Fatalf("arraylit: not as")
}
a.Dodata = 2
} else {
for l := n.List; l != nil; l = l.Next {
r = l.N
if r.Op != OKEY {
- Fatal("slicelit: rhs not OKEY: %v", r)
+ Fatalf("slicelit: rhs not OKEY: %v", r)
}
index = r.Left
value = r.Right
r = l.N
if r.Op != OKEY {
- Fatal("maplit: rhs not OKEY: %v", r)
+ Fatalf("maplit: rhs not OKEY: %v", r)
}
index = r.Left
value = r.Right
r = l.N
if r.Op != OKEY {
- Fatal("maplit: rhs not OKEY: %v", r)
+ Fatalf("maplit: rhs not OKEY: %v", r)
}
index = r.Left
value = r.Right
r = l.N
if r.Op != OKEY {
- Fatal("maplit: rhs not OKEY: %v", r)
+ Fatalf("maplit: rhs not OKEY: %v", r)
}
index = r.Left
value = r.Right
t := n.Type
switch n.Op {
default:
- Fatal("anylit: not lit")
+ Fatalf("anylit: not lit")
case OPTRLIT:
if !Isptr[t.Etype] {
- Fatal("anylit: not ptr")
+ Fatalf("anylit: not ptr")
}
var r *Node
case OSTRUCTLIT:
if t.Etype != TSTRUCT {
- Fatal("anylit: not struct")
+ Fatalf("anylit: not struct")
}
if simplename(var_) && count(n.List) > 4 {
case OARRAYLIT:
if t.Etype != TARRAY {
- Fatal("anylit: not array")
+ Fatalf("anylit: not array")
}
if t.Bound < 0 {
slicelit(ctxt, n, var_, init)
case OMAPLIT:
if t.Etype != TMAP {
- Fatal("anylit: not map")
+ Fatalf("anylit: not map")
}
maplit(ctxt, n, var_, init)
}
initplans[n] = p
switch n.Op {
default:
- Fatal("initplan")
+ Fatalf("initplan")
case OARRAYLIT:
var a *Node
for l := n.List; l != nil; l = l.Next {
a = l.N
if a.Op != OKEY || !Smallintconst(a.Left) {
- Fatal("initplan arraylit")
+ Fatalf("initplan arraylit")
}
addvalue(p, n.Type.Type.Width*Mpgetfix(a.Left.Val().U.(*Mpint)), nil, a.Right)
}
for l := n.List; l != nil; l = l.Next {
a = l.N
if a.Op != OKEY || a.Left.Type == nil {
- Fatal("initplan structlit")
+ Fatalf("initplan structlit")
}
addvalue(p, a.Left.Type.Width, nil, a.Right)
}
for l := n.List; l != nil; l = l.Next {
a = l.N
if a.Op != OKEY {
- Fatal("initplan maplit")
+ Fatalf("initplan maplit")
}
addvalue(p, -1, a.Left, a.Right)
}
switch n.Val().Ctype() {
default:
Dump("unexpected literal", n)
- Fatal("iszero")
+ Fatalf("iszero")
case CTNIL:
return true
no:
if n.Dodata == 2 {
Dump("\ngen_as_init", n)
- Fatal("gen_as_init couldnt make data statement")
+ Fatalf("gen_as_init couldnt make data statement")
}
return false
}
}
-func Fatal(fmt_ string, args ...interface{}) {
+func Fatalf(fmt_ string, args ...interface{}) {
Flusherrors()
fmt.Printf("%v: internal compiler error: ", Ctxt.Line(int(lineno)))
s1.Block = s.Block
if s1.Def.Name == nil {
Dump("s1def", s1.Def)
- Fatal("missing Name")
+ Fatalf("missing Name")
}
s1.Def.Name.Pack = pack
s1.Origpkg = opkg
// the last field, total gives the size of the enclosing struct.
func ispaddedfield(t *Type, total int64) bool {
if t.Etype != TFIELD {
- Fatal("ispaddedfield called non-field %v", t)
+ Fatalf("ispaddedfield called non-field %v", t)
}
if t.Down == nil {
return t.Width+t.Type.Width != total
return ret
}
- Fatal("algtype1: unexpected type %v", t)
+ Fatalf("algtype1: unexpected type %v", t)
return 0
}
n.Type = t
if Isfloat[t.Etype] {
- Fatal("nodconst: bad type %v", t)
+ Fatalf("nodconst: bad type %v", t)
}
}
}
if m.Name != nil && n.Op != ODCLFIELD {
Dump("treecopy", n)
- Fatal("treecopy Name")
+ Fatalf("treecopy Name")
}
case ONONAME:
return TFLOAT64
}
- Fatal("cplxsubtype: %v\n", Econv(int(et), 0))
+ Fatalf("cplxsubtype: %v\n", Econv(int(et), 0))
return 0
}
t2 = t2.Type
for ; t1 != nil && t2 != nil; t1, t2 = t1.Down, t2.Down {
if t1.Etype != TFIELD || t2.Etype != TFIELD {
- Fatal("struct/interface missing field: %v %v", t1, t2)
+ Fatalf("struct/interface missing field: %v %v", t1, t2)
}
if t1.Sym != t2.Sym || t1.Embedded != t2.Embedded || !eqtype1(t1.Type, t2.Type, &l) || !eqnote(t1.Note, t2.Note) {
return false
t2 = t2.Type
for ; t1 != nil && t2 != nil; t1, t2 = t1.Down, t2.Down {
if t1.Etype != TSTRUCT || t2.Etype != TSTRUCT {
- Fatal("func missing struct: %v %v", t1, t2)
+ Fatalf("func missing struct: %v %v", t1, t2)
}
// Loop over fields in structs, ignoring argument names.
tb := t2.Type
for ; ta != nil && tb != nil; ta, tb = ta.Down, tb.Down {
if ta.Etype != TFIELD || tb.Etype != TFIELD {
- Fatal("func struct missing field: %v %v", ta, tb)
+ Fatalf("func struct missing field: %v %v", ta, tb)
}
if ta.Isddd != tb.Isddd || !eqtype1(ta.Type, tb.Type, &l) {
return false
}
substAny(&n.Type, &types)
if len(types) > 0 {
- Fatal("substArgTypes: too many argument types")
+ Fatalf("substArgTypes: too many argument types")
}
}
}
if t.Etype == TANY && t.Copyany != 0 {
if len(*types) == 0 {
- Fatal("substArgTypes: not enough argument types")
+ Fatalf("substArgTypes: not enough argument types")
}
*tp = (*types)[0]
*types = (*types)[1:]
func syslook(name string, copy int) *Node {
s := Pkglookup(name, Runtimepkg)
if s == nil || s.Def == nil {
- Fatal("syslook: can't find runtime.%s", name)
+ Fatalf("syslook: can't find runtime.%s", name)
}
if copy == 0 {
// The returned struct must not be modified.
func Ptrto(t *Type) *Type {
if Tptr == 0 {
- Fatal("ptrto: no tptr")
+ Fatalf("ptrto: no tptr")
}
// Reduce allocations by pre-creating common cases.
if !initPtrtoDone {
}
if t.Etype != TFIELD {
- Fatal("structfirst: not field %v", t)
+ Fatalf("structfirst: not field %v", t)
}
s.T = t
return t
bad:
- Fatal("structfirst: not struct %v", n)
+ Fatalf("structfirst: not struct %v", n)
return nil
}
}
if t.Etype != TFIELD {
- Fatal("structnext: not struct %v", n)
+ Fatalf("structnext: not struct %v", n)
return nil
}
return fp
bad:
- Fatal("funcfirst: not func %v", t)
+ Fatalf("funcfirst: not func %v", t)
return nil
}
func getthis(t *Type) **Type {
if t.Etype != TFUNC {
- Fatal("getthis: not a func %v", t)
+ Fatalf("getthis: not a func %v", t)
}
return &t.Type
}
func Getoutarg(t *Type) **Type {
if t.Etype != TFUNC {
- Fatal("getoutarg: not a func %v", t)
+ Fatalf("getoutarg: not a func %v", t)
}
return &t.Type.Down
}
func getinarg(t *Type) **Type {
if t.Etype != TFUNC {
- Fatal("getinarg: not a func %v", t)
+ Fatalf("getinarg: not a func %v", t)
}
return &t.Type.Down.Down
}
case OGE:
return OLT
}
- Fatal("brcom: no com for %v\n", Oconv(a, 0))
+ Fatalf("brcom: no com for %v\n", Oconv(a, 0))
return a
}
case OGE:
return OLE
}
- Fatal("brrev: no rev for %v\n", Oconv(a, 0))
+ Fatalf("brrev: no rev for %v\n", Oconv(a, 0))
return a
}
// make a copy; must not be used as an lvalue
if islvalue(n) {
- Fatal("missing lvalue case in safeexpr: %v", n)
+ Fatalf("missing lvalue case in safeexpr: %v", n)
}
return cheapexpr(n, init)
}
dowidth(t)
w := t.Argwid
if w >= Thearch.MAXWIDTH {
- Fatal("bad argwid %v", t)
+ Fatalf("bad argwid %v", t)
}
w += int64(extra)
if w >= Thearch.MAXWIDTH {
- Fatal("bad argwid %d + %v", extra, t)
+ Fatalf("bad argwid %d + %v", extra, t)
}
if w > Maxarg {
Maxarg = w
a := algtype1(t, nil)
switch a {
case AMEM:
- Fatal("hashfor with AMEM type")
+ Fatalf("hashfor with AMEM type")
case AINTER:
sym = Pkglookup("interhash", Runtimepkg)
// so t must be either an array or a struct.
switch t.Etype {
default:
- Fatal("genhash %v", t)
+ Fatalf("genhash %v", t)
case TARRAY:
if Isslice(t) {
- Fatal("genhash %v", t)
+ Fatalf("genhash %v", t)
}
// An array of pure memory would be handled by the
// so t must be either an array or a struct.
switch t.Etype {
default:
- Fatal("geneq %v", t)
+ Fatalf("geneq %v", t)
case TARRAY:
if Isslice(t) {
- Fatal("geneq %v", t)
+ Fatalf("geneq %v", t)
}
// An array of pure memory would be handled by the
n := l.N
setlineno(n)
if n.Op != OXCASE {
- Fatal("casebody %v", Oconv(int(n.Op), 0))
+ Fatalf("casebody %v", Oconv(int(n.Op), 0))
}
n.Op = OCASE
needvar := count(n.List) != 1 || n.List.N.Op == OLITERAL
for _, c := range cc {
n := c.node
if c.typ != caseKindTypeConst {
- Fatal("typeSwitch walkCases")
+ Fatalf("typeSwitch walkCases")
}
a := Nod(OIF, nil, nil)
a.Left = Nod(OEQ, s.hashname, Nodintconst(int64(c.hash)))
if n.hasVal == -1 {
Debug['h'] = 1
Dump("have Opt", n)
- Fatal("have Opt")
+ Fatalf("have Opt")
}
n.hasVal = +1
n.E = v.U
if n.hasVal == +1 {
Debug['h'] = 1
Dump("have Val", n)
- Fatal("have Val")
+ Fatalf("have Val")
}
n.hasVal = -1
n.E = x
func typecheck(np **Node, top int) *Node {
// cannot type check until all the source has been parsed
if typecheckok == 0 {
- Fatal("early typecheck")
+ Fatalf("early typecheck")
}
n := *np
n.Typecheck = 1
if typecheck_tcstack != l {
- Fatal("typecheck stack out of sync")
+ Fatalf("typecheck stack out of sync")
}
typecheck_tcstack = l.Next
l.Next = typecheck_tcfree
default:
Dump("typecheck", n)
- Fatal("typecheck %v", Oconv(int(n.Op), 0))
+ Fatalf("typecheck %v", Oconv(int(n.Op), 0))
/*
* names
}
if l.Orig != l && l.Op == ONAME {
- Fatal("found non-orig name node %v", l)
+ Fatalf("found non-orig name node %v", l)
}
l.Addrtaken = true
if l.Name != nil && l.Name.Param != nil && l.Name.Param.Closure != nil {
tp := getthisx(t).Type.Type
if l.Left == nil || !Eqtype(l.Left.Type, tp) {
- Fatal("method receiver")
+ Fatalf("method receiver")
}
default:
return
}
if t.Etype != TINTER {
- Fatal("OITAB of %v", t)
+ Fatalf("OITAB of %v", t)
}
n.Type = Ptrto(Types[TUINTPTR])
break OpSwitch
return
}
if !Isslice(t) && t.Etype != TSTRING {
- Fatal("OSPTR of %v", t)
+ Fatalf("OSPTR of %v", t)
}
if t.Etype == TSTRING {
n.Type = Ptrto(Types[TUINT8])
Yyerror("%v is both field and method", n.Right.Sym)
}
if f1.Width == BADWIDTH {
- Fatal("lookdot badwidth %v %p", f1, f1)
+ Fatalf("lookdot badwidth %v %p", f1, f1)
}
n.Xoffset = f1.Width
n.Type = f1.Type
tt = tt.Type
}
} else {
- Fatal("method mismatch: %v for %v", rcvr, tt)
+ Fatalf("method mismatch: %v for %v", rcvr, tt)
}
}
*/
func fielddup(n *Node, hash map[string]bool) {
if n.Op != ONAME {
- Fatal("fielddup: not ONAME")
+ Fatalf("fielddup: not ONAME")
}
name := n.Sym.Name
if hash[name] {
func indexdup(n *Node, hash map[int64]*Node) {
if n.Op != OLITERAL {
- Fatal("indexdup: not OLITERAL")
+ Fatalf("indexdup: not OLITERAL")
}
v := Mpgetfix(n.Val().U.(*Mpint))
func stringtoarraylit(np **Node) {
n := *np
if n.Left.Op != OLITERAL || n.Left.Val().Ctype() != CTSTR {
- Fatal("stringtoarraylit %v", n)
+ Fatalf("stringtoarraylit %v", n)
}
s := n.Left.Val().U.(string)
fmt.Printf(" %v", l.N.Sym)
}
fmt.Printf("\n")
- Fatal("typecheckdef loop")
+ Fatalf("typecheckdef loop")
}
n.Walkdef = 2
switch n.Op {
default:
- Fatal("typecheckdef %v", Oconv(int(n.Op), 0))
+ Fatalf("typecheckdef %v", Oconv(int(n.Op), 0))
// not really syms
case OGOTO, OLABEL:
break
}
- Fatal("var without type, init: %v", n.Sym)
+ Fatalf("var without type, init: %v", n.Sym)
}
if n.Name.Defn.Op == ONAME {
ret:
if n.Op != OLITERAL && n.Type != nil && isideal(n.Type) {
- Fatal("got %v for %v", n.Type, n)
+ Fatalf("got %v for %v", n.Type, n)
}
if typecheckdefstack.N != n {
- Fatal("typecheckdefstack mismatch")
+ Fatalf("typecheckdefstack mismatch")
}
l = typecheckdefstack
typecheckdefstack = l.Next
default:
Dump("unsafenmagic", r)
- Fatal("impossible %v node after dot insertion", Oconv(int(r1.Op), obj.FmtSharp))
+ Fatalf("impossible %v node after dot insertion", Oconv(int(r1.Op), obj.FmtSharp))
goto bad
}
}
if cpuprofile != "" {
f, err := os.Create(cpuprofile)
if err != nil {
- Fatal("%v", err)
+ Fatalf("%v", err)
}
if err := pprof.StartCPUProfile(f); err != nil {
- Fatal("%v", err)
+ Fatalf("%v", err)
}
AtExit(pprof.StopCPUProfile)
}
}
f, err := os.Create(memprofile)
if err != nil {
- Fatal("%v", err)
+ Fatalf("%v", err)
}
AtExit(func() {
runtime.GC() // profile all outstanding allocations
if err := pprof.WriteHeapProfile(f); err != nil {
- Fatal("%v", err)
+ Fatalf("%v", err)
}
})
}
ORECOVER,
OGETG:
if n.Typecheck == 0 {
- Fatal("missing typecheck: %v", Nconv(n, obj.FmtSign))
+ Fatalf("missing typecheck: %v", Nconv(n, obj.FmtSign))
}
init := n.Ninit
n.Ninit = nil
// the value received.
case ORECV:
if n.Typecheck == 0 {
- Fatal("missing typecheck: %v", Nconv(n, obj.FmtSign))
+ Fatalf("missing typecheck: %v", Nconv(n, obj.FmtSign))
}
init := n.Ninit
n.Ninit = nil
f := n.List.N
if f.Op != OCALLFUNC && f.Op != OCALLMETH && f.Op != OCALLINTER {
- Fatal("expected return of call, have %v", f)
+ Fatalf("expected return of call, have %v", f)
}
n.List = concat(list1(f), ascompatet(int(n.Op), rl, &f.Type, 0, &n.Ninit))
break
}
if n.Op == ONAME {
- Fatal("walkstmt ended up with name: %v", Nconv(n, obj.FmtSign))
+ Fatalf("walkstmt ended up with name: %v", Nconv(n, obj.FmtSign))
}
*np = n
// not okay to use n->ninit when walking n,
// because we might replace n with some other node
// and would lose the init list.
- Fatal("walkexpr init == &n->ninit")
+ Fatalf("walkexpr init == &n->ninit")
}
if n.Ninit != nil {
}
if n.Typecheck != 1 {
- Fatal("missed typecheck: %v\n", Nconv(n, obj.FmtSign))
+ Fatalf("missed typecheck: %v\n", Nconv(n, obj.FmtSign))
}
switch n.Op {
default:
Dump("walk", n)
- Fatal("walkexpr: switch 1 unknown op %v", Nconv(n, obj.FmtShort|obj.FmtSign))
+ Fatalf("walkexpr: switch 1 unknown op %v", Nconv(n, obj.FmtShort|obj.FmtSign))
case OTYPE,
ONONAME,
case ODOTTYPE, ODOTTYPE2:
if !isdirectiface(n.Type) || Isfat(n.Type) {
- Fatal("walkexpr ODOTTYPE") // should see inside OAS only
+ Fatalf("walkexpr ODOTTYPE") // should see inside OAS only
}
walkexpr(&n.Left, init)
goto ret
goto ret
case ORECV:
- Fatal("walkexpr ORECV") // should see inside OAS only
+ Fatalf("walkexpr ORECV") // should see inside OAS only
case OSLICE, OSLICEARR, OSLICESTR:
walkexpr(&n.Left, init)
case ONEW:
if n.Esc == EscNone {
if n.Type.Type.Width >= 1<<16 {
- Fatal("large ONEW with EscNone: %v", n)
+ Fatalf("large ONEW with EscNone: %v", n)
}
r := temp(n.Type.Type)
r = Nod(OAS, r, nil) // zero temp
typecheck(&r, Erv)
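A user-level view of the ONEW rewrite above: when escape analysis marks new(T) as EscNone, the allocation behaves like a zeroed stack temporary (illustration, not compiler output):

	func sumNew() int {
		p := new([4]int) // non-escaping: effectively a local variable
		for i := range p {
			p[i] = i
		}
		return p[0] + p[3]
	}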
if n.Type.Etype != TBOOL {
- Fatal("cmp %v", n.Type)
+ Fatalf("cmp %v", n.Type)
}
r.Type = n.Type
n = r
case OAPPEND:
// order should make sure we only see OAS(node, OAPPEND), which we handle above.
- Fatal("append outside assignment")
+ Fatalf("append outside assignment")
case OCOPY:
n = copyany(n, init, flag_race)
t := n.Type
if n.Esc == EscNone {
if !isSmallMakeSlice(n) {
- Fatal("non-small OMAKESLICE with EscNone: %v", n)
+ Fatalf("non-small OMAKESLICE with EscNone: %v", n)
}
// var arr [r]T
// n = arr[:l]
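Written out by hand, the "var arr [r]T; n = arr[:l]" rewrite named in the comments above looks like this (a sketch; r must be a small compile-time constant for the slice to qualify as EscNone):

	func useSmall(l int) int {
		const r = 8
		var arr [r]int // stands in for make([]int, l, r)
		s := arr[:l]
		for i := range s {
			s[i] = i
		}
		return len(s)
	}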
// ifaceeq(i1 any-1, i2 any-2) (ret bool);
case OCMPIFACE:
if !Eqtype(n.Left.Type, n.Right.Type) {
- Fatal("ifaceeq %v %v %v", Oconv(int(n.Op), 0), n.Left.Type, n.Right.Type)
+ Fatalf("ifaceeq %v %v %v", Oconv(int(n.Op), 0), n.Left.Type, n.Right.Type)
}
var fn *Node
if isnilinter(n.Left.Type) {
goto ret
}
- Fatal("missing switch %v", Oconv(int(n.Op), 0))
+ Fatalf("missing switch %v", Oconv(int(n.Op), 0))
// Expressions that are constant at run time but not
// considered const by the language spec are not turned into
}
if ucount != 0 {
- Fatal("ascompatet: too many function calls evaluating parameters")
+ Fatalf("ascompatet: too many function calls evaluating parameters")
}
return concat(nn, mm)
}
n.Esc = esc
typecheck(&n, Erv)
if n.Type == nil {
- Fatal("mkdotargslice: typecheck failed")
+ Fatalf("mkdotargslice: typecheck failed")
}
walkexpr(&n, init)
}
func convas(n *Node, init **NodeList) *Node {
if n.Op != OAS {
- Fatal("convas: not OAS %v", Oconv(int(n.Op), 0))
+ Fatalf("convas: not OAS %v", Oconv(int(n.Op), 0))
}
n.Typecheck = 1
switch l.Op {
default:
- Fatal("reorder3 unexpected lvalue %v", Oconv(int(l.Op), obj.FmtSharp))
+ Fatalf("reorder3 unexpected lvalue %v", Oconv(int(l.Op), obj.FmtSharp))
case ONAME:
break
func outervalue(n *Node) *Node {
for {
if n.Op == OXDOT {
- Fatal("OXDOT in walk")
+ Fatalf("OXDOT in walk")
}
if n.Op == ODOT || n.Op == OPAREN || n.Op == OCONVNOP {
n = n.Left
func vmkcall(fn *Node, t *Type, init **NodeList, va []*Node) *Node {
if fn.Type == nil || fn.Type.Etype != TFUNC {
- Fatal("mkcall %v %v", fn, fn.Type)
+ Fatalf("mkcall %v %v", fn, fn.Type)
}
var args *NodeList
func chanfn(name string, n int, t *Type) *Node {
if t.Etype != TCHAN {
- Fatal("chanfn %v", t)
+ Fatalf("chanfn %v", t)
}
fn := syslook(name, 1)
switch n {
default:
- Fatal("chanfn %d", n)
+ Fatalf("chanfn %d", n)
case 1:
substArgTypes(fn, t.Type)
case 2:
func mapfn(name string, t *Type) *Node {
if t.Etype != TMAP {
- Fatal("mapfn %v", t)
+ Fatalf("mapfn %v", t)
}
fn := syslook(name, 1)
substArgTypes(fn, t.Down, t.Type, t.Down, t.Type)
func mapfndel(name string, t *Type) *Node {
if t.Etype != TMAP {
- Fatal("mapfn %v", t)
+ Fatalf("mapfn %v", t)
}
fn := syslook(name, 1)
substArgTypes(fn, t.Down, t.Type, t.Down)
a := algtype1(t, nil)
if a != AMEM && a != -1 {
- Fatal("eqfor %v", t)
+ Fatalf("eqfor %v", t)
}
if a == AMEM {
}
if !islvalue(cmpl) || !islvalue(cmpr) {
- Fatal("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
+ Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
}
l = temp(Ptrto(t))
switch n.Op {
default:
- Fatal("usefield %v", Oconv(int(n.Op), 0))
+ Fatalf("usefield %v", Oconv(int(n.Op), 0))
case ODOT, ODOTPTR:
break
}
field := dotField[typeSym{t.Orig, n.Right.Sym}]
if field == nil {
- Fatal("usefield %v %v without paramfld", n.Left.Type, n.Right.Sym)
+ Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Right.Sym)
}
if field.Note == nil || !strings.Contains(*field.Note, "go:\"track\"") {
return
// no package statement. This allows us to test more
// than one invalid import statement in a single file.
if nerrors == 0 {
- Fatal("phase error in import")
+ Fatalf("phase error in import")
}
}
case 15:
var op int
switch align {
default:
- gc.Fatal("sgen: invalid alignment %d for %v", align, n.Type)
+ gc.Fatalf("sgen: invalid alignment %d for %v", align, n.Type)
case 1:
op = ppc64.AMOVBU
}
if w%int64(align) != 0 {
- gc.Fatal("sgen: unaligned size %d (align=%d) for %v", w, align, n.Type)
+ gc.Fatalf("sgen: unaligned size %d (align=%d) for %v", w, align, n.Type)
}
c := int32(w / int64(align))
continue
}
if n.Class != gc.PAUTO {
- gc.Fatal("needzero class %d", n.Class)
+ gc.Fatalf("needzero class %d", n.Class)
}
if n.Type.Width%int64(gc.Widthptr) != 0 || n.Xoffset%int64(gc.Widthptr) != 0 || n.Type.Width == 0 {
- gc.Fatal("var %v has size %d offset %d", gc.Nconv(n, obj.FmtLong), int(n.Type.Width), int(n.Xoffset))
+ gc.Fatalf("var %v has size %d offset %d", gc.Nconv(n, obj.FmtLong), int(n.Type.Width), int(n.Xoffset))
}
if lo != hi && n.Xoffset+n.Type.Width >= lo-int64(2*gc.Widthreg) {
}
default:
- gc.Fatal("cgen_hmul %v", t)
+ gc.Fatalf("cgen_hmul %v", t)
}
gc.Cgen(&n1, res)
q := uint64(w / 8) // dwords
if gc.Reginuse(ppc64.REGRT1) {
- gc.Fatal("%v in use during clearfat", obj.Rconv(ppc64.REGRT1))
+ gc.Fatalf("%v in use during clearfat", obj.Rconv(ppc64.REGRT1))
}
var r0 gc.Node
gc.Warnl(int(p.Lineno), "generated nil check")
}
if p.From.Type != obj.TYPE_REG {
- gc.Fatal("invalid nil check %v\n", p)
+ gc.Fatalf("invalid nil check %v\n", p)
}
/*
switch as {
default:
- gc.Fatal("ginscon2")
+ gc.Fatalf("ginscon2")
case ppc64.ACMP:
if -ppc64.BIG <= c && c <= ppc64.BIG {
switch uint32(ft)<<16 | uint32(tt) {
default:
- gc.Fatal("gmove %v -> %v", gc.Tconv(f.Type, obj.FmtLong), gc.Tconv(t.Type, obj.FmtLong))
+ gc.Fatalf("gmove %v -> %v", gc.Tconv(f.Type, obj.FmtLong), gc.Tconv(t.Type, obj.FmtLong))
/*
* integer copy and truncate
case ppc64.AAND, ppc64.AMULLD:
if p.From.Type == obj.TYPE_CONST {
gc.Debug['h'] = 1
- gc.Fatal("bad inst: %v", p)
+ gc.Fatalf("bad inst: %v", p)
}
case ppc64.ACMP, ppc64.ACMPU:
if p.From.Type == obj.TYPE_MEM || p.To.Type == obj.TYPE_MEM {
gc.Debug['h'] = 1
- gc.Fatal("bad inst: %v", p)
+ gc.Fatalf("bad inst: %v", p)
}
}
if w != 0 && ((f != nil && p.From.Width < int64(w)) || (t != nil && p.To.Type != obj.TYPE_REG && p.To.Width > int64(w))) {
gc.Dump("f", f)
gc.Dump("t", t)
- gc.Fatal("bad width: %v (%d, %d)\n", p, p.From.Width, p.To.Width)
+ gc.Fatalf("bad width: %v (%d, %d)\n", p, p.From.Width, p.To.Width)
}
return p
*/
func optoas(op int, t *gc.Type) int {
if t == nil {
- gc.Fatal("optoas: t is nil")
+ gc.Fatalf("optoas: t is nil")
}
a := int(obj.AXXX)
switch uint32(op)<<16 | uint32(gc.Simtype[t.Etype]) {
default:
- gc.Fatal("optoas: no entry for op=%v type=%v", gc.Oconv(int(op), 0), t)
+ gc.Fatalf("optoas: no entry for op=%v type=%v", gc.Oconv(int(op), 0), t)
case gc.OEQ<<16 | gc.TBOOL,
gc.OEQ<<16 | gc.TINT8,
info := &p.Info
*info = progtable[p.As]
if info.Flags == 0 {
- gc.Fatal("proginfo: unknown instruction %v", p)
+ gc.Fatalf("proginfo: unknown instruction %v", p)
}
if (info.Flags&gc.RegRead != 0) && p.Reg == 0 {
return i
}
}
- gc.Fatal("as2variant: instruction %v is not a variant of itself", obj.Aconv(as))
+ gc.Fatalf("as2variant: instruction %v is not a variant of itself", obj.Aconv(as))
return 0
}
if res.Op != gc.OINDREG && res.Op != gc.ONAME {
gc.Dump("n", n)
gc.Dump("res", res)
- gc.Fatal("cgen64 %v of %v", gc.Oconv(int(n.Op), 0), gc.Oconv(int(res.Op), 0))
+ gc.Fatalf("cgen64 %v of %v", gc.Oconv(int(n.Op), 0), gc.Oconv(int(res.Op), 0))
}
switch n.Op {
default:
- gc.Fatal("cgen64 %v", gc.Oconv(int(n.Op), 0))
+ gc.Fatalf("cgen64 %v", gc.Oconv(int(n.Op), 0))
case gc.OMINUS:
gc.Cgen(n.Left, res)
var br *obj.Prog
switch op {
default:
- gc.Fatal("cmp64 %v %v", gc.Oconv(int(op), 0), t)
+ gc.Fatalf("cmp64 %v %v", gc.Oconv(int(op), 0), t)
// cmp hi
// jne L
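What the "cmp hi; jne L" sequence above computes, written portably (a sketch of the jump structure, not the generated code; a signed comparison would need a signed high-word compare):

	func less64(a, b uint64) bool {
		ahi, bhi := uint32(a>>32), uint32(b>>32)
		if ahi != bhi { // cmp hi; jne L
			return ahi < bhi
		}
		return uint32(a) < uint32(b) // high words equal: cmp lo
	}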
continue
}
if n.Class != gc.PAUTO {
- gc.Fatal("needzero class %d", n.Class)
+ gc.Fatalf("needzero class %d", n.Class)
}
if n.Type.Width%int64(gc.Widthptr) != 0 || n.Xoffset%int64(gc.Widthptr) != 0 || n.Type.Width == 0 {
- gc.Fatal("var %v has size %d offset %d", gc.Nconv(n, obj.FmtLong), int(n.Type.Width), int(n.Xoffset))
+ gc.Fatalf("var %v has size %d offset %d", gc.Nconv(n, obj.FmtLong), int(n.Type.Width), int(n.Xoffset))
}
if lo != hi && n.Xoffset+n.Type.Width == lo-int64(2*gc.Widthptr) {
// merge with range we already have
*/
func cgen_div(op int, nl *gc.Node, nr *gc.Node, res *gc.Node) {
if gc.Is64(nl.Type) {
- gc.Fatal("cgen_div %v", nl.Type)
+ gc.Fatalf("cgen_div %v", nl.Type)
}
var t *gc.Type
*/
func cgen_shift(op int, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node) {
if nl.Type.Width > 4 {
- gc.Fatal("cgen_shift %v", nl.Type)
+ gc.Fatalf("cgen_shift %v", nl.Type)
}
w := int(nl.Type.Width * 8)
switch n.Op {
default:
gc.Dump("cgen_floatsse", n)
- gc.Fatal("cgen_floatsse %v", gc.Oconv(int(n.Op), 0))
+ gc.Fatalf("cgen_floatsse %v", gc.Oconv(int(n.Op), 0))
return
case gc.OMINUS,
*/
func optoas(op int, t *gc.Type) int {
if t == nil {
- gc.Fatal("optoas: t is nil")
+ gc.Fatalf("optoas: t is nil")
}
a := obj.AXXX
switch uint32(op)<<16 | uint32(gc.Simtype[t.Etype]) {
default:
- gc.Fatal("optoas: no entry %v-%v", gc.Oconv(int(op), 0), t)
+ gc.Fatalf("optoas: no entry %v-%v", gc.Oconv(int(op), 0), t)
case gc.OADDR<<16 | gc.TPTR32:
a = x86.ALEAL
if !gc.Thearch.Use387 {
switch uint32(op)<<16 | uint32(et) {
default:
- gc.Fatal("foptoas-sse: no entry %v-%v", gc.Oconv(int(op), 0), t)
+ gc.Fatalf("foptoas-sse: no entry %v-%v", gc.Oconv(int(op), 0), t)
case gc.OCMP<<16 | gc.TFLOAT32:
a = x86.AUCOMISS
return x86.AFCHS
}
- gc.Fatal("foptoas %v %v %#x", gc.Oconv(int(op), 0), t, flg)
+ gc.Fatalf("foptoas %v %v %#x", gc.Oconv(int(op), 0), t, flg)
return 0
}
*/
func split64(n *gc.Node, lo *gc.Node, hi *gc.Node) {
if !gc.Is64(n.Type) {
- gc.Fatal("split64 %v", n.Type)
+ gc.Fatalf("split64 %v", n.Type)
}
if nsclean >= len(sclean) {
- gc.Fatal("split64 clean")
+ gc.Fatalf("split64 clean")
}
sclean[nsclean].Op = gc.OEMPTY
nsclean++
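The value-level effect of split64, registers and addressing aside (a sketch): expose a 64-bit operand as two 32-bit halves for the 386 back end to operate on.

	func split(v uint64) (lo, hi uint32) {
		return uint32(v), uint32(v >> 32)
	}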
func splitclean() {
if nsclean <= 0 {
- gc.Fatal("splitclean")
+ gc.Fatalf("splitclean")
}
nsclean--
if sclean[nsclean].Op != gc.OEMPTY {
switch uint32(ft)<<16 | uint32(tt) {
default:
// should not happen
- gc.Fatal("gmove %v -> %v", f, t)
+ gc.Fatalf("gmove %v -> %v", f, t)
return
/*
gmove(f, &t1)
switch tt {
default:
- gc.Fatal("gmove %v", t)
+ gc.Fatalf("gmove %v", t)
case gc.TINT8:
gins(x86.ACMPL, &t1, ncon(-0x80&(1<<32-1)))
}
if gc.Ismem(t) {
if f.Op != gc.OREGISTER || f.Reg != x86.REG_F0 {
- gc.Fatal("gmove %v", f)
+ gc.Fatalf("gmove %v", f)
}
a = x86.AFMOVFP
if ft == gc.TFLOAT64 {
// should not happen
fatal:
- gc.Fatal("gmove %v -> %v", gc.Nconv(f, obj.FmtLong), gc.Nconv(t, obj.FmtLong))
+ gc.Fatalf("gmove %v -> %v", gc.Nconv(f, obj.FmtLong), gc.Nconv(t, obj.FmtLong))
return
}
switch uint32(ft)<<16 | uint32(tt) {
// should not happen
default:
- gc.Fatal("gmove %v -> %v", f, t)
+ gc.Fatalf("gmove %v -> %v", f, t)
return
*/
func gins(as int, f *gc.Node, t *gc.Node) *obj.Prog {
if as == x86.AFMOVF && f != nil && f.Op == gc.OREGISTER && t != nil && t.Op == gc.OREGISTER {
- gc.Fatal("gins MOVF reg, reg")
+ gc.Fatalf("gins MOVF reg, reg")
}
if as == x86.ACVTSD2SS && f != nil && f.Op == gc.OLITERAL {
- gc.Fatal("gins CVTSD2SS const")
+ gc.Fatalf("gins CVTSD2SS const")
}
if as == x86.AMOVSD && t != nil && t.Op == gc.OREGISTER && t.Reg == x86.REG_F0 {
- gc.Fatal("gins MOVSD into F0")
+ gc.Fatalf("gins MOVSD into F0")
}
if as == x86.AMOVL && f != nil && f.Op == gc.OADDR && f.Left.Op == gc.ONAME && f.Left.Class != gc.PEXTERN && f.Left.Class != gc.PFUNC {
case x86.ALEAL:
if f != nil && gc.Isconst(f, gc.CTNIL) {
- gc.Fatal("gins LEAL nil %v", f.Type)
+ gc.Fatalf("gins LEAL nil %v", f.Type)
}
}
if true && w != 0 && f != nil && (p.From.Width > int64(w) || p.To.Width > int64(w)) {
gc.Dump("bad width from:", f)
gc.Dump("bad width to:", t)
- gc.Fatal("bad width: %v (%d, %d)\n", p, p.From.Width, p.To.Width)
+ gc.Fatalf("bad width: %v (%d, %d)\n", p, p.From.Width, p.To.Width)
}
if p.To.Type == obj.TYPE_ADDR && w > 0 {
- gc.Fatal("bad use of addr: %v", p)
+ gc.Fatalf("bad use of addr: %v", p)
}
return p
*/
func copyas(a *obj.Addr, v *obj.Addr) bool {
if x86.REG_AL <= a.Reg && a.Reg <= x86.REG_BL {
- gc.Fatal("use of byte register")
+ gc.Fatalf("use of byte register")
}
if x86.REG_AL <= v.Reg && v.Reg <= x86.REG_BL {
- gc.Fatal("use of byte register")
+ gc.Fatalf("use of byte register")
}
if a.Type != v.Type || a.Name != v.Name || a.Reg != v.Reg {
info := &p.Info
*info = progtable[p.As]
if info.Flags == 0 {
- gc.Fatal("unknown instruction %v", p)
+ gc.Fatalf("unknown instruction %v", p)
}
if (info.Flags&gc.ShiftCX != 0) && p.From.Type != obj.TYPE_CONST {