From: Josh Bleecher Snyder
Date: Fri, 1 Apr 2016 21:51:02 +0000 (-0700)
Subject: cmd/compile: rename Node.Int to Node.Int64
X-Git-Tag: go1.7beta1~949
X-Git-Url: http://www.git.cypherpunks.su/?a=commitdiff_plain;h=5cab01698a782b28933ac16fdc4c43634b1331d6;p=gostls13.git

cmd/compile: rename Node.Int to Node.Int64

gorename -from '"cmd/compile/internal/gc".Node.Int' -to 'Int64'

Change-Id: I2fe3bf9a26ae6b0600d990d0c981e4b8b53020a4
Reviewed-on: https://go-review.googlesource.com/21426
Reviewed-by: Brad Fitzpatrick
---

diff --git a/src/cmd/compile/internal/amd64/ggen.go b/src/cmd/compile/internal/amd64/ggen.go
index d65d128edd..909f7b0c4f 100644
--- a/src/cmd/compile/internal/amd64/ggen.go
+++ b/src/cmd/compile/internal/amd64/ggen.go
@@ -206,9 +206,9 @@ func dodiv(op gc.Op, nl *gc.Node, nr *gc.Node, res *gc.Node) {
 	check := false
 	if t.IsSigned() {
 		check = true
-		if gc.Isconst(nl, gc.CTINT) && nl.Int() != -(1<<uint64(t.Width*8-1)) {
+		if gc.Isconst(nl, gc.CTINT) && nl.Int64() != -(1<<uint64(t.Width*8-1)) {
 			check = false
-		} else if gc.Isconst(nr, gc.CTINT) && nr.Int() != -1 {
+		} else if gc.Isconst(nr, gc.CTINT) && nr.Int64() != -1 {
 			check = false
 		}
 	}
@@ ... @@ func cgen_shift(op gc.Op, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node)
-		sc := uint64(nr.Int())
+		sc := uint64(nr.Int64())
 		if sc >= uint64(nl.Type.Width*8) {
 			// large shift gets 2 shifts by width-1
 			var n3 gc.Node
diff --git a/src/cmd/compile/internal/amd64/gsubr.go b/src/cmd/compile/internal/amd64/gsubr.go
index a7c1eb9c12..456fa7cbae 100644
--- a/src/cmd/compile/internal/amd64/gsubr.go
+++ b/src/cmd/compile/internal/amd64/gsubr.go
@@ -213,7 +213,7 @@ func gmove(f *gc.Node, t *gc.Node) {
 		// 64-bit immediates are really 32-bit sign-extended
 		// unless moving into a register.
 		if gc.Isint[tt] {
-			if i := con.Int(); int64(int32(i)) != i {
+			if i := con.Int64(); int64(int32(i)) != i {
 				goto hard
 			}
 		}
@@ -1310,7 +1310,7 @@ func sudoaddable(as obj.As, n *gc.Node, a *obj.Addr) bool {
 		if !gc.Isconst(n, gc.CTINT) {
 			break
 		}
-		v := n.Int()
+		v := n.Int64()
 		if v >= 32000 || v <= -32000 {
 			break
 		}
diff --git a/src/cmd/compile/internal/arm/cgen64.go b/src/cmd/compile/internal/arm/cgen64.go
index b9bd53ea9d..337bf03179 100644
--- a/src/cmd/compile/internal/arm/cgen64.go
+++ b/src/cmd/compile/internal/arm/cgen64.go
@@ -237,7 +237,7 @@ func cgen64(n *gc.Node, res *gc.Node) {
 	//	shld hi:lo, c
 	//	shld lo:t, c
 	case gc.OLROT:
-		v := uint64(r.Int())
+		v := uint64(r.Int64())

 		var bl gc.Node
 		gc.Regalloc(&bl, lo1.Type, nil)
@@ -291,7 +291,7 @@ func cgen64(n *gc.Node, res *gc.Node) {
 		var p4 *obj.Prog
 		var p5 *obj.Prog
 		if r.Op == gc.OLITERAL {
-			v := uint64(r.Int())
+			v := uint64(r.Int64())
 			if v >= 64 {
 				// TODO(kaib): replace with gins(AMOVW, nodintconst(0), &al)
 				// here and below (verify it optimizes to EOR)
@@ -452,7 +452,7 @@ func cgen64(n *gc.Node, res *gc.Node) {
 		var creg gc.Node
 		var p3 *obj.Prog
 		if r.Op == gc.OLITERAL {
-			v := uint64(r.Int())
+			v := uint64(r.Int64())
 			if v >= 64 {
 				if bh.Type.Etype == gc.TINT32 {
 					// MOVW	bh->31, al
diff --git a/src/cmd/compile/internal/arm/ggen.go b/src/cmd/compile/internal/arm/ggen.go
index 97bd22a671..d241357d62 100644
--- a/src/cmd/compile/internal/arm/ggen.go
+++ b/src/cmd/compile/internal/arm/ggen.go
@@ -178,7 +178,7 @@ func cgen_shift(op gc.Op, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node)
 	w := int(nl.Type.Width * 8)

 	if op == gc.OLROT {
-		v := nr.Int()
+		v := nr.Int64()
 		var n1 gc.Node
 		gc.Regalloc(&n1, nl.Type, res)
 		if w == 32 {
@@ -205,7 +205,7 @@ func cgen_shift(op gc.Op, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node)
 		var n1 gc.Node
 		gc.Regalloc(&n1, nl.Type, res)
 		gc.Cgen(nl, &n1)
-		sc := uint64(nr.Int())
+		sc := uint64(nr.Int64())
 		if sc == 0 {
 		} else // nothing to do
 		if sc >= uint64(nl.Type.Width*8) {
@@ -475,7 +475,7 @@ func ginscon(as obj.As, c int64, n *gc.Node) {
 }

 func ginscmp(op gc.Op, t *gc.Type, n1, n2 *gc.Node, likely int) *obj.Prog {
-	if t.IsInteger() && n1.Op == gc.OLITERAL && n1.Int() == 0 && n2.Op != gc.OLITERAL {
+	if t.IsInteger() && n1.Op == gc.OLITERAL && n1.Int64() == 0 && n2.Op != gc.OLITERAL {
 		op = gc.Brrev(op)
 		n1, n2 = n2, n1
 	}
@@ -484,7 +484,7 @@ func ginscmp(op gc.Op, t *gc.Type, n1, n2 *gc.Node, likely int) *obj.Prog {
 	gc.Regalloc(&g1, n1.Type, &r1)
 	gc.Cgen(n1, &g1)
 	gmove(&g1, &r1)
-	if t.IsInteger() && n2.Op == gc.OLITERAL && n2.Int() == 0 {
+	if t.IsInteger() && n2.Op == gc.OLITERAL && n2.Int64() == 0 {
 		gins(arm.ACMP, &r1, n2)
 	} else {
 		gc.Regalloc(&r2, t, n2)
diff --git a/src/cmd/compile/internal/arm/gsubr.go b/src/cmd/compile/internal/arm/gsubr.go
index a98563304e..26da2e2081 100644
--- a/src/cmd/compile/internal/arm/gsubr.go
+++ b/src/cmd/compile/internal/arm/gsubr.go
@@ -112,7 +112,7 @@ func split64(n *gc.Node, lo *gc.Node, hi *gc.Node) {
 	case gc.OLITERAL:
 		var n1 gc.Node
 		n.Convconst(&n1, n.Type)
-		i := n1.Int()
+		i := n1.Int64()
 		gc.Nodconst(lo, gc.Types[gc.TUINT32], int64(uint32(i)))
 		i >>= 32
 		if n.Type.Etype == gc.TINT64 {
@@ -1143,7 +1143,7 @@ func sudoaddable(as obj.As, n *gc.Node, a *obj.Addr) bool {
 		if !gc.Isconst(n, gc.CTINT) {
 			break
 		}
-		v := n.Int()
+		v := n.Int64()
 		if v >= 32000 || v <= -32000 {
 			break
 		}
diff --git a/src/cmd/compile/internal/arm64/ggen.go b/src/cmd/compile/internal/arm64/ggen.go
index 3b7a422e5c..9abd901d7a 100644
--- a/src/cmd/compile/internal/arm64/ggen.go
+++ b/src/cmd/compile/internal/arm64/ggen.go
@@ -151,9 +151,9 @@ func dodiv(op gc.Op, nl *gc.Node, nr *gc.Node, res *gc.Node) {
 	check := false
 	if t.IsSigned() {
 		check = true
-		if gc.Isconst(nl, gc.CTINT) && nl.Int() != -(1<<uint64(t.Width*8-1)) {
+		if gc.Isconst(nl, gc.CTINT) && nl.Int64() != -(1<<uint64(t.Width*8-1)) {
 			check = false
-		} else if gc.Isconst(nr, gc.CTINT) && nr.Int() != -1 {
+		} else if gc.Isconst(nr, gc.CTINT) && nr.Int64() != -1 {
 			check = false
 		}
 	}
@@ ... @@ func cgen_shift(op gc.Op, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node)
-		sc := uint64(nr.Int())
+		sc := uint64(nr.Int64())
 		if sc >= uint64(nl.Type.Width)*8 {
 			// large shift gets 2 shifts by width-1
 			var n3 gc.Node
diff --git a/src/cmd/compile/internal/arm64/gsubr.go b/src/cmd/compile/internal/arm64/gsubr.go
index a4334517f4..4d64e790af 100644
--- a/src/cmd/compile/internal/arm64/gsubr.go
+++ b/src/cmd/compile/internal/arm64/gsubr.go
@@ -115,7 +115,7 @@ func ginscmp(op gc.Op, t *gc.Type, n1, n2 *gc.Node, likely int) *obj.Prog {
 	gc.Cgen(n1, &g1)
 	gmove(&g1, &r1)
 	if t.IsInteger() && gc.Isconst(n2, gc.CTINT) {
-		ginscon2(optoas(gc.OCMP, t), &r1, n2.Int())
+		ginscon2(optoas(gc.OCMP, t), &r1, n2.Int64())
 	} else {
 		gc.Regalloc(&r2, t, n2)
 		gc.Regalloc(&g2, n1.Type, &r2)
diff --git a/src/cmd/compile/internal/gc/cgen.go b/src/cmd/compile/internal/gc/cgen.go
index 5cab13bc4e..a1904404e9 100644
--- a/src/cmd/compile/internal/gc/cgen.go
+++ b/src/cmd/compile/internal/gc/cgen.go
@@ -1032,7 +1032,7 @@ func Agenr(n *Node, a *Node, res *Node) {
 			if Isconst(nl, CTSTR) {
 				Fatalf("constant string constant index")
 			}
-			v := uint64(nr.Int())
+			v := uint64(nr.Int64())
 			var n2 Node
 			if nl.Type.IsSlice() || nl.Type.IsString() {
 				if Debug['B'] == 0 && !n.Bounded {
@@ -1184,7 +1184,7 @@ func Agenr(n *Node, a *Node, res *Node) {
 			if Isconst(nl, CTSTR) {
 				Fatalf("constant string constant index") // front end should handle
 			}
-			v := uint64(nr.Int())
+			v := uint64(nr.Int64())
 			if nl.Type.IsSlice() || nl.Type.IsString() {
 				if Debug['B'] == 0 && !n.Bounded {
 					nlen := n3
@@ -1374,7 +1374,7 @@ func Agenr(n *Node, a *Node, res *Node) {
 			if Isconst(nl, CTSTR) {
 				Fatalf("constant string constant index") // front end should handle
 			}
-			v := uint64(nr.Int())
+			v := uint64(nr.Int64())
 			if nl.Type.IsSlice() || nl.Type.IsString() {
 				if Debug['B'] == 0 && !n.Bounded {
 					p1 := Thearch.Ginscmp(OGT, Types[Simtype[TUINT]], &nlen, Nodintconst(int64(v)), +1)
@@ -1708,7 +1708,7 @@ func Igen(n *Node, a *Node, res *Node) {

 		// Compute &a[i] as &a + i*width.
 		a.Type = n.Type
-		a.Xoffset += n.Right.Int() * n.Type.Width
+		a.Xoffset += n.Right.Int64() * n.Type.Width
 		Fixlargeoffset(a)
 		return
 	}
@@ -2214,7 +2214,7 @@ func stkof(n *Node) int64 {
 			return off
 		}
 		if Isconst(n.Right, CTINT) {
-			return off + t.Elem().Width*n.Right.Int()
+			return off + t.Elem().Width*n.Right.Int64()
 		}
 		return +1000 // on stack but not sure exactly where

@@ -2645,7 +2645,7 @@ func cgen_div(op Op, nl *Node, nr *Node, res *Node) {
 	case TUINT64:
 		var m Magic
 		m.W = w
-		m.Ud = uint64(nr.Int())
+		m.Ud = uint64(nr.Int64())
 		Umagic(&m)
 		if m.Bad != 0 {
 			break
@@ -2683,7 +2683,7 @@ func cgen_div(op Op, nl *Node, nr *Node, res *Node) {
 	case TINT64:
 		var m Magic
 		m.W = w
-		m.Sd = nr.Int()
+		m.Sd = nr.Int64()
 		Smagic(&m)
 		if m.Bad != 0 {
 			break
@@ -3243,7 +3243,7 @@ func cgen_slice(n, res *Node, wb bool) {
 			Fatalf("missed slice out of bounds check")
 		}
 		var tmp Node
-		Nodconst(&tmp, indexRegType, n1.Int())
+		Nodconst(&tmp, indexRegType, n1.Int64())
 		n1 = &tmp
 	}
 	p := Thearch.Ginscmp(OGT, indexRegType, n1, n2, -1)
@@ -3327,9 +3327,9 @@ func cgen_slice(n, res *Node, wb bool) {
 	switch j.Op {
 	case OLITERAL:
 		if Isconst(&i, CTINT) {
-			Nodconst(&j, indexRegType, j.Int()-i.Int())
+			Nodconst(&j, indexRegType, j.Int64()-i.Int64())
 			if Debug_slice > 0 {
-				Warn("slice: result len == %d", j.Int())
+				Warn("slice: result len == %d", j.Int64())
 			}
 			break
 		}
@@ -3344,7 +3344,7 @@ func cgen_slice(n, res *Node, wb bool) {
 		fallthrough
 	case OREGISTER:
 		if i.Op == OLITERAL {
-			v := i.Int()
+			v := i.Int64()
 			if v != 0 {
 				ginscon(Thearch.Optoas(OSUB, indexRegType), v, &j)
 			}
@@ -3387,9 +3387,9 @@ func cgen_slice(n, res *Node, wb bool) {
 	switch k.Op {
 	case OLITERAL:
 		if Isconst(&i, CTINT) {
-			Nodconst(&k, indexRegType, k.Int()-i.Int())
+			Nodconst(&k, indexRegType, k.Int64()-i.Int64())
 			if Debug_slice > 0 {
-				Warn("slice: result cap == %d", k.Int())
+				Warn("slice: result cap == %d", k.Int64())
 			}
 			break
 		}
@@ -3410,7 +3410,7 @@ func cgen_slice(n, res *Node, wb bool) {
 				Warn("slice: result cap == 0")
 			}
 		} else if i.Op == OLITERAL {
-			v := i.Int()
+			v := i.Int64()
 			if v != 0 {
 				ginscon(Thearch.Optoas(OSUB, indexRegType), v, &k)
 			}
@@ -3503,7 +3503,7 @@ func cgen_slice(n, res *Node, wb bool) {
 		w = res.Type.Elem().Width // res is []T, elem size is T.width
 	}
 	if Isconst(&i, CTINT) {
-		ginscon(Thearch.Optoas(OADD, xbase.Type), i.Int()*w, &xbase)
+		ginscon(Thearch.Optoas(OADD, xbase.Type), i.Int64()*w, &xbase)
 	} else if Thearch.AddIndex != nil && Thearch.AddIndex(&i, w, &xbase) {
 		// done by back end
 	} else if w == 1 {
diff --git a/src/cmd/compile/internal/gc/const.go b/src/cmd/compile/internal/gc/const.go
index 160eb66d5e..95dc898924 100644
--- a/src/cmd/compile/internal/gc/const.go
+++ b/src/cmd/compile/internal/gc/const.go
@@ -16,16 +16,16 @@ func (n *Node) IntLiteral() (x int64, ok bool) {
 	case n == nil:
 		return
 	case Isconst(n, CTINT):
-		return n.Int(), true
+		return n.Int64(), true
 	case Isconst(n, CTBOOL):
 		return int64(obj.Bool2int(n.Bool())), true
 	}
 	return
 }

-// Int returns n as an int.
-// n must be an integer constant.
-func (n *Node) Int() int64 {
+// Int64 returns n as an int64.
+// n must be an integer or rune constant.
+func (n *Node) Int64() int64 {
 	if !Isconst(n, CTINT) {
 		Fatalf("Int(%v)", n)
 	}
@@ -1455,7 +1455,7 @@ func nonnegconst(n *Node) int {
 			if n.Val().U.(*Mpint).Cmp(Minintval[TUINT32]) < 0 || n.Val().U.(*Mpint).Cmp(Maxintval[TINT32]) > 0 {
 				break
 			}
-			return int(n.Int())
+			return int(n.Int64())
 		}
 	}

@@ -1510,7 +1510,7 @@ func (n *Node) Convconst(con *Node, t *Type) {
 			Fatalf("convconst ctype=%d %v", n.Val().Ctype(), Tconv(t, FmtLong))

 		case CTINT, CTRUNE:
-			i = n.Int()
+			i = n.Int64()

 		case CTBOOL:
 			i = int64(obj.Bool2int(n.Val().U.(bool)))
diff --git a/src/cmd/compile/internal/gc/gsubr.go b/src/cmd/compile/internal/gc/gsubr.go
index b5e1388e57..353d90f593 100644
--- a/src/cmd/compile/internal/gc/gsubr.go
+++ b/src/cmd/compile/internal/gc/gsubr.go
@@ -438,7 +438,7 @@ func Naddr(a *obj.Addr, n *Node) {
 		case CTINT, CTRUNE:
 			a.Sym = nil
 			a.Type = obj.TYPE_CONST
-			a.Offset = n.Int()
+			a.Offset = n.Int64()

 		case CTSTR:
 			datagostring(n.Val().U.(string), a)
diff --git a/src/cmd/compile/internal/gc/sinit.go b/src/cmd/compile/internal/gc/sinit.go
index ade7772200..85ef78b973 100644
--- a/src/cmd/compile/internal/gc/sinit.go
+++ b/src/cmd/compile/internal/gc/sinit.go
@@ -433,7 +433,7 @@ func staticassign(l *Node, r *Node, out *[]*Node) bool {
 		initplan(r)
 		if r.Type.IsSlice() {
 			// Init slice.
-			bound := r.Right.Int()
+			bound := r.Right.Int64()
 			ta := typArray(r.Type.Elem(), bound)
 			a := staticname(ta, 1)
 			inittemps[r] = a
@@ -689,7 +689,7 @@ func arraylit(ctxt int, pass int, n *Node, var_ *Node, init *Nodes) {

 func slicelit(ctxt int, n *Node, var_ *Node, init *Nodes) {
 	// make an array type corresponding the number of elements we have
-	t := typArray(n.Type.Elem(), n.Right.Int())
+	t := typArray(n.Type.Elem(), n.Right.Int64())
 	dowidth(t)

 	if ctxt != 0 {
@@ -1171,7 +1171,7 @@ func oaslit(n *Node, init *Nodes) bool {

 func getlit(lit *Node) int {
 	if Smallintconst(lit) {
-		return int(lit.Int())
+		return int(lit.Int64())
 	}
 	return -1
 }
@@ -1234,7 +1234,7 @@ func initplan(n *Node) {
 			if a.Op != OKEY || !Smallintconst(a.Left) {
 				Fatalf("initplan arraylit")
 			}
-			addvalue(p, n.Type.Elem().Width*a.Left.Int(), a.Right)
+			addvalue(p, n.Type.Elem().Width*a.Left.Int64(), a.Right)
 		}

 	case OSTRUCTLIT:
diff --git a/src/cmd/compile/internal/gc/ssa.go b/src/cmd/compile/internal/gc/ssa.go
index 0f696c2f9a..6fb6c3926c 100644
--- a/src/cmd/compile/internal/gc/ssa.go
+++ b/src/cmd/compile/internal/gc/ssa.go
@@ -717,7 +717,7 @@ func (s *state) stmt(n *Node) {
 			} else {
 				j = rhs.Right.Right
 			}
-			if i != nil && (i.Op == OLITERAL && i.Val().Ctype() == CTINT && i.Int() == 0) {
+			if i != nil && (i.Op == OLITERAL && i.Val().Ctype() == CTINT && i.Int64() == 0) {
 				// [0:...] is the same as [:...]
 				i = nil
 			}
@@ -1423,7 +1423,7 @@ func (s *state) expr(n *Node) *ssa.Value {
 	case OLITERAL:
 		switch n.Val().Ctype() {
 		case CTINT:
-			i := n.Int()
+			i := n.Int64()
 			switch n.Type.Size() {
 			case 1:
 				return s.constInt8(n.Type, int8(i))
@@ -1825,7 +1825,7 @@ func (s *state) expr(n *Node) *ssa.Value {
 		return s.newValue2(s.ssaShiftOp(n.Op, n.Type, n.Right.Type), a.Type, a, b)
 	case OLROT:
 		a := s.expr(n.Left)
-		i := n.Right.Int()
+		i := n.Right.Int64()
 		if i <= 0 || i >= n.Type.Size()*8 {
 			s.Fatalf("Wrong rotate distance for LROT, expected 1 through %d, saw %d", n.Type.Size()*8-1, i)
 		}
@@ -1943,7 +1943,7 @@ func (s *state) expr(n *Node) *ssa.Value {
 		ptrtyp := Ptrto(Types[TUINT8])
 		ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
 		if Isconst(n.Right, CTINT) {
-			ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, n.Right.Int(), ptr)
+			ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, n.Right.Int64(), ptr)
 		} else {
 			ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
 		}
diff --git a/src/cmd/compile/internal/gc/subr.go b/src/cmd/compile/internal/gc/subr.go
index f1f6c98ceb..2447bccb5c 100644
--- a/src/cmd/compile/internal/gc/subr.go
+++ b/src/cmd/compile/internal/gc/subr.go
@@ -484,7 +484,7 @@ func aindex(b *Node, t *Type) *Type {

 	case CTINT, CTRUNE:
 		hasbound = true
-		bound = b.Int()
+		bound = b.Int64()
 		if bound < 0 {
 			Yyerror("array bound must be non negative")
 		}
@@ -2031,7 +2031,7 @@ func powtwo(n *Node) int {
 		return -1
 	}

-	v := uint64(n.Int())
+	v := uint64(n.Int64())
 	b := uint64(1)
 	for i := 0; i < 64; i++ {
 		if b == v {
diff --git a/src/cmd/compile/internal/gc/typecheck.go b/src/cmd/compile/internal/gc/typecheck.go
index 636691ebbb..828f5bae97 100644
--- a/src/cmd/compile/internal/gc/typecheck.go
+++ b/src/cmd/compile/internal/gc/typecheck.go
@@ -1008,7 +1008,7 @@ OpSwitch:
 		}

 		if !n.Bounded && Isconst(n.Right, CTINT) {
-			x := n.Right.Int()
+			x := n.Right.Int64()
 			if x < 0 {
 				Yyerror("invalid %s index %v (index must be non-negative)", why, n.Right)
 			} else if t.IsArray() && x >= t.NumElem() {
@@ -2212,13 +2212,13 @@ func checksliceindex(l *Node, r *Node, tp *Type) bool {
 	}

 	if r.Op == OLITERAL {
-		if r.Int() < 0 {
+		if r.Int64() < 0 {
 			Yyerror("invalid slice index %v (index must be non-negative)", r)
 			return false
-		} else if tp != nil && tp.NumElem() > 0 && r.Int() > tp.NumElem() {
+		} else if tp != nil && tp.NumElem() > 0 && r.Int64() > tp.NumElem() {
 			Yyerror("invalid slice index %v (out of bounds for %d-element array)", r, tp.NumElem())
 			return false
-		} else if Isconst(l, CTSTR) && r.Int() > int64(len(l.Val().U.(string))) {
+		} else if Isconst(l, CTSTR) && r.Int64() > int64(len(l.Val().U.(string))) {
 			Yyerror("invalid slice index %v (out of bounds for %d-byte string)", r, len(l.Val().U.(string)))
 			return false
 		} else if r.Val().U.(*Mpint).Cmp(Maxintval[TINT]) > 0 {
@@ -2834,7 +2834,7 @@ func indexdup(n *Node, hash map[int64]*Node) {
 		Fatalf("indexdup: not OLITERAL")
 	}

-	v := n.Int()
+	v := n.Int64()
 	if hash[v] != nil {
 		Yyerror("duplicate index in array literal: %d", v)
 		return
diff --git a/src/cmd/compile/internal/gc/walk.go b/src/cmd/compile/internal/gc/walk.go
index 2715dc03c8..6576daadac 100644
--- a/src/cmd/compile/internal/gc/walk.go
+++ b/src/cmd/compile/internal/gc/walk.go
@@ -365,7 +365,7 @@ func isSmallMakeSlice(n *Node) bool {
 	}
 	t := n.Type

-	return Smallintconst(l) && Smallintconst(r) && (t.Elem().Width == 0 || r.Int() < (1<<16)/t.Elem().Width)
+	return Smallintconst(l) && Smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
 }

 // walk the whole tree of the body of an
@@ -1177,7 +1177,7 @@ opswitch:
"abc"[1] with 'b'. // delayed until now because "abc"[1] is not // an ideal constant. - v := n.Right.Int() + v := n.Right.Int64() Nodconst(n, n.Type, int64(n.Left.Val().U.(string)[v])) n.Typecheck = 1 @@ -3299,9 +3299,9 @@ func walkrotate(n *Node) *Node { w := int(l.Type.Width * 8) if Smallintconst(l.Right) && Smallintconst(r.Right) { - sl := int(l.Right.Int()) + sl := int(l.Right.Int64()) if sl >= 0 { - sr := int(r.Right.Int()) + sr := int(r.Right.Int64()) if sr >= 0 && sl+sr == w { // Rewrite left shift half to left rotate. if l.Op == OLSH { @@ -3312,7 +3312,7 @@ func walkrotate(n *Node) *Node { n.Op = OLROT // Remove rotate 0 and rotate w. - s := int(n.Right.Int()) + s := int(n.Right.Int64()) if s == 0 || s == w { n = n.Left @@ -3352,7 +3352,7 @@ func walkmul(n *Node, init *Nodes) *Node { // x*0 is 0 (and side effects of x). var pow int var w int - if nr.Int() == 0 { + if nr.Int64() == 0 { cheapexpr(nl, init) Nodconst(n, n.Type, 0) goto ret @@ -3444,10 +3444,10 @@ func walkdiv(n *Node, init *Nodes) *Node { m.W = w if nl.Type.IsSigned() { - m.Sd = nr.Int() + m.Sd = nr.Int64() Smagic(&m) } else { - m.Ud = uint64(nr.Int()) + m.Ud = uint64(nr.Int64()) Umagic(&m) } @@ -3639,7 +3639,7 @@ func walkdiv(n *Node, init *Nodes) *Node { // n = nl & (nr-1) n.Op = OAND - Nodconst(&nc, nl.Type, nr.Int()-1) + Nodconst(&nc, nl.Type, nr.Int64()-1) } else { // n = nl >> pow n.Op = ORSH @@ -3669,7 +3669,7 @@ func bounded(n *Node, max int64) bool { bits := int32(8 * n.Type.Width) if Smallintconst(n) { - v := n.Int() + v := n.Int64() return 0 <= v && v < max } @@ -3677,9 +3677,9 @@ func bounded(n *Node, max int64) bool { case OAND: v := int64(-1) if Smallintconst(n.Left) { - v = n.Left.Int() + v = n.Left.Int64() } else if Smallintconst(n.Right) { - v = n.Right.Int() + v = n.Right.Int64() } if 0 <= v && v < max { @@ -3688,7 +3688,7 @@ func bounded(n *Node, max int64) bool { case OMOD: if !sign && Smallintconst(n.Right) { - v := n.Right.Int() + v := n.Right.Int64() if 0 <= v && v <= max { return true } @@ -3696,7 +3696,7 @@ func bounded(n *Node, max int64) bool { case ODIV: if !sign && Smallintconst(n.Right) { - v := n.Right.Int() + v := n.Right.Int64() for bits > 0 && v >= 2 { bits-- v >>= 1 @@ -3705,7 +3705,7 @@ func bounded(n *Node, max int64) bool { case ORSH: if !sign && Smallintconst(n.Right) { - v := n.Right.Int() + v := n.Right.Int64() if v > int64(bits) { return true } diff --git a/src/cmd/compile/internal/mips64/ggen.go b/src/cmd/compile/internal/mips64/ggen.go index 004fef5f0d..e0c4de2cc8 100644 --- a/src/cmd/compile/internal/mips64/ggen.go +++ b/src/cmd/compile/internal/mips64/ggen.go @@ -264,7 +264,7 @@ func cgen_shift(op gc.Op, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node) var n1 gc.Node gc.Regalloc(&n1, nl.Type, res) gc.Cgen(nl, &n1) - sc := uint64(nr.Int()) + sc := uint64(nr.Int64()) if sc >= uint64(nl.Type.Width*8) { // large shift gets 2 shifts by width-1 var n3 gc.Node diff --git a/src/cmd/compile/internal/ppc64/ggen.go b/src/cmd/compile/internal/ppc64/ggen.go index 605f82ccd2..a89ed8f3a7 100644 --- a/src/cmd/compile/internal/ppc64/ggen.go +++ b/src/cmd/compile/internal/ppc64/ggen.go @@ -138,9 +138,9 @@ func dodiv(op gc.Op, nl *gc.Node, nr *gc.Node, res *gc.Node) { check := false if t.IsSigned() { check = true - if gc.Isconst(nl, gc.CTINT) && nl.Int() != -(1<= uint64(nl.Type.Width*8) { // large shift gets 2 shifts by width-1 var n3 gc.Node diff --git a/src/cmd/compile/internal/ppc64/gsubr.go b/src/cmd/compile/internal/ppc64/gsubr.go index a1c899d26b..de6e2fbe05 100644 --- 
--- a/src/cmd/compile/internal/ppc64/gsubr.go
+++ b/src/cmd/compile/internal/ppc64/gsubr.go
@@ -130,7 +130,7 @@ func ginscmp(op gc.Op, t *gc.Type, n1, n2 *gc.Node, likely int) *obj.Prog {
 	gc.Cgen(n1, &g1)
 	gmove(&g1, &r1)
 	if t.IsInteger() && gc.Isconst(n2, gc.CTINT) {
-		ginscon2(optoas(gc.OCMP, t), &r1, n2.Int())
+		ginscon2(optoas(gc.OCMP, t), &r1, n2.Int64())
 	} else {
 		gc.Regalloc(&r2, t, n2)
 		gc.Regalloc(&g2, n1.Type, &r2)
diff --git a/src/cmd/compile/internal/x86/cgen64.go b/src/cmd/compile/internal/x86/cgen64.go
index e59784b1f8..61e191f87c 100644
--- a/src/cmd/compile/internal/x86/cgen64.go
+++ b/src/cmd/compile/internal/x86/cgen64.go
@@ -162,7 +162,7 @@ func cgen64(n *gc.Node, res *gc.Node) {
 	//	shld hi:lo, c
 	//	shld lo:t, c
 	case gc.OLROT:
-		v := uint64(r.Int())
+		v := uint64(r.Int64())

 		if v >= 32 {
 			// reverse during load to do the first 32 bits of rotate
@@ -189,7 +189,7 @@ func cgen64(n *gc.Node, res *gc.Node) {

 	case gc.OLSH:
 		if r.Op == gc.OLITERAL {
-			v := uint64(r.Int())
+			v := uint64(r.Int64())
 			if v >= 64 {
 				if gc.Is64(r.Type) {
 					splitclean()
@@ -278,7 +278,7 @@ func cgen64(n *gc.Node, res *gc.Node) {

 	case gc.ORSH:
 		if r.Op == gc.OLITERAL {
-			v := uint64(r.Int())
+			v := uint64(r.Int64())
 			if v >= 64 {
 				if gc.Is64(r.Type) {
 					splitclean()
@@ -400,8 +400,8 @@ func cgen64(n *gc.Node, res *gc.Node) {

 		if lo2.Op == gc.OLITERAL {
 			// special cases for constants.
-			lv := uint32(lo2.Int())
-			hv := uint32(hi2.Int())
+			lv := uint32(lo2.Int64())
+			hv := uint32(hi2.Int64())
 			splitclean() // right side
 			split64(res, &lo2, &hi2)
 			switch n.Op {
diff --git a/src/cmd/compile/internal/x86/ggen.go b/src/cmd/compile/internal/x86/ggen.go
index eac13fde39..38c3f8fc0e 100644
--- a/src/cmd/compile/internal/x86/ggen.go
+++ b/src/cmd/compile/internal/x86/ggen.go
@@ -203,9 +203,9 @@ func dodiv(op gc.Op, nl *gc.Node, nr *gc.Node, res *gc.Node, ax *gc.Node, dx *gc
 	check := false
 	if t.IsSigned() {
 		check = true
-		if gc.Isconst(nl, gc.CTINT) && nl.Int() != -1<<uint64(t.Width*8-1) {
+		if gc.Isconst(nl, gc.CTINT) && nl.Int64() != -1<<uint64(t.Width*8-1) {
 			check = false
-		} else if gc.Isconst(nr, gc.CTINT) && nr.Int() != -1 {
+		} else if gc.Isconst(nr, gc.CTINT) && nr.Int64() != -1 {
 			check = false
 		}
 	}
@@ ... @@ func cgen_shift(op gc.Op, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node)
-		sc := uint64(nr.Int())
+		sc := uint64(nr.Int64())
 		if sc >= uint64(nl.Type.Width*8) {
 			// large shift gets 2 shifts by width-1
 			gins(a, ncon(uint32(w)-1), &n1)
diff --git a/src/cmd/compile/internal/x86/gsubr.go b/src/cmd/compile/internal/x86/gsubr.go
index 2b8f6b199d..91c009116c 100644
--- a/src/cmd/compile/internal/x86/gsubr.go
+++ b/src/cmd/compile/internal/x86/gsubr.go
@@ -750,7 +750,7 @@ func split64(n *gc.Node, lo *gc.Node, hi *gc.Node) {
 	case gc.OLITERAL:
 		var n1 gc.Node
 		n.Convconst(&n1, n.Type)
-		i := n1.Int()
+		i := n1.Int64()
 		gc.Nodconst(lo, gc.Types[gc.TUINT32], int64(uint32(i)))
 		i >>= 32
 		if n.Type.Etype == gc.TINT64 {
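
Aside (illustration only, not part of the patch above): the change is a pure
mechanical rename produced by gorename; every call site moves from n.Int() to
n.Int64() with identical behavior, since the method always returned int64.
A minimal standalone sketch of the renamed accessor's contract, using a
hypothetical node stand-in rather than the real cmd/compile/internal/gc.Node:

	package main

	import "fmt"

	// node is a hypothetical stand-in for the compiler's gc.Node holding an
	// integer constant; the real type is internal to cmd/compile and is not
	// imported here.
	type node struct{ val int64 }

	// Int64 returns the constant as an int64. After the rename, the method
	// name matches its return type, which is the point of the change.
	func (n *node) Int64() int64 { return n.val }

	func main() {
		// 1 << 40 does not fit in a 32-bit int, which is why the accessor
		// returns (and is now named after) int64.
		n := &node{val: 1 << 40}
		fmt.Println(n.Int64()) // 1099511627776
	}

Naming the accessor after its return type avoids suggesting it returns a
platform-sized int, which could not hold 64-bit constants on 32-bit hosts.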