--- /dev/null
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file implements operations on ideal constants.
+
+package types
+
+import (
+ "big"
+ "go/token"
+ "strconv"
+)
+
+
+// TODO(gri) Consider changing the API so Const is an interface
+// and operations on consts don't have to type switch.
+
+// A Const implements an ideal constant Value.
+// The zero value for a Const is not a valid constant value.
+type Const struct {
+ // representation of constant values:
+ // ideal bool -> bool
+ // ideal int -> *big.Int
+ // ideal float -> *big.Rat
+ // ideal complex -> cmplx
+ // ideal string -> string
+ val interface{}
+}
+
+
+// Representation of complex values.
+type cmplx struct {
+ re, im *big.Rat
+}
+
+
+func assert(cond bool) {
+ if !cond {
+ panic("go/types internal error: assertion failed")
+ }
+}
+
+
+// MakeConst makes an ideal constant from a literal
+// token and the corresponding literal string.
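+// For example, MakeConst(token.INT, "42") yields the ideal int 42,
+// and MakeConst(token.FLOAT, "1.5") the ideal float 3/2.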
+func MakeConst(tok token.Token, lit string) Const {
+ switch tok {
+ case token.INT:
+ var x big.Int
+ _, ok := x.SetString(lit, 0)
+ assert(ok)
+ return Const{&x}
+ case token.FLOAT:
+ var y big.Rat
+ _, ok := y.SetString(lit)
+ assert(ok)
+ return Const{&y}
+ case token.IMAG:
+ assert(lit[len(lit)-1] == 'i')
+ var im big.Rat
+ _, ok := im.SetString(lit[0 : len(lit)-1])
+ assert(ok)
+ return Const{cmplx{big.NewRat(0, 1), &im}}
+ case token.CHAR:
+ assert(lit[0] == '\'' && lit[len(lit)-1] == '\'')
+ code, _, _, err := strconv.UnquoteChar(lit[1:len(lit)-1], '\'')
+ assert(err == nil)
+ return Const{big.NewInt(int64(code))}
+ case token.STRING:
+ s, err := strconv.Unquote(lit)
+ assert(err == nil)
+ return Const{s}
+ }
+ panic("unreachable")
+}
+
+
+// MakeZero returns the zero constant for the given type.
+func MakeZero(typ *Type) Const {
+ // TODO(gri) fix this
+ return Const{0}
+}
+
+
+// Match attempts to match the internal constant representations of x and y.
+// If the attempt is successful, the result is the values of x and y,
+// if necessary converted to have the same internal representation; otherwise
+// the results are invalid.
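+// For example, matching an ideal int with an ideal float converts the int
+// operand to a *big.Rat so that both values use the float representation.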
+func (x Const) Match(y Const) (u, v Const) {
+ switch a := x.val.(type) {
+ case bool:
+ if _, ok := y.val.(bool); ok {
+ u, v = x, y
+ }
+ case *big.Int:
+ switch y.val.(type) {
+ case *big.Int:
+ u, v = x, y
+ case *big.Rat:
+ var z big.Rat
+ z.SetInt(a)
+ u, v = Const{&z}, y
+ case cmplx:
+ var z big.Rat
+ z.SetInt(a)
+ u, v = Const{cmplx{&z, big.NewRat(0, 1)}}, y
+ }
+ case *big.Rat:
+ switch y.val.(type) {
+ case *big.Int:
+ v, u = y.Match(x)
+ case *big.Rat:
+ u, v = x, y
+ case cmplx:
+ u, v = Const{cmplx{a, big.NewRat(0, 1)}}, y
+ }
+ case cmplx:
+ switch y.val.(type) {
+ case *big.Int, *big.Rat:
+ v, u = y.Match(x)
+ case cmplx:
+ u, v = x, y
+ }
+ case string:
+ if _, ok := y.val.(string); ok {
+ u, v = x, y
+ }
+ default:
+ panic("unreachable")
+ }
+ return
+}
+
+
+// Convert attempts to convert the constant x to a given type.
+// If the attempt is successful, the result is the new constant;
+// otherwise the result is invalid.
+func (x Const) Convert(typ *Type) Const {
+ // TODO(gri) implement this
+ switch x.val.(type) {
+ case bool:
+ case *big.Int:
+ case *big.Rat:
+ case cmplx:
+ case string:
+ }
+ return x
+}
+
+
+func (x Const) String() string {
+ switch x := x.val.(type) {
+ case bool:
+ if x {
+ return "true"
+ }
+ return "false"
+ case *big.Int:
+ return x.String()
+ case *big.Rat:
+ return x.FloatString(10) // 10 digits of precision after decimal point seems fine
+ case cmplx:
+ // TODO(gri) don't print 0 components
+ return x.re.FloatString(10) + " + " + x.im.FloatString(10) + "i"
+ case string:
+ return x
+ }
+ panic("unreachable")
+}
+
+
+func (x Const) UnaryOp(op token.Token) Const {
+ panic("unimplemented")
+}
+
+
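+// BinaryOp applies the binary operator op to the constants x and y.
+// The operands must have matching internal representations (see Match);
+// otherwise the type assertions below panic.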
+func (x Const) BinaryOp(op token.Token, y Const) Const {
+ var z interface{}
+ switch x := x.val.(type) {
+ case bool:
+ z = binaryBoolOp(x, op, y.val.(bool))
+ case *big.Int:
+ z = binaryIntOp(x, op, y.val.(*big.Int))
+ case *big.Rat:
+ z = binaryFloatOp(x, op, y.val.(*big.Rat))
+ case cmplx:
+ z = binaryCmplxOp(x, op, y.val.(cmplx))
+ case string:
+ z = binaryStringOp(x, op, y.val.(string))
+ default:
+ panic("unreachable")
+ }
+ return Const{z}
+}
+
+
+func binaryBoolOp(x bool, op token.Token, y bool) interface{} {
+ switch op {
+ case token.EQL:
+ return x == y
+ case token.NEQ:
+ return x != y
+ }
+ panic("unreachable")
+}
+
+
+func binaryIntOp(x *big.Int, op token.Token, y *big.Int) interface{} {
+ var z big.Int
+ switch op {
+ case token.ADD:
+ return z.Add(x, y)
+ case token.SUB:
+ return z.Sub(x, y)
+ case token.MUL:
+ return z.Mul(x, y)
+ case token.QUO:
+ return z.Quo(x, y)
+ case token.REM:
+ return z.Rem(x, y)
+ case token.AND:
+ return z.And(x, y)
+ case token.OR:
+ return z.Or(x, y)
+ case token.XOR:
+ return z.Xor(x, y)
+ case token.AND_NOT:
+ return z.AndNot(x, y)
+ case token.SHL:
+ panic("unimplemented")
+ case token.SHR:
+ panic("unimplemented")
+ case token.EQL:
+ return x.Cmp(y) == 0
+ case token.NEQ:
+ return x.Cmp(y) != 0
+ case token.LSS:
+ return x.Cmp(y) < 0
+ case token.LEQ:
+ return x.Cmp(y) <= 0
+ case token.GTR:
+ return x.Cmp(y) > 0
+ case token.GEQ:
+ return x.Cmp(y) >= 0
+ }
+ panic("unreachable")
+}
+
+
+func binaryFloatOp(x *big.Rat, op token.Token, y *big.Rat) interface{} {
+ var z big.Rat
+ switch op {
+ case token.ADD:
+ return z.Add(x, y)
+ case token.SUB:
+ return z.Sub(x, y)
+ case token.MUL:
+ return z.Mul(x, y)
+ case token.QUO:
+ return z.Quo(x, y)
+ case token.EQL:
+ return x.Cmp(y) == 0
+ case token.NEQ:
+ return x.Cmp(y) != 0
+ case token.LSS:
+ return x.Cmp(y) < 0
+ case token.LEQ:
+ return x.Cmp(y) <= 0
+ case token.GTR:
+ return x.Cmp(y) > 0
+ case token.GEQ:
+ return x.Cmp(y) >= 0
+ }
+ panic("unreachable")
+}
+
+
+func binaryCmplxOp(x cmplx, op token.Token, y cmplx) interface{} {
+ a, b := x.re, x.im
+ c, d := y.re, y.im
+ switch op {
+ case token.ADD:
+ // (a+c) + i(b+d)
+ var re, im big.Rat
+ re.Add(a, c)
+ im.Add(b, d)
+ return cmplx{&re, &im}
+ case token.SUB:
+ // (a-c) + i(b-d)
+ var re, im big.Rat
+ re.Sub(a, c)
+ im.Sub(b, d)
+ return cmplx{&re, &im}
+ case token.MUL:
+ // (ac-bd) + i(bc+ad)
+ var ac, bd, bc, ad big.Rat
+ ac.Mul(a, c)
+ bd.Mul(b, d)
+ bc.Mul(b, c)
+ ad.Mul(a, d)
+ var re, im big.Rat
+ re.Sub(&ac, &bd)
+ im.Add(&bc, &ad)
+ return cmplx{&re, &im}
+ case token.QUO:
+ // (ac+bd)/s + i(bc-ad)/s, with s = cc + dd
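+ // This follows from (a+bi)/(c+di) = (a+bi)(c-di) / ((c+di)(c-di)),
+ // whose denominator is the real number c*c + d*d.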
+ var ac, bd, bc, ad, cc, dd, s big.Rat
+ ac.Mul(a, c)
+ bd.Mul(b, d)
+ bc.Mul(b, c)
+ ad.Mul(a, d)
+ cc.Mul(c, c)
+ dd.Mul(d, d)
+ s.Add(&cc, &dd) // use copies so that the operands of y are not modified
+ var re, im big.Rat
+ re.Add(&ac, &bd)
+ re.Quo(&re, &s)
+ im.Sub(&bc, &ad)
+ im.Quo(&im, &s)
+ return cmplx{&re, &im}
+ case token.EQL:
+ return a.Cmp(c) == 0 && b.Cmp(d) == 0
+ case token.NEQ:
+ return a.Cmp(c) != 0 || b.Cmp(d) != 0
+ }
+ panic("unreachable")
+}
+
+
+func binaryStringOp(x string, op token.Token, y string) interface{} {
+ switch op {
+ case token.ADD:
+ return x + y
+ case token.EQL:
+ return x == y
+ case token.NEQ:
+ return x != y
+ case token.LSS:
+ return x < y
+ case token.LEQ:
+ return x <= y
+ case token.GTR:
+ return x > y
+ case token.GEQ:
+ return x >= y
+ }
+ panic("unreachable")
+}
--- /dev/null
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file implements an ast.Importer for gc generated object files.
+// TODO(gri) Eventually move this into a separate package outside types.
+
+package types
+
+import (
+ "big"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "io"
+ "os"
+ "path/filepath"
+ "runtime"
+ "scanner"
+ "strconv"
+)
+
+
+const trace = false // set to true for debugging
+
+var (
+ pkgRoot = filepath.Join(runtime.GOROOT(), "pkg", runtime.GOOS+"_"+runtime.GOARCH)
+ pkgExts = [...]string{".a", ".5", ".6", ".8"}
+)
+
+
+// findPkg returns the filename and package id for an import path.
+// If no file was found, an empty filename is returned.
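+// For example, "big" resolves to "$GOROOT/pkg/$GOOS_$GOARCH/big" plus one
+// of the extensions in pkgExts, with package id "big".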
+func findPkg(path string) (filename, id string) {
+ if len(path) == 0 {
+ return
+ }
+
+ id = path
+ var noext string
+ switch path[0] {
+ default:
+ // "x" -> "$GOROOT/pkg/$GOOS_$GOARCH/x.ext", "x"
+ noext = filepath.Join(pkgRoot, path)
+
+ case '.':
+ // "./x" -> "/this/directory/x.ext", "/this/directory/x"
+ cwd, err := os.Getwd()
+ if err != nil {
+ return
+ }
+ noext = filepath.Join(cwd, path)
+ id = noext
+
+ case '/':
+ // "/x" -> "/x.ext", "/x"
+ noext = path
+ }
+
+ // try extensions
+ for _, ext := range pkgExts {
+ filename = noext + ext
+ if f, err := os.Stat(filename); err == nil && f.IsRegular() {
+ return
+ }
+ }
+
+ filename = "" // not found
+ return
+}
+
+
+// gcParser parses the exports inside a gc compiler-produced
+// object/archive file and populates its scope with the results.
+type gcParser struct {
+ scanner scanner.Scanner
+ tok int // current token
+ lit string // literal string; only valid for Ident, Int, String tokens
+ id string // package id of imported package
+ scope *ast.Scope // scope of imported package; alias for deps[id]
+ deps map[string]*ast.Scope // package id -> package scope
+}
+
+
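+// init prepares p for parsing the export data of the package with the
+// given id, read from src; filename is used for error positions only.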
+func (p *gcParser) init(filename, id string, src io.Reader) {
+ p.scanner.Init(src)
+ p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
+ p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
+ p.scanner.Whitespace = 1<<'\t' | 1<<' '
+ p.scanner.Filename = filename // for good error messages
+ p.next()
+ p.id = id
+ p.scope = ast.NewScope(nil)
+ p.deps = map[string]*ast.Scope{"unsafe": Unsafe, id: p.scope}
+}
+
+
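+// next advances to the next token and records its literal text in p.lit
+// if it is an identifier, integer, or string; otherwise p.lit is cleared.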
+func (p *gcParser) next() {
+ p.tok = p.scanner.Scan()
+ switch p.tok {
+ case scanner.Ident, scanner.Int, scanner.String:
+ p.lit = p.scanner.TokenText()
+ default:
+ p.lit = ""
+ }
+ if trace {
+ fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit)
+ }
+}
+
+
+// GcImporter implements the ast.Importer signature.
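+// It locates the object file for the given import path, extracts the
+// export data, and parses it into a package name and scope.
+// A minimal use (sketch): name, scope, err := GcImporter("big").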
+func GcImporter(path string) (name string, scope *ast.Scope, err os.Error) {
+ if path == "unsafe" {
+ return path, Unsafe, nil
+ }
+
+ defer func() {
+ if r := recover(); r != nil {
+ err = r.(importError) // will re-panic if r is not an importError
+ if trace {
+ panic(err) // force a stack trace
+ }
+ }
+ }()
+
+ filename, id := findPkg(path)
+ if filename == "" {
+ err = os.ErrorString("can't find import: " + id)
+ return
+ }
+
+ buf, err := ExportData(filename)
+ if err != nil {
+ return
+ }
+ defer buf.Close()
+
+ if trace {
+ fmt.Printf("importing %s\n", filename)
+ }
+
+ var p gcParser
+ p.init(filename, id, buf)
+ name, scope = p.parseExport()
+
+ return
+}
+
+
+// ----------------------------------------------------------------------------
+// Error handling
+
+// Internal errors are boxed as importErrors.
+type importError struct {
+ pos scanner.Position
+ err os.Error
+}
+
+
+func (e importError) String() string {
+ return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err)
+}
+
+
+func (p *gcParser) error(err interface{}) {
+ if s, ok := err.(string); ok {
+ err = os.ErrorString(s)
+ }
+ // panic with a runtime.Error if err is not an os.Error
+ panic(importError{p.scanner.Pos(), err.(os.Error)})
+}
+
+
+func (p *gcParser) errorf(format string, args ...interface{}) {
+ p.error(fmt.Sprintf(format, args...))
+}
+
+
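+// expect consumes the current token and returns its literal if the token
+// has kind tok; otherwise it reports an error.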
+func (p *gcParser) expect(tok int) string {
+ lit := p.lit
+ if p.tok != tok {
+ p.errorf("expected %q, got %q (%q)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit)
+ }
+ p.next()
+ return lit
+}
+
+
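+// expectSpecial consumes the multi-character token tok (for instance "..."
+// or "<-"), which the scanner delivers as individual characters that must
+// not be separated by white space.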
+func (p *gcParser) expectSpecial(tok string) {
+ sep := 'x' // not white space
+ i := 0
+ for i < len(tok) && p.tok == int(tok[i]) && sep > ' ' {
+ sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
+ p.next()
+ i++
+ }
+ if i < len(tok) {
+ p.errorf("expected %q, got %q", tok, tok[0:i])
+ }
+}
+
+
+func (p *gcParser) expectKeyword(keyword string) {
+ lit := p.expect(scanner.Ident)
+ if lit != keyword {
+ p.errorf("expected keyword %s, got %q", keyword, lit)
+ }
+}
+
+
+// ----------------------------------------------------------------------------
+// Import declarations
+
+// ImportPath = string_lit .
+//
+func (p *gcParser) parsePkgId() *ast.Scope {
+ id, err := strconv.Unquote(p.expect(scanner.String))
+ if err != nil {
+ p.error(err)
+ }
+
+ scope := p.scope // id == "" stands for the imported package id
+ if id != "" {
+ if scope = p.deps[id]; scope == nil {
+ scope = ast.NewScope(nil)
+ p.deps[id] = scope
+ }
+ }
+
+ return scope
+}
+
+
+// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
+func (p *gcParser) parseDotIdent() string {
+ ident := ""
+ if p.tok != scanner.Int {
+ sep := 'x' // not white space
+ for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' {
+ ident += p.lit
+ sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
+ p.next()
+ }
+ }
+ if ident == "" {
+ p.expect(scanner.Ident) // use expect() for error handling
+ }
+ return ident
+}
+
+
+// ExportedName = ImportPath "." dotIdentifier .
+//
+func (p *gcParser) parseExportedName(kind ast.ObjKind) *ast.Object {
+ scope := p.parsePkgId()
+ p.expect('.')
+ name := p.parseDotIdent()
+
+ // a type may have been declared before - if it exists
+ // already in the respective package scope, return that
+ // type
+ if kind == ast.Typ {
+ if obj := scope.Lookup(name); obj != nil {
+ assert(obj.Kind == ast.Typ)
+ return obj
+ }
+ }
+
+ // any other object must be a newly declared object -
+ // create it and insert it into the package scope
+ obj := ast.NewObj(kind, name)
+ if scope.Insert(obj) != nil {
+ p.errorf("already declared: %s", obj.Name)
+ }
+
+ // a new type object is a named type and may be referred
+ // to before the underlying type is known - set it up
+ if kind == ast.Typ {
+ obj.Type = &Name{Obj: obj}
+ }
+
+ return obj
+}
+
+
+// ----------------------------------------------------------------------------
+// Types
+
+// BasicType = identifier .
+//
+func (p *gcParser) parseBasicType() Type {
+ id := p.expect(scanner.Ident)
+ obj := Universe.Lookup(id)
+ if obj == nil || obj.Kind != ast.Typ {
+ p.errorf("not a basic type: %s", id)
+ }
+ return obj.Type.(Type)
+}
+
+
+// ArrayType = "[" int_lit "]" Type .
+//
+func (p *gcParser) parseArrayType() Type {
+ // "[" already consumed and lookahead known not to be "]"
+ lit := p.expect(scanner.Int)
+ p.expect(']')
+ elt := p.parseType()
+ n, err := strconv.Atoui64(lit)
+ if err != nil {
+ p.error(err)
+ }
+ return &Array{Len: n, Elt: elt}
+}
+
+
+// MapType = "map" "[" Type "]" Type .
+//
+func (p *gcParser) parseMapType() Type {
+ p.expectKeyword("map")
+ p.expect('[')
+ key := p.parseType()
+ p.expect(']')
+ elt := p.parseType()
+ return &Map{Key: key, Elt: elt}
+}
+
+
+// Name = identifier | "?" .
+//
+func (p *gcParser) parseName() (name string) {
+ switch p.tok {
+ case scanner.Ident:
+ name = p.lit
+ p.next()
+ case '?':
+ // anonymous
+ p.next()
+ default:
+ p.error("name expected")
+ }
+ return
+}
+
+
+// Field = Name Type [ ":" string_lit ] .
+//
+func (p *gcParser) parseField(scope *ast.Scope) {
+ // TODO(gri) The code below is not correct for anonymous fields:
+ // The name is the type name; it should not be empty.
+ name := p.parseName()
+ ftyp := p.parseType()
+ if name == "" {
+ // anonymous field - ftyp must be T or *T and T must be a type name
+ ftyp = Deref(ftyp)
+ if ftyp, ok := ftyp.(*Name); ok {
+ name = ftyp.Obj.Name
+ } else {
+ p.errorf("anonymous field expected")
+ }
+ }
+ if p.tok == ':' {
+ p.next()
+ tag := p.expect(scanner.String)
+ _ = tag // TODO(gri) store tag somewhere
+ }
+ fld := ast.NewObj(ast.Var, name)
+ fld.Type = ftyp
+ scope.Insert(fld)
+}
+
+
+// StructType = "struct" "{" [ FieldList ] "}" .
+// FieldList = Field { ";" Field } .
+//
+func (p *gcParser) parseStructType() Type {
+ p.expectKeyword("struct")
+ p.expect('{')
+ scope := ast.NewScope(nil)
+ if p.tok != '}' {
+ p.parseField(scope)
+ for p.tok == ';' {
+ p.next()
+ p.parseField(scope)
+ }
+ }
+ p.expect('}')
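+ // The parsed fields are collected in scope but not yet recorded in the
+ // returned struct type.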
+ return &Struct{}
+}
+
+
+// Parameter = ( identifier | "?" ) [ "..." ] Type .
+//
+func (p *gcParser) parseParameter(scope *ast.Scope, isVariadic *bool) {
+ name := p.parseName()
+ if name == "" {
+ name = "_" // cannot access unnamed identifiers
+ }
+ if isVariadic != nil {
+ if *isVariadic {
+ p.error("... not on final argument")
+ }
+ if p.tok == '.' {
+ p.expectSpecial("...")
+ *isVariadic = true
+ }
+ }
+ ptyp := p.parseType()
+ par := ast.NewObj(ast.Var, name)
+ par.Type = ptyp
+ scope.Insert(par)
+}
+
+
+// Parameters = "(" [ ParameterList ] ")" .
+// ParameterList = { Parameter "," } Parameter .
+//
+func (p *gcParser) parseParameters(scope *ast.Scope, isVariadic *bool) {
+ p.expect('(')
+ if p.tok != ')' {
+ p.parseParameter(scope, isVariadic)
+ for p.tok == ',' {
+ p.next()
+ p.parseParameter(scope, isVariadic)
+ }
+ }
+ p.expect(')')
+}
+
+
+// Signature = Parameters [ Result ] .
+// Result = Type | Parameters .
+//
+func (p *gcParser) parseSignature(scope *ast.Scope, isVariadic *bool) {
+ p.parseParameters(scope, isVariadic)
+
+ // optional result type
+ switch p.tok {
+ case scanner.Ident, scanner.String, '[', '*', '<':
+ // single, unnamed result
+ result := ast.NewObj(ast.Var, "_")
+ result.Type = p.parseType()
+ scope.Insert(result)
+ case '(':
+ // named or multiple result(s)
+ p.parseParameters(scope, nil)
+ }
+}
+
+
+// FuncType = "func" Signature .
+//
+func (p *gcParser) parseFuncType() Type {
+ // "func" already consumed
+ scope := ast.NewScope(nil)
+ isVariadic := false
+ p.parseSignature(scope, &isVariadic)
+ return &Func{IsVariadic: isVariadic}
+}
+
+
+// MethodSpec = identifier Signature .
+//
+func (p *gcParser) parseMethodSpec(scope *ast.Scope) {
+ p.expect(scanner.Ident)
+ isVariadic := false
+ p.parseSignature(scope, &isVariadic)
+}
+
+
+// InterfaceType = "interface" "{" [ MethodList ] "}" .
+// MethodList = MethodSpec { ";" MethodSpec } .
+//
+func (p *gcParser) parseInterfaceType() Type {
+ p.expectKeyword("interface")
+ p.expect('{')
+ scope := ast.NewScope(nil)
+ if p.tok != '}' {
+ p.parseMethodSpec(scope)
+ for p.tok == ';' {
+ p.next()
+ p.parseMethodSpec(scope)
+ }
+ }
+ p.expect('}')
+ return &Interface{}
+}
+
+
+// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
+//
+func (p *gcParser) parseChanType() Type {
+ dir := ast.SEND | ast.RECV
+ if p.tok == scanner.Ident {
+ p.expectKeyword("chan")
+ if p.tok == '<' {
+ p.expectSpecial("<-")
+ dir = ast.SEND
+ }
+ } else {
+ p.expectSpecial("<-")
+ p.expectKeyword("chan")
+ dir = ast.RECV
+ }
+ elt := p.parseType()
+ return &Chan{Dir: dir, Elt: elt}
+}
+
+
+// Type =
+// BasicType | TypeName | ArrayType | SliceType | StructType |
+// PointerType | FuncType | InterfaceType | MapType | ChanType |
+// "(" Type ")" .
+// BasicType = ident .
+// TypeName = ExportedName .
+// SliceType = "[" "]" Type .
+// PointerType = "*" Type .
+//
+func (p *gcParser) parseType() Type {
+ switch p.tok {
+ case scanner.Ident:
+ switch p.lit {
+ default:
+ return p.parseBasicType()
+ case "struct":
+ return p.parseStructType()
+ case "func":
+ p.next() // parseFuncType assumes "func" is already consumed
+ return p.parseFuncType()
+ case "interface":
+ return p.parseInterfaceType()
+ case "map":
+ return p.parseMapType()
+ case "chan":
+ return p.parseChanType()
+ }
+ case scanner.String:
+ // TypeName
+ return p.parseExportedName(ast.Typ).Type.(Type)
+ case '[':
+ p.next() // look ahead
+ if p.tok == ']' {
+ // SliceType
+ p.next()
+ return &Slice{Elt: p.parseType()}
+ }
+ return p.parseArrayType()
+ case '*':
+ // PointerType
+ p.next()
+ return &Pointer{Base: p.parseType()}
+ case '<':
+ return p.parseChanType()
+ case '(':
+ // "(" Type ")"
+ p.next()
+ typ := p.parseType()
+ p.expect(')')
+ return typ
+ }
+ p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit)
+ return nil
+}
+
+
+// ----------------------------------------------------------------------------
+// Declarations
+
+// ImportDecl = "import" identifier string_lit .
+//
+func (p *gcParser) parseImportDecl() {
+ p.expectKeyword("import")
+ // The identifier has no semantic meaning in the import data.
+ // It exists so that error messages can print the real package
+ // name: binary.ByteOrder instead of "encoding/binary".ByteOrder.
+ // TODO(gri): Save package id -> package name mapping.
+ p.expect(scanner.Ident)
+ p.parsePkgId()
+}
+
+
+// int_lit = [ "+" | "-" ] { "0" ... "9" } .
+//
+func (p *gcParser) parseInt() (sign, val string) {
+ switch p.tok {
+ case '-':
+ p.next()
+ sign = "-"
+ case '+':
+ p.next()
+ }
+ val = p.expect(scanner.Int)
+ return
+}
+
+
+// number = int_lit [ "p" int_lit ] .
+//
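+// For example, "-3p-2" denotes the mantissa -3 scaled by 2^-2, i.e. -3/4.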
+func (p *gcParser) parseNumber() Const {
+ // mantissa
+ sign, val := p.parseInt()
+ mant, ok := new(big.Int).SetString(sign+val, 10)
+ assert(ok)
+
+ if p.lit == "p" {
+ // exponent (base 2)
+ p.next()
+ sign, val = p.parseInt()
+ exp, err := strconv.Atoui(val)
+ if err != nil {
+ p.error(err)
+ }
+ if sign == "-" {
+ denom := big.NewInt(1)
+ denom.Lsh(denom, exp)
+ return Const{new(big.Rat).SetFrac(mant, denom)}
+ }
+ if exp > 0 {
+ mant.Lsh(mant, exp)
+ }
+ return Const{new(big.Rat).SetInt(mant)}
+ }
+
+ return Const{mant}
+}
+
+
+// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
+// Literal = bool_lit | int_lit | float_lit | complex_lit | string_lit .
+// bool_lit = "true" | "false" .
+// complex_lit = "(" float_lit "+" float_lit ")" .
+// string_lit = `"` { unicode_char } `"` .
+//
+func (p *gcParser) parseConstDecl() {
+ p.expectKeyword("const")
+ obj := p.parseExportedName(ast.Con)
+ var x Const
+ var typ Type
+ if p.tok != '=' {
+ obj.Type = p.parseType()
+ }
+ p.expect('=')
+ switch p.tok {
+ case scanner.Ident:
+ // bool_lit
+ if p.lit != "true" && p.lit != "false" {
+ p.error("expected true or false")
+ }
+ x = Const{p.lit == "true"}
+ typ = Bool.Underlying
+ p.next()
+ case '-', scanner.Int:
+ // int_lit
+ x = p.parseNumber()
+ typ = Int.Underlying
+ if _, ok := x.val.(*big.Rat); ok {
+ typ = Float64.Underlying
+ }
+ case '(':
+ // complex_lit
+ p.next()
+ re := p.parseNumber()
+ p.expect('+')
+ im := p.parseNumber()
+ p.expect(')')
+ x = Const{cmplx{re.val.(*big.Rat), im.val.(*big.Rat)}}
+ typ = Complex128.Underlying
+ case scanner.String:
+ // string_lit
+ x = MakeConst(token.STRING, p.lit)
+ p.next()
+ typ = String.Underlying
+ default:
+ p.error("expected literal")
+ }
+ if obj.Type == nil {
+ obj.Type = typ
+ }
+ _ = x // TODO(gri) store x somewhere
+}
+
+
+// TypeDecl = "type" ExportedName Type .
+//
+func (p *gcParser) parseTypeDecl() {
+ p.expectKeyword("type")
+ obj := p.parseExportedName(ast.Typ)
+ typ := p.parseType()
+
+ name := obj.Type.(*Name)
+ assert(name.Underlying == nil)
+ assert(Underlying(typ) == typ)
+ name.Underlying = typ
+}
+
+
+// VarDecl = "var" ExportedName Type .
+//
+func (p *gcParser) parseVarDecl() {
+ p.expectKeyword("var")
+ obj := p.parseExportedName(ast.Var)
+ obj.Type = p.parseType()
+}
+
+
+// FuncDecl = "func" ExportedName Signature .
+//
+func (p *gcParser) parseFuncDecl() {
+ // "func" already consumed
+ obj := p.parseExportedName(ast.Fun)
+ obj.Type = p.parseFuncType()
+}
+
+
+// MethodDecl = "func" Receiver identifier Signature .
+// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
+//
+func (p *gcParser) parseMethodDecl() {
+ // "func" already consumed
+ scope := ast.NewScope(nil) // method scope
+ p.expect('(')
+ p.parseParameter(scope, nil) // receiver
+ p.expect(')')
+ p.expect(scanner.Ident)
+ isVariadic := false
+ p.parseSignature(scope, &isVariadic)
+}
+
+
+// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
+//
+func (p *gcParser) parseDecl() {
+ switch p.lit {
+ case "import":
+ p.parseImportDecl()
+ case "const":
+ p.parseConstDecl()
+ case "type":
+ p.parseTypeDecl()
+ case "var":
+ p.parseVarDecl()
+ case "func":
+ p.next() // look ahead
+ if p.tok == '(' {
+ p.parseMethodDecl()
+ } else {
+ p.parseFuncDecl()
+ }
+ }
+ p.expect('\n')
+}
+
+
+// ----------------------------------------------------------------------------
+// Export
+
+// Export = "PackageClause { Decl } "$$" .
+// PackageClause = "package" identifier [ "safe" ] "\n" .
+//
+func (p *gcParser) parseExport() (string, *ast.Scope) {
+ p.expectKeyword("package")
+ name := p.expect(scanner.Ident)
+ if p.tok != '\n' {
+ // A package is safe if it was compiled with the -u flag,
+ // which disables the unsafe package.
+ // TODO(gri) remember "safe" package
+ p.expectKeyword("safe")
+ }
+ p.expect('\n')
+
+ for p.tok != '$' && p.tok != scanner.EOF {
+ p.parseDecl()
+ }
+
+ if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' {
+ // don't call next()/expect() since reading past the
+ // export data may cause scanner errors (e.g. NUL chars)
+ p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch)
+ }
+
+ if n := p.scanner.ErrorCount; n != 0 {
+ p.errorf("expected no scanner errors, got %d", n)
+ }
+
+ return name, p.scope
+}