"os"
"os/exec"
"path/filepath"
- "runtime"
"testing"
)
- // Check that the package's own init function has the package's priority
+ // Check that the package's own init function is among the recorded inits
for _, pkginit := range initdata.Inits {
if pkginit.InitFunc == test.wantinits[0] {
- if initdata.Priority != pkginit.Priority {
- t.Errorf("%s: got self priority %d; want %d", test.pkgpath, pkginit.Priority, initdata.Priority)
- }
found = true
break
}
}
if !found {
t.Errorf("%s: could not find expected function %q", test.pkgpath, test.wantinits[0])
}
- // Each init function in the list other than the first one is a
- // dependency of the function immediately before it. Check that
- // the init functions appear in descending priority order.
- priority := initdata.Priority
- for _, wantdepinit := range test.wantinits[1:] {
- found = false
- for _, pkginit := range initdata.Inits {
- if pkginit.InitFunc == wantdepinit {
- if priority <= pkginit.Priority {
- t.Errorf("%s: got dep priority %d; want less than %d", test.pkgpath, pkginit.Priority, priority)
- }
- found = true
- priority = pkginit.Priority
- break
- }
- }
-
- if !found {
- t.Errorf("%s: could not find expected function %q", test.pkgpath, wantdepinit)
- }
- }
+ // FIXME: the original version of this test was written against
+ // the v1 export data scheme for capturing init functions, so it
+ // verified the priority values. We moved away from the priority
+ // scheme some time ago; it is not clear how much work it would be
+ // to validate the new init export data.
}
}
{pkgpath: "time", name: "Nanosecond", want: "const Nanosecond Duration", wantval: "1"},
{pkgpath: "unicode", name: "IsUpper", want: "func IsUpper(r rune) bool"},
{pkgpath: "unicode", name: "MaxRune", want: "const MaxRune untyped rune", wantval: "1114111"},
- {pkgpath: "imports", wantinits: []string{"imports..import", "fmt..import", "math..import"}},
+ {pkgpath: "imports", wantinits: []string{"imports..import", "fmt..import"}},
{pkgpath: "importsar", name: "Hello", want: "var Hello string"},
{pkgpath: "aliases", name: "A14", want: "type A14 = func(int, T0) chan T2"},
{pkgpath: "aliases", name: "C0", want: "type C0 struct{f1 C1; f2 C1}"},
}
func TestGoxImporter(t *testing.T) {
- testenv.MustHaveGoBuild(t)
-
+ testenv.MustHaveExec(t) // this is to skip nacl, js
initmap := make(map[*types.Package]InitData)
imp := GetImporter([]string{"testdata"}, initmap)
}
}
-func TestObjImporter(t *testing.T) {
- testenv.MustHaveGoBuild(t)
+// gccgoPath returns a path to gccgo if it is present (either found
+// in PATH or specified via the GCCGO environment variable), or an
+// empty string if no gccgo is available.
+func gccgoPath() string {
+ gccgoname := os.Getenv("GCCGO")
+ if gccgoname == "" {
+ gccgoname = "gccgo"
+ }
+ if gpath, gerr := exec.LookPath(gccgoname); gerr == nil {
+ return gpath
+ }
+ return ""
+}
- // This test relies on gccgo being around, which it most likely will be if we
- // were compiled with gccgo.
- if runtime.Compiler != "gccgo" {
+func TestObjImporter(t *testing.T) {
+ // This test relies on gccgo being around.
+ gpath := gccgoPath()
+ if gpath == "" {
t.Skip("This test needs gccgo")
}
for _, test := range importerTests {
gofile := filepath.Join("testdata", test.pkgpath+".go")
+ if _, err := os.Stat(gofile); os.IsNotExist(err) {
+ continue
+ }
ofile := filepath.Join(tmpdir, test.pkgpath+".o")
afile := filepath.Join(artmpdir, "lib"+test.pkgpath+".a")
- cmd := exec.Command("gccgo", "-fgo-pkgpath="+test.pkgpath, "-c", "-o", ofile, gofile)
+ cmd := exec.Command(gpath, "-fgo-pkgpath="+test.pkgpath, "-c", "-o", ofile, gofile)
out, err := cmd.CombinedOutput()
if err != nil {
t.Logf("%s", out)
)
type parser struct {
- scanner scanner.Scanner
+ scanner *scanner.Scanner
version string // format version
tok rune // current token
lit string // literal string; only valid for Ident, Int, String tokens
pkg *types.Package // reference to imported package
imports map[string]*types.Package // package path -> package object
typeList []types.Type // type number -> type
+ typeData []string // unparsed type data (v3 and later)
initdata InitData // package init priority data
}
func (p *parser) init(filename string, src io.Reader, imports map[string]*types.Package) {
+ p.scanner = new(scanner.Scanner)
+ p.initScanner(filename, src)
+ p.imports = imports
+ p.typeList = make([]types.Type, 1 /* type numbers start at 1 */, 16)
+}
+
+func (p *parser) initScanner(filename string, src io.Reader) {
p.scanner.Init(src)
p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
- p.scanner.Whitespace = 1<<'\t' | 1<<'\n' | 1<<' '
+ p.scanner.Whitespace = 1<<'\t' | 1<<' '
p.scanner.Filename = filename // for good error messages
p.next()
- p.imports = imports
- p.typeList = make([]types.Type, 1 /* type numbers start at 1 */, 16)
}
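// Aside, not part of this change: the v3 format is line-oriented, so '\n' is
// dropped from Scanner.Whitespace above and newlines come back from Scan as
// ordinary tokens. A standalone sketch of that behaviour (input is made up):
package main

import (
	"fmt"
	"strings"
	"text/scanner"
)

func main() {
	var s scanner.Scanner
	s.Init(strings.NewReader("priority 1;\n"))
	s.Whitespace = 1<<'\t' | 1<<' ' // note: no 1<<'\n'
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		fmt.Printf("%q\n", s.TokenText()) // "priority", "1", ";", "\n"
	}
}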
type importError struct {
return lit
}
+func (p *parser) expectEOL() {
+ if p.version == "v1" || p.version == "v2" {
+ p.expect(';')
+ }
+ p.expect('\n')
+}
+
func (p *parser) expectKeyword(keyword string) {
lit := p.expect(scanner.Ident)
if lit != keyword {
buf.WriteString(p.scanner.TokenText())
- // This loop needs to examine each character before deciding whether to consume it. If we see a semicolon,
- // we need to let it be consumed by p.next().
+ // This loop needs to examine each character before deciding whether to consume it. If we see a semicolon
+ // or a newline, we need to let it be consumed by p.next().
- for ch := p.scanner.Peek(); ch != ';' && ch != scanner.EOF && p.scanner.Whitespace&(1<<uint(ch)) == 0; ch = p.scanner.Peek() {
+ for ch := p.scanner.Peek(); ch != '\n' && ch != ';' && ch != scanner.EOF && p.scanner.Whitespace&(1<<uint(ch)) == 0; ch = p.scanner.Peek() {
buf.WriteRune(ch)
p.scanner.Next()
}
// reserve reserves the type map entry n for future use.
func (p *parser) reserve(n int) {
- if n != len(p.typeList) {
- p.errorf("invalid type number %d (out of sync)", n)
+ // Notes:
+ // - for pre-V3 export data, the type numbers we see are
+ // guaranteed to be in increasing order, so we append a
+ // reserved entry onto the list.
+ // - for V3+ export data, type numbers can appear in
+ // any order, however the 'types' section tells us the
+ // total number of types, hence typeList is pre-allocated.
+ if len(p.typeData) == 0 {
+ if n != len(p.typeList) {
+ p.errorf("invalid type number %d (out of sync)", n)
+ }
+ p.typeList = append(p.typeList, reserved)
+ } else {
+ if p.typeList[n] != nil {
+ p.errorf("previously visited type number %d", n)
+ }
+ p.typeList[n] = reserved
}
- p.typeList = append(p.typeList, reserved)
}
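// Aside, not part of this change: reserve and update (below) implement a
// simple placeholder scheme, so a type that is referenced while it is still
// being parsed can be detected and patched in later. A toy sketch of the
// same bookkeeping, with made-up string "types":
package main

import "fmt"

func main() {
	const placeholder = "RESERVED"
	typeList := make([]string, 4) // slot 0 unused; type numbers start at 1
	reserve := func(n int) { typeList[n] = placeholder }
	update := func(t string, nlist []int) {
		for _, n := range nlist {
			if typeList[n] != placeholder {
				panic(fmt.Sprintf("type %d was not reserved", n))
			}
			typeList[n] = t
		}
	}

	reserve(1)                        // outer type 1 starts parsing
	reserve(2)                        // a nested reference claims number 2
	update("int", []int{2})           // the inner definition resolves first
	update("struct{f int}", []int{1}) // then the outer one
	fmt.Println(typeList[1:3])        // [struct{f int} int]
}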
// update sets the type map entries for the given type numbers nlist to t.
func (p *parser) update(t types.Type, nlist []int) {
+ if len(nlist) != 0 {
+ if t == reserved {
+ p.errorf("internal error: update(%v) invoked on reserved", nlist)
+ }
+ if t == nil {
+ p.errorf("internal error: update(%v) invoked on nil", nlist)
+ }
+ }
for _, n := range nlist {
+ if p.typeList[n] == t {
+ continue
+ }
if p.typeList[n] != reserved {
- p.errorf("typeMap[%d] not reserved", n)
+ p.errorf("internal error: update(%v): %d not reserved", nlist, n)
}
p.typeList[n] = t
}
nt.SetUnderlying(underlying.Underlying())
}
- // collect associated methods
- for p.tok == scanner.Ident {
- p.expectKeyword("func")
- p.expect('(')
- receiver, _ := p.parseParam(pkg)
- p.expect(')')
- name := p.parseName()
- params, isVariadic := p.parseParamList(pkg)
- results := p.parseResultList(pkg)
- p.expect(';')
-
- sig := types.NewSignature(receiver, params, results, isVariadic)
- nt.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
+ if p.tok == '\n' {
+ p.next()
+ // collect associated methods
+ for p.tok == scanner.Ident {
+ p.expectKeyword("func")
+ p.expect('(')
+ receiver, _ := p.parseParam(pkg)
+ p.expect(')')
+ name := p.parseName()
+ params, isVariadic := p.parseParamList(pkg)
+ results := p.parseResultList(pkg)
+ p.expectEOL()
+
+ sig := types.NewSignature(receiver, params, results, isVariadic)
+ nt.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
+ }
}
return nt
case scanner.Int:
n1 := p.parseInt()
if p.tok == '>' {
+ if len(p.typeData) > 0 && p.typeList[n1] == nil {
+ p.parseSavedType(pkg, n1, n)
+ }
t = p.typeList[n1]
- if t == reserved {
- p.errorf("invalid type cycle, type %d not yet defined", n1)
+ if len(p.typeData) == 0 && t == reserved {
+ p.errorf("invalid type cycle, type %d not yet defined (nlist=%v)", n1, n)
}
p.update(t, n)
} else {
default:
p.errorf("expected type number, got %s (%q)", scanner.TokenString(p.tok), p.lit)
+ return nil
+ }
+
+ if t == nil || t == reserved {
+ p.errorf("internal error: bad return from parseType(%v)", n)
}
p.expect('>')
return
}
+// Types = "types" maxp1 exportedp1 (length)* .
+func (p *parser) parseTypes(pkg *types.Package) {
+ maxp1 := p.parseInt()
+ exportedp1 := p.parseInt()
+ p.typeList = make([]types.Type, maxp1, maxp1)
+
+ type typeOffset struct {
+ offset int
+ length int
+ }
+ var typeOffsets []typeOffset
+
+ total := 0
+ for i := 1; i < maxp1; i++ {
+ length := p.parseInt()
+ typeOffsets = append(typeOffsets, typeOffset{total, length})
+ total += length
+ }
+
+ // We should now have p.tok pointing to the final newline.
+ // The next runes from the scanner should be the type data.
+
+ var sb strings.Builder
+ for sb.Len() < total {
+ r := p.scanner.Next()
+ if r == scanner.EOF {
+ p.error("unexpected EOF")
+ }
+ sb.WriteRune(r)
+ }
+ allTypeData := sb.String()
+
+ p.typeData = []string{""} // type 0, unused
+ for _, to := range typeOffsets {
+ p.typeData = append(p.typeData, allTypeData[to.offset:to.offset+to.length])
+ }
+
+ for i := 1; i < int(exportedp1); i++ {
+ p.parseSavedType(pkg, i, []int{})
+ }
+}
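// Aside, not part of this change: the numbers after "types" are the per-type
// byte lengths; parseTypes turns them into (offset, length) slices of the
// concatenated type data that follows. A small sketch of that arithmetic,
// with made-up lengths:
package main

import "fmt"

func main() {
	maxp1 := 4                   // type numbers run 1..maxp1-1
	lengths := []int{21, 17, 35} // one length per type, as read from the clause
	total := 0
	for i := 1; i < maxp1; i++ {
		fmt.Printf("type %d: offset %d, length %d\n", i, total, lengths[i-1])
		total += lengths[i-1]
	}
	fmt.Println("type data bytes to read after the final newline:", total)
}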
+
+// parseSavedType parses one saved type definition.
+func (p *parser) parseSavedType(pkg *types.Package, i int, nlist []int) {
+ defer func(s *scanner.Scanner, tok rune, lit string) {
+ p.scanner = s
+ p.tok = tok
+ p.lit = lit
+ }(p.scanner, p.tok, p.lit)
+
+ p.scanner = new(scanner.Scanner)
+ p.initScanner(p.scanner.Filename, strings.NewReader(p.typeData[i]))
+ p.expectKeyword("type")
+ id := p.parseInt()
+ if id != i {
+ p.errorf("type ID mismatch: got %d, want %d", id, i)
+ }
+ if p.typeList[i] == reserved {
+ p.errorf("internal error: %d already reserved in parseSavedType", i)
+ }
+ if p.typeList[i] == nil {
+ p.reserve(i)
+ p.parseTypeSpec(pkg, append(nlist, i))
+ }
+ if p.typeList[i] == nil || p.typeList[i] == reserved {
+ p.errorf("internal error: parseSavedType(%d,%v) reserved/nil", i, nlist)
+ }
+}
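// Aside, not part of this change: parseSavedType swaps in a fresh scanner for
// one type's text and restores the old one via a deferred call whose
// arguments are evaluated at defer time. A minimal sketch of that idiom,
// with made-up names:
package main

import "fmt"

type parserState struct{ cur string }

// withTemp runs f with cur temporarily set to temp, then restores it.
func (p *parserState) withTemp(temp string, f func()) {
	defer func(saved string) { p.cur = saved }(p.cur) // saved is the value now
	p.cur = temp
	f()
}

func main() {
	p := &parserState{cur: "outer scanner"}
	p.withTemp("per-type scanner", func() { fmt.Println(p.cur) }) // per-type scanner
	fmt.Println(p.cur)                                            // outer scanner
}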
+
// PackageInit = unquotedString unquotedString int .
func (p *parser) parsePackageInit() PackageInit {
name := p.parseUnquotedString()
func (p *parser) discardDirectiveWhileParsingTypes(pkg *types.Package) {
for {
switch p.tok {
- case ';':
+ case '\n', ';':
return
case '<':
p.parseType(pkg)
}
}
-// InitDataDirective = ( "v1" | "v2" ) ";" |
+// InitDataDirective = ( "v1" | "v2" | "v3" ) ";" |
// "priority" int ";" |
// "init" { PackageInit } ";" |
// "checksum" unquotedString ";" .
}
switch p.lit {
- case "v1", "v2":
+ case "v1", "v2", "v3":
p.version = p.lit
p.next()
p.expect(';')
+ p.expect('\n')
case "priority":
p.next()
p.initdata.Priority = p.parseInt()
- p.expect(';')
+ p.expectEOL()
case "init":
p.next()
- for p.tok != ';' && p.tok != scanner.EOF {
+ for p.tok != '\n' && p.tok != ';' && p.tok != scanner.EOF {
p.initdata.Inits = append(p.initdata.Inits, p.parsePackageInit())
}
- p.expect(';')
+ p.expectEOL()
case "init_graph":
p.next()
// The graph data is thrown away for now.
- for p.tok != ';' && p.tok != scanner.EOF {
+ for p.tok != '\n' && p.tok != ';' && p.tok != scanner.EOF {
p.parseInt64()
p.parseInt64()
}
- p.expect(';')
+ p.expectEOL()
case "checksum":
// Don't let the scanner try to parse the checksum as a number.
p.scanner.Mode &^= scanner.ScanInts | scanner.ScanFloats
p.next()
p.parseUnquotedString()
- p.expect(';')
+ p.expectEOL()
default:
p.errorf("unexpected identifier: %q", p.lit)
// "pkgpath" unquotedString ";" |
// "prefix" unquotedString ";" |
// "import" unquotedString unquotedString string ";" |
+// "indirectimport" unquotedString unquotedstring ";" |
// "func" Func ";" |
// "type" Type ";" |
// "var" Var ";" |
}
switch p.lit {
- case "v1", "v2", "priority", "init", "init_graph", "checksum":
+ case "v1", "v2", "v3", "priority", "init", "init_graph", "checksum":
p.parseInitDataDirective()
case "package":
p.next()
p.pkgname = p.parseUnquotedString()
p.maybeCreatePackage()
- if p.version == "v2" && p.tok != ';' {
+ if p.version != "v1" && p.tok != '\n' && p.tok != ';' {
p.parseUnquotedString()
p.parseUnquotedString()
}
- p.expect(';')
+ p.expectEOL()
case "pkgpath":
p.next()
p.pkgpath = p.parseUnquotedString()
p.maybeCreatePackage()
- p.expect(';')
+ p.expectEOL()
case "prefix":
p.next()
p.pkgpath = p.parseUnquotedString()
- p.expect(';')
+ p.expectEOL()
case "import":
p.next()
pkgpath := p.parseUnquotedString()
p.getPkg(pkgpath, pkgname)
p.parseString()
- p.expect(';')
+ p.expectEOL()
+
+ case "indirectimport":
+ p.next()
+ pkgname := p.parseUnquotedString()
+ pkgpath := p.parseUnquotedString()
+ p.getPkg(pkgpath, pkgname)
+ p.expectEOL()
+
+ case "types":
+ p.next()
+ p.parseTypes(p.pkg)
+ p.expectEOL()
case "func":
p.next()
if fun != nil {
p.pkg.Scope().Insert(fun)
}
- p.expect(';')
+ p.expectEOL()
case "type":
p.next()
p.parseType(p.pkg)
- p.expect(';')
+ p.expectEOL()
case "var":
p.next()
v := p.parseVar(p.pkg)
p.pkg.Scope().Insert(v)
- p.expect(';')
+ p.expectEOL()
case "const":
p.next()
c := p.parseConst(p.pkg)
p.pkg.Scope().Insert(c)
- p.expect(';')
+ p.expectEOL()
default:
p.errorf("unexpected identifier: %q", p.lit)