diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml new file mode 100644 index 00000000..34fb2454 --- /dev/null +++ b/.github/workflows/go.yml @@ -0,0 +1,26 @@ +name: gocode + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + +jobs: + Test: + strategy: + matrix: + go-version: [1.16.x, 1.17.x, 1.18.x, 1.19.x] + os: [ubuntu-latest, windows-latest, macos-11] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v2 + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: ${{ matrix.go-version }} + + - name: Build + run: go build -v + diff --git a/README.md b/README.md index 3f68b5ba..058b9ec5 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,34 @@ -## An autocompletion daemon for the Go programming language +LiteIDE Gocode Tools +========= + +### LiteIDE + +_LiteIDE is a simple, open source, cross-platform Go IDE._ + +### Gocode +_Gocode is a golang code autocomplete support for LiteIDE._ + + +- support Go1.11 Go modules. +- support Go1.18 generics. + +``` +go install github.com/visualfc/gocode@latest + +Windows/Linux: copy GOPATH/bin gocode to liteide/bin +MacOS: copy GOPATH/bin gocode to LiteIDE.app/Contents/MacOS +``` + +### Website +* LiteIDE Source code + +* Gocode Source code + -**IMPORTANT: consider switching to https://github.com/mdempsky/gocode if you have problems starting with Go version 1.10, due to changes in binary packages architecture (introduction of package cache) I'm not going to adjust gocode for it for quite some time. There is a higher chance that fork under the given link will have some solution to the problem sooner or later.** + +*** + +## An autocompletion daemon for the Go programming language Gocode is a helper tool which is intended to be integrated with your source code editor, like vim, neovim and emacs. 
It provides several advanced capabilities, which currently includes: diff --git a/_testing/all.bash b/_testing/all.bash index 02596a95..d3b7a0ae 100755 --- a/_testing/all.bash +++ b/_testing/all.bash @@ -1,5 +1,6 @@ #!/usr/bin/env bash -gocode close +go build ../. +./gocode close sleep 0.5 echo "--------------------------------------------------------------------" echo "Autocompletion tests..." @@ -7,4 +8,5 @@ echo "--------------------------------------------------------------------" export XDG_CONFIG_HOME="$(mktemp -d)" ./run.rb sleep 0.5 -gocode close +./gocode close +rm ./gocode diff --git a/_testing/test.0008/out.expected b/_testing/test.0008/out.expected index 0f46ccc9..ab1a14b0 100644 --- a/_testing/test.0008/out.expected +++ b/_testing/test.0008/out.expected @@ -1,5 +1,6 @@ -Found 6 candidates: +Found 7 candidates: func Lock() + func TryLock() bool func Unlock() var Mutex sync.Mutex var data map[string][]string diff --git a/_testing/test.0009/out.expected b/_testing/test.0009/out.expected index e7f998c1..70535d97 100644 --- a/_testing/test.0009/out.expected +++ b/_testing/test.0009/out.expected @@ -1,3 +1,4 @@ -Found 2 candidates: +Found 3 candidates: func Lock() + func TryLock() bool func Unlock() diff --git a/_testing/test.0011/out.expected b/_testing/test.0011/out.expected index 7de665e1..03c24f8d 100644 --- a/_testing/test.0011/out.expected +++ b/_testing/test.0011/out.expected @@ -1,12 +1,17 @@ -Found 59 candidates: +Found 70 candidates: func Addr() reflect.Value func Bool() bool func Bytes() []byte func Call(in []reflect.Value) []reflect.Value func CallSlice(in []reflect.Value) []reflect.Value func CanAddr() bool + func CanComplex() bool + func CanConvert(t reflect.Type) bool + func CanFloat() bool + func CanInt() bool func CanInterface() bool func CanSet() bool + func CanUint() bool func Cap() int func Close() func Complex() complex128 @@ -14,6 +19,7 @@ Found 59 candidates: func Elem() reflect.Value func Field(i int) reflect.Value func 
FieldByIndex(index []int) reflect.Value + func FieldByIndexErr(index []int) (reflect.Value, error) func FieldByName(name string) reflect.Value func FieldByNameFunc(match func(string) bool) reflect.Value func Float() float64 @@ -23,10 +29,12 @@ Found 59 candidates: func InterfaceData() [2]uintptr func IsNil() bool func IsValid() bool + func IsZero() bool func Kind() reflect.Kind func Len() int func MapIndex(key reflect.Value) reflect.Value func MapKeys() []reflect.Value + func MapRange() *reflect.MapIter func Method(i int) reflect.Value func MethodByName(name string) reflect.Value func NumField() int @@ -45,8 +53,10 @@ Found 59 candidates: func SetComplex(x complex128) func SetFloat(x float64) func SetInt(x int64) + func SetIterKey(iter *reflect.MapIter) + func SetIterValue(iter *reflect.MapIter) func SetLen(n int) - func SetMapIndex(key reflect.Value, val reflect.Value) + func SetMapIndex(key reflect.Value, elem reflect.Value) func SetPointer(x unsafe.Pointer) func SetString(x string) func SetUint(x uint64) @@ -58,3 +68,4 @@ Found 59 candidates: func Type() reflect.Type func Uint() uint64 func UnsafeAddr() uintptr + func UnsafePointer() unsafe.Pointer diff --git a/_testing/test.0020/out.expected b/_testing/test.0020/out.expected index d5d01e90..fcd7b3f6 100644 --- a/_testing/test.0020/out.expected +++ b/_testing/test.0020/out.expected @@ -1,5 +1,6 @@ -Found 4 candidates: +Found 5 candidates: func Lock() + func TryLock() bool func Unlock() var Dummy Dummy var Mutex sync.Mutex diff --git a/_testing/test.0025/out.expected b/_testing/test.0025/out.expected index 9f67c8bc..f19058aa 100644 --- a/_testing/test.0025/out.expected +++ b/_testing/test.0025/out.expected @@ -1,5 +1,7 @@ -Found 4 candidates: - func Alignof(any) uintptr - func Offsetof(any) uintptr - func Sizeof(any) uintptr - type Pointer uintptr +Found 6 candidates: + func Add(ptr Pointer, len IntegerType) Pointer + func Alignof(x ArbitraryType) uintptr + func Offsetof(x ArbitraryType) uintptr + func Sizeof(x 
ArbitraryType) uintptr + func Slice(ptr *ArbitraryType, len IntegerType) []ArbitraryType + type Pointer *ArbitraryType diff --git a/_testing/test.0059/out.expected b/_testing/test.0059/out.expected index 3bf8bfb2..abfde3a3 100644 --- a/_testing/test.0059/out.expected +++ b/_testing/test.0059/out.expected @@ -1,2 +1,2 @@ Found 1 candidates: - func NewReader(r io.Reader) io.Reader + func NewReader(r myio.Reader) myio.Reader diff --git a/ast.go b/ast.go new file mode 100644 index 00000000..5f9862b0 --- /dev/null +++ b/ast.go @@ -0,0 +1,222 @@ +/* + Copyright 2021 The GoPlus Authors (goplus.org) + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package main + +import ( + "go/ast" + "go/token" + "go/types" + "log" + "reflect" + "strconv" +) + +var ( + underscore = &ast.Ident{Name: "_"} +) + +var ( + identTrue = ident("true") + identFalse = ident("false") + identNil = ident("nil") + identAppend = ident("append") + identLen = ident("len") + identCap = ident("cap") + identNew = ident("new") + identMake = ident("make") + identIota = ident("iota") +) + +func ident(name string) *ast.Ident { + return &ast.Ident{Name: name} +} + +func boolean(v bool) *ast.Ident { + if v { + return identTrue + } + return identFalse +} + +func toRecv(pkg *types.Package, recv *types.Var) *ast.FieldList { + if recv == nil { + return nil + } + var names []*ast.Ident + if name := recv.Name(); name != "" { + names = []*ast.Ident{ident(name)} + } + fld := &ast.Field{Names: names, Type: toType(pkg, recv.Type())} + return &ast.FieldList{List: []*ast.Field{fld}} +} + +// ----------------------------------------------------------------------------- +// function type + +func toFieldList(pkg *types.Package, t *types.Tuple) []*ast.Field { + if t == nil { + return nil + } + n := t.Len() + flds := make([]*ast.Field, n) + for i := 0; i < n; i++ { + item := t.At(i) + var names []*ast.Ident + if name := item.Name(); name != "" { + names = []*ast.Ident{ident(name)} + } + typ := toType(pkg, item.Type()) + flds[i] = &ast.Field{Names: names, Type: typ} + } + return flds +} + +func toFields(pkg *types.Package, t *types.Struct) []*ast.Field { + n := t.NumFields() + flds := make([]*ast.Field, n) + for i := 0; i < n; i++ { + item := t.Field(i) + var names []*ast.Ident + if !item.Embedded() { + names = []*ast.Ident{{Name: item.Name()}} + } + typ := toType(pkg, item.Type()) + fld := &ast.Field{Names: names, Type: typ} + if tag := t.Tag(i); tag != "" { + fld.Tag = &ast.BasicLit{Kind: token.STRING, Value: strconv.Quote(tag)} + } + flds[i] = fld + } + return flds +} + +func toVariadic(fld *ast.Field) { + t, ok := fld.Type.(*ast.ArrayType) + if !ok || t.Len 
!= nil { + panic("TODO: not a slice type") + } + fld.Type = &ast.Ellipsis{Elt: t.Elt} +} + +// ----------------------------------------------------------------------------- + +func toType(pkg *types.Package, typ types.Type) ast.Expr { + switch t := typ.(type) { + case *types.Basic: // bool, int, etc + return toBasicType(pkg, t) + case *types.Pointer: + return &ast.StarExpr{X: toType(pkg, t.Elem())} + case *types.Named: + return toNamedType(pkg, t) + case *types.Interface: + return toInterface(pkg, t) + case *types.Slice: + return toSliceType(pkg, t) + case *types.Array: + return toArrayType(pkg, t) + case *types.Map: + return toMapType(pkg, t) + case *types.Struct: + return toStructType(pkg, t) + case *types.Chan: + return toChanType(pkg, t) + case *types.Signature: + return toFuncType(pkg, t) + case *TypeParam: + return toTypeParam(pkg, t) + } + log.Panicln("TODO: toType -", reflect.TypeOf(typ)) + return nil +} + +func toObjectExpr(pkg *types.Package, v types.Object) ast.Expr { + vpkg, name := v.Pkg(), v.Name() + if vpkg == nil || vpkg == g_daemon.autocomplete.typesPkg { // at universe or at this package + return ident(name) + } + return &ast.SelectorExpr{ + X: ident(vpkg.Name()), + Sel: ident(name), + } +} + +func toBasicType(pkg *types.Package, t *types.Basic) ast.Expr { + if t.Kind() == types.UnsafePointer { + return &ast.SelectorExpr{X: ast.NewIdent("unsafe"), Sel: ast.NewIdent("Pointer")} + } + if (t.Info() & types.IsUntyped) != 0 { + //panic("unexpected: untyped type") + } + return &ast.Ident{Name: t.Name()} +} + +func isUntyped(pkg *types.Package, typ types.Type) bool { + switch t := typ.(type) { + case *types.Basic: + return (t.Info() & types.IsUntyped) != 0 + } + return false +} + +func toChanType(pkg *types.Package, t *types.Chan) ast.Expr { + return &ast.ChanType{Value: toType(pkg, t.Elem()), Dir: chanDirs[t.Dir()]} +} + +var ( + chanDirs = [...]ast.ChanDir{ + types.SendRecv: ast.SEND | ast.RECV, + types.SendOnly: ast.SEND, + types.RecvOnly: ast.RECV, + 
} +) + +func toStructType(pkg *types.Package, t *types.Struct) ast.Expr { + list := toFields(pkg, t) + return &ast.StructType{Fields: &ast.FieldList{List: list}} +} + +func toArrayType(pkg *types.Package, t *types.Array) ast.Expr { + var len ast.Expr + if n := t.Len(); n < 0 { + len = &ast.Ellipsis{} + } else { + len = &ast.BasicLit{Kind: token.INT, Value: strconv.FormatInt(t.Len(), 10)} + } + return &ast.ArrayType{Len: len, Elt: toType(pkg, t.Elem())} +} + +func toSliceType(pkg *types.Package, t *types.Slice) ast.Expr { + return &ast.ArrayType{Elt: toType(pkg, t.Elem())} +} + +func toMapType(pkg *types.Package, t *types.Map) ast.Expr { + return &ast.MapType{Key: toType(pkg, t.Key()), Value: toType(pkg, t.Elem())} +} + +func toInterface(pkg *types.Package, t *types.Interface) ast.Expr { + var flds []*ast.Field + for i, n := 0, t.NumEmbeddeds(); i < n; i++ { + typ := toType(pkg, t.EmbeddedType(i)) + fld := &ast.Field{Type: typ} + flds = append(flds, fld) + } + for i, n := 0, t.NumExplicitMethods(); i < n; i++ { + fn := t.ExplicitMethod(i) + name := ident(fn.Name()) + typ := toFuncType(pkg, fn.Type().(*types.Signature)) + fld := &ast.Field{Names: []*ast.Ident{name}, Type: typ} + flds = append(flds, fld) + } + return &ast.InterfaceType{Methods: &ast.FieldList{List: flds}} +} diff --git a/autocompletecontext.go b/autocompletecontext.go index d5d4bc47..64bddc50 100644 --- a/autocompletecontext.go +++ b/autocompletecontext.go @@ -6,13 +6,19 @@ import ( "go/ast" "go/parser" "go/token" + "go/types" "log" "os" "path/filepath" "runtime" "sort" "strings" + "sync" "time" + + "github.com/visualfc/gotools/pkgs" + pkgwalk "github.com/visualfc/gotools/types" + "golang.org/x/tools/go/types/typeutil" ) //------------------------------------------------------------------------- @@ -77,7 +83,6 @@ func (b *out_buffers) append_decl(p, name, pkg string, decl *decl, class decl_cl if c1 || c2 || c3 || c4 || c5 { return } - decl.pretty_print_type(b.tmpbuf, b.canonical_aliases) b.candidates 
= append(b.candidates, candidate{ Name: name, @@ -153,13 +158,28 @@ type auto_complete_context struct { pcache package_cache // packages cache declcache *decl_cache // top-level declarations cache + pkgindex *pkgs.PathPkgsIndex + mutex sync.Mutex + // types + typesWalker *pkgwalk.PkgWalker + typesConf *pkgwalk.PkgConfig + typesPkg *types.Package + typesCursor int } -func new_auto_complete_context(pcache package_cache, declcache *decl_cache) *auto_complete_context { +func new_auto_complete_context(ctx *package_lookup_context, pcache package_cache, declcache *decl_cache) *auto_complete_context { c := new(auto_complete_context) c.current = new_auto_complete_file("", declcache.context) c.pcache = pcache c.declcache = declcache + c.typesWalker = pkgwalk.NewPkgWalker(&ctx.Context) + c.pkgindex = nil + //go func(c *auto_complete_context, ctx build.Context) { + var indexs pkgs.PathPkgsIndex + indexs.LoadIndex(ctx.Context, pkgs.LoadAll) + indexs.Sort() + c.pkgindex = &indexs + //}(c, ctx.Context) return c } @@ -175,7 +195,7 @@ func (c *auto_complete_context) update_caches() { c.pcache.append_packages(ps, other.packages) } - update_packages(ps) + c.update_packages(ps) // fix imports for all files fixup_packages(c.current.filescope, c.current.packages, c.pcache) @@ -286,9 +306,88 @@ func (c *auto_complete_context) get_candidates_from_decl(cc cursor_context, clas func (c *auto_complete_context) get_import_candidates(partial string, b *out_buffers) { currentPackagePath, pkgdirs := g_daemon.context.pkg_dirs() resultSet := map[string]struct{}{} - for _, pkgdir := range pkgdirs { - // convert srcpath to pkgpath and get candidates - get_import_candidates_dir(pkgdir, filepath.FromSlash(partial), b.ignorecase, currentPackagePath, resultSet) + if c.typesWalker.Mod != nil { + //goroot + for _, index := range c.pkgindex.Indexs { + if !index.Goroot { + continue + } + for _, pkg := range index.Pkgs { + if pkg.IsCommand() { + continue + } + if strings.HasPrefix(pkg.ImportPath, "cmd/") || + 
strings.Contains(pkg.ImportPath, "vendor/") || + strings.Contains(pkg.ImportPath, "internal") { + continue + } + if !has_prefix(pkg.ImportPath, partial, b.ignorecase) { + continue + } + resultSet[pkg.ImportPath] = struct{}{} + } + } + //mod path + resultSet[c.typesWalker.Mod.Root().Path] = struct{}{} + //mod deps + deps := c.typesWalker.Mod.DepImportList(true, true) + //local path + locals := c.typesWalker.Mod.LocalImportList(true) + for _, dep := range deps { + if !has_prefix(dep, partial, b.ignorecase) { + continue + } + if strings.Contains(dep, "/vendor/") || + strings.Contains(dep, "/internal/") || + strings.HasSuffix(dep, "/internal") { + continue + } + resultSet[dep] = struct{}{} + } + for _, local := range locals { + if !has_prefix(local, partial, b.ignorecase) { + continue + } + if strings.Contains(local, "/vendor/") { + continue + } + resultSet[local] = struct{}{} + } + } else if c.pkgindex != nil { + for _, index := range c.pkgindex.Indexs { + for _, pkg := range index.Pkgs { + if pkg.IsCommand() { + continue + } + if !has_prefix(pkg.ImportPath, partial, b.ignorecase) { + continue + } + if pkg.Goroot && + (strings.HasPrefix(pkg.ImportPath, "vendor/") || + strings.HasPrefix(pkg.ImportPath, "cmd/") || + strings.Contains(pkg.ImportPath, "internal")) { + continue + } + if strings.Contains(pkg.ImportPath, "/internal") { + if ipath, ok := internalImportPath(pkg.ImportPath, currentPackagePath); ok { + resultSet[ipath] = struct{}{} + } + continue + } + if strings.Contains(pkg.ImportPath, "/vendor/") { + if ipath, ok := vendorlessImportPath(pkg.ImportPath, currentPackagePath); ok { + resultSet[ipath] = struct{}{} + } + continue + } + resultSet[pkg.ImportPath] = struct{}{} + } + } + } else { + for _, pkgdir := range pkgdirs { + // convert srcpath to pkgpath and get candidates + get_import_candidates_dir(pkgdir, filepath.FromSlash(partial), b.ignorecase, currentPackagePath, resultSet) + } } for k := range resultSet { b.candidates = append(b.candidates, 
candidate{Name: k, Class: decl_import}) @@ -352,7 +451,7 @@ func (c *auto_complete_context) apropos(file []byte, filename string, cursor int // Does full processing of the currently edited file (top-level declarations plus // active function). - c.current.process_data(filesemi) + c.current.process_data(filesemi, c) // Updates cache of other files and packages. See the function for details of // the process. At the end merges all the top-level declarations into the package @@ -379,13 +478,32 @@ func (c *auto_complete_context) apropos(file []byte, filename string, cursor int } if !ok { var d *decl - if ident, ok := cc.expr.(*ast.Ident); ok && g_config.UnimportedPackages { - p := resolveKnownPackageIdent(ident.Name, c.current.name, c.current.context) - if p != nil { - c.pcache[p.name] = p - d = p.main + // lookup types + if ident, ok := cc.expr.(*ast.Ident); ok { + if g_config.UnimportedPackages { + p := c.resolveKnownPackageIdent(ident.Name, c.current.name, c.current.context) + if p != nil { + c.pcache[p.name] = p + d = p.main + } + } + if d == nil { + if typ := g_daemon.autocomplete.lookup_ident(ident); typ != nil { + if named, ok := typ.(*types.Named); ok { + pkg := named.Obj().Pkg() + dt := toType(pkg, typ.Underlying()) + d = new_decl_full(ident.Name, decl_type, 0, dt, nil, -1, nil) + // add methods + for _, sel := range typeutil.IntuitiveMethodSet(named, nil) { + ft := toType(pkg, sel.Type()) + method := sel.Obj().Name() + d.add_child(new_decl_full(method, decl_func, 0, ft, nil, -1, nil)) + } + } + } } } + if d == nil { return nil, 0 } @@ -432,7 +550,6 @@ func (c *auto_complete_context) apropos(file []byte, filename string, cursor int c.get_candidates_from_decl(cc, class, b) } } - if len(b.candidates) == 0 { return nil, 0 } @@ -441,9 +558,10 @@ func (c *auto_complete_context) apropos(file []byte, filename string, cursor int return b.candidates, partial } -func update_packages(ps map[string]*package_file_cache) { +func (c *auto_complete_context) update_packages(ps 
map[string]*package_file_cache) { // initiate package cache update done := make(chan bool) + for _, p := range ps { go func(p *package_file_cache) { defer func() { @@ -452,7 +570,7 @@ func update_packages(ps map[string]*package_file_cache) { done <- false } }() - p.update_cache() + p.update_cache(c) done <- true }(p) } diff --git a/autocompletefile.go b/autocompletefile.go index c68f7ca0..d0275247 100644 --- a/autocompletefile.go +++ b/autocompletefile.go @@ -6,7 +6,10 @@ import ( "go/parser" "go/scanner" "go/token" + "go/types" "log" + + "golang.org/x/tools/go/types/typeutil" ) func parse_decl_list(fset *token.FileSet, data []byte) ([]ast.Decl, error) { @@ -64,8 +67,22 @@ func (f *auto_complete_file) offset(p token.Pos) int { } // this one is used for current file buffer exclusively -func (f *auto_complete_file) process_data(data []byte) { - cur, filedata, block := rip_off_decl(data, f.cursor) +func (f *auto_complete_file) process_data(data []byte, ctx *auto_complete_context) { + // topLevelTok fix rip_off_decl on multi var decl + // var (\n jsData = `{ }`\n file2 *File = func() *File { + var topLevelTok token.Token + if cf, ok := ctx.typesWalker.ParsedFileCache[f.name]; ok { + pos := token.Pos(ctx.typesWalker.FileSet.File(cf.Pos()).Base()) + token.Pos(f.cursor) + for _, decl := range cf.Decls { + if pos >= decl.Pos() && pos <= decl.End() { + if decl, ok := decl.(*ast.GenDecl); ok { + topLevelTok = decl.Tok + } + break + } + } + } + cur, filedata, block := rip_off_decl(data, f.cursor, topLevelTok) file, err := parser.ParseFile(f.fset, "", filedata, parser.AllErrors) if err != nil && *g_debug { log_parse_error("Error parsing input file (outer block)", err) @@ -85,6 +102,7 @@ func (f *auto_complete_file) process_data(data []byte) { for _, decl := range file.Decls { append_to_top_decls(f.decls, decl, f.scope) } + if block != nil { // process local function as top-level declaration decls, err := parse_decl_list(f.fset, block) @@ -116,6 +134,7 @@ func (f 
*auto_complete_file) process_decl_locals(decl ast.Decl) { s := f.scope f.scope = new_scope(f.scope) + f.process_field_list_typeparams(ForFuncType(t.Type), s) f.process_field_list(t.Recv, s) f.process_field_list(t.Type.Params, s) f.process_field_list(t.Type.Results, s) @@ -135,6 +154,7 @@ func (f *auto_complete_file) process_decl(decl ast.Decl) { prevscope := f.scope foreach_decl(decl, func(data *foreach_decl_struct) { class := ast_decl_class(data.decl) + typeparams := ast_decl_typeparams(data.decl) if class != decl_type { f.scope, prevscope = advance_scope(f.scope) } @@ -143,8 +163,9 @@ func (f *auto_complete_file) process_decl(decl ast.Decl) { d := new_decl_full(name.Name, class, ast_decl_flags(data.decl), typ, v, vi, prevscope) if d == nil { - return + continue } + d.typeparams = typeparams f.scope.add_named_decl(d) } @@ -383,6 +404,43 @@ func (f *auto_complete_file) process_field_list(field_list *ast.FieldList, s *sc } } +func (f *auto_complete_file) process_field_list_typeparams(field_list *ast.FieldList, s *scope) { + if field_list == nil { + return + } + for _, tp := range field_list.List { + for _, name := range tp.Names { + if typ := g_daemon.autocomplete.lookup_types(tp.Type); typ != nil { + switch st := typ.(type) { + case *TypeParam: + dt := toType(nil, st.Constraint().Underlying()) + d := new_decl_full(name.Name, decl_type, 0, dt, nil, -1, s) + s.add_named_decl(d) + case *types.Named: + named := st + pkg := named.Obj().Pkg() + dt := toType(pkg, typ.Underlying()) + d := new_decl_full(name.Name, decl_type, 0, dt, nil, -1, s) + // add methods + for _, sel := range typeutil.IntuitiveMethodSet(named, nil) { + ft := toType(pkg, sel.Type()) + method := sel.Obj().Name() + d.add_child(new_decl_full(method, decl_func, 0, ft, nil, -1, s)) + } + s.add_named_decl(d) + default: + dt := toType(nil, typ) + d := new_decl_full(name.Name, decl_type, 0, dt, nil, -1, s) + s.add_named_decl(d) + } + } else { + d := new_decl_full(name.Name, decl_type, 0, tp.Type, nil, -1, s) + 
s.add_named_decl(d) + } + } + } +} + func (f *auto_complete_file) cursor_in_if_head(s *ast.IfStmt) bool { if f.cursor > f.offset(s.If) && f.cursor <= f.offset(s.Body.Lbrace) { return true @@ -413,7 +471,13 @@ func (f *auto_complete_file) cursor_in(block *ast.BlockStmt) bool { return false } - if f.cursor > f.offset(block.Lbrace) && f.cursor <= f.offset(block.Rbrace) { + // fix block.Rbrace=0 in Go1.14 + end := block.Rbrace + if end < block.Lbrace { + end = block.End() - 1 + } + + if f.cursor > f.offset(block.Lbrace) && f.cursor <= f.offset(end) { return true } return false diff --git a/client.go b/client.go index 3174a2d1..bc6f5c87 100644 --- a/client.go +++ b/client.go @@ -42,6 +42,8 @@ func do_client() int { switch flag.Arg(0) { case "autocomplete": cmd_auto_complete(client) + case "liteide_typesinfo": + cmd_types_info(client) case "close": cmd_close(client) case "status": @@ -103,7 +105,7 @@ func try_to_connect(network, address string) (client *rpc.Client, err error) { return } -func prepare_file_filename_cursor() ([]byte, string, int) { +func prepare_file_filename_cursor(filter bool) ([]byte, string, int, string) { var file []byte var err error @@ -117,19 +119,26 @@ func prepare_file_filename_cursor() ([]byte, string, int) { panic(err.Error()) } - var skipped int - file, skipped = filter_out_shebang(file) - filename := *g_input cursor := -1 offset := "" + addin := "" switch flag.NArg() { case 2: offset = flag.Arg(1) case 3: filename = flag.Arg(1) // Override default filename offset = flag.Arg(2) + case 4: + filename = flag.Arg(1) // Override default filename + offset = flag.Arg(2) + addin = flag.Arg(3) + } + + var skipped int + if filter { + file, skipped = filter_out_shebang(file) } if offset != "" { @@ -142,11 +151,12 @@ func prepare_file_filename_cursor() ([]byte, string, int) { } cursor -= skipped + if filename != "" && !filepath.IsAbs(filename) { cwd, _ := os.Getwd() filename = filepath.Join(cwd, filename) } - return file, filename, cursor + return file, 
filename, cursor, addin } //------------------------------------------------------------------------- @@ -159,11 +169,26 @@ func cmd_status(c *rpc.Client) { func cmd_auto_complete(c *rpc.Client) { context := pack_build_context(&build.Default) - file, filename, cursor := prepare_file_filename_cursor() + file, filename, cursor, _ := prepare_file_filename_cursor(true) f := get_formatter(*g_format) f.write_candidates(client_auto_complete(c, file, filename, cursor, context)) } +func write_tyepsinfo(infos []string, num int) { + if infos == nil { + return + } + for _, c := range infos { + fmt.Printf("%s\n", c) + } +} + +func cmd_types_info(c *rpc.Client) { + context := pack_build_context(&build.Default) + file, filename, cursor, addin := prepare_file_filename_cursor(true) + write_tyepsinfo(client_types_info(c, file, filename, cursor, addin, context)) +} + func cmd_close(c *rpc.Client) { client_close(c, 0) } diff --git a/cursorcontext.go b/cursorcontext.go index 92c036b1..a0ffc5c8 100644 --- a/cursorcontext.go +++ b/cursorcontext.go @@ -211,6 +211,14 @@ loop: if prev != token.IDENT { break loop } + case token.STRUCT: + // struct { + switch prev { + case token.LBRACE: + // all ok + default: + break loop + } case token.IDENT: // Valid tokens after IDENT are '.', '[', '{' and '('. switch prev { @@ -223,12 +231,16 @@ loop: // This one can only be a part of type initialization, like: // Dummy{}.Hello() // It is valid Go if Hello method is defined on a non-pointer receiver. - if prev != token.PERIOD { + // struct {...}{} + switch prev { + case token.PERIOD, token.LBRACE: + // all ok + default: break loop } this.skip_to_balanced_pair() - case token.RPAREN, token.RBRACK: - // After ']' and ')' their opening counterparts are valid '[', '(', + case token.RPAREN: + // After ')' their opening counterparts are valid '[', '(', // as well as the dot. 
switch prev { case token.PERIOD, token.LBRACK, token.LPAREN: @@ -237,6 +249,16 @@ loop: break loop } this.skip_to_balanced_pair() + case token.RBRACK: + // After ']' their opening counterparts are valid '[', '(', '{', + // as well as the dot. + switch prev { + case token.PERIOD, token.LBRACK, token.LPAREN, token.LBRACE: + // all ok + default: + break loop + } + this.skip_to_balanced_pair() default: break loop } @@ -253,8 +275,13 @@ loop: // expression. func token_items_to_string(tokens []token_item) string { var buf bytes.Buffer + var last token_item for _, t := range tokens { + if t.tok == token.IDENT && last.tok == token.IDENT { + buf.WriteString(" ") + } buf.WriteString(t.literal()) + last = t } return buf.String() } @@ -309,7 +336,6 @@ func (c *auto_complete_context) deduce_cursor_context(file []byte, cursor int) ( if len(iter.tokens) == 0 { return cursor_context{}, false } - // figure out what is just before the cursor switch tok := iter.token(); tok.tok { case token.STRING: @@ -420,19 +446,19 @@ func (c *auto_complete_context) deduce_cursor_context(file []byte, cursor int) ( // package name has nothing to do with package file name, that's why we need to // scan the packages. And many of them will have conflicts. Can we make a smart // prediction algorithm which will prefer certain packages over another ones? 
-func resolveKnownPackageIdent(ident string, filename string, context *package_lookup_context) *package_file_cache { +func (c *auto_complete_context) resolveKnownPackageIdent(ident string, filename string, context *package_lookup_context) *package_file_cache { importPath, ok := knownPackageIdents[ident] if !ok { return nil } - path, ok := abs_path_for_package(filename, importPath, context) + path, vname, ok := abs_path_for_package(filename, importPath, context) if !ok { return nil } - p := new_package_file_cache(path, importPath) - p.update_cache() + p := new_package_file_cache(path, importPath, vname) + p.update_cache(c) return p } diff --git a/decl.go b/decl.go index ffd34a4a..ec8126f5 100644 --- a/decl.go +++ b/decl.go @@ -1,14 +1,16 @@ package main import ( - "bytes" "fmt" "go/ast" "go/token" + "go/types" "io" "reflect" "strings" "sync" + + "golang.org/x/tools/go/types/typeutil" ) // decl.class @@ -85,6 +87,9 @@ type decl struct { class decl_class flags decl_flags + // typeparams + typeparams *ast.FieldList + // functions for interface type, fields+methods for struct type children map[string]*decl @@ -526,38 +531,15 @@ func func_return_type(f *ast.FuncType, index int) ast.Expr { } type type_path struct { - pkg string - name string + pkg string + name string + targs []ast.Expr // typeparam index } func (tp *type_path) is_nil() bool { return tp.pkg == "" && tp.name == "" } -// converts type expressions like: -// ast.Expr -// *ast.Expr -// $ast$go/ast.Expr -// to a path that can be used to lookup a type related Decl -func get_type_path(e ast.Expr) (r type_path) { - if e == nil { - return type_path{"", ""} - } - - switch t := e.(type) { - case *ast.Ident: - r.name = t.Name - case *ast.StarExpr: - r = get_type_path(t.X) - case *ast.SelectorExpr: - if ident, ok := t.X.(*ast.Ident); ok { - r.pkg = ident.Name - } - r.name = t.Sel.Name - } - return -} - func lookup_path(tp type_path, scope *scope) *decl { if tp.is_nil() { return nil @@ -597,7 +579,206 @@ func 
lookup_pkg(tp type_path, scope *scope) string { return decl.name } +func instance_decl(d *decl, typ ast.Expr, targs []ast.Expr) *decl { + return new_decl_full(d.name, d.class, d.flags, typ, d.value, d.value_index, d.scope) +} + +func (c *auto_complete_context) lookup_types(t ast.Expr) types.Type { + conf := c.typesConf + + if typ := lookup_types_expr(t, conf.Info); typ != nil { + return typ + } + if conf.XInfo != nil { + if typ := lookup_types_expr(t, conf.XInfo); typ != nil { + return typ + } + } + return nil +} + +func (c *auto_complete_context) lookup_ident(t ast.Expr) types.Type { + conf := c.typesConf + pos := token.Pos(c.typesCursor) + + if ident, ok := t.(*ast.Ident); ok { + if typ := lookup_types_ident(ident, pos, conf.Info); typ != nil { + return typ + } + if conf.XInfo != nil { + if typ := lookup_types_ident(ident, pos, conf.XInfo); typ != nil { + return typ + } + } + } + return nil +} + +func lookup_types(t ast.Expr) types.Type { + conf := g_daemon.autocomplete.typesConf + pos := token.Pos(g_daemon.autocomplete.typesCursor) + + if ident, ok := t.(*ast.Ident); ok { + if typ := lookup_types_ident(ident, pos, conf.Info); typ != nil { + return typ + } + if conf.XInfo != nil { + if typ := lookup_types_ident(ident, pos, conf.XInfo); typ != nil { + return typ + } + } + return nil + } + if typ := lookup_types_expr(t, conf.Info); typ != nil { + return typ + } + if conf.XInfo != nil { + if typ := lookup_types_expr(t, conf.XInfo); typ != nil { + return typ + } + } + return nil +} + +func lookup_types_scope(pos token.Pos) *types.Scope { + return g_daemon.autocomplete.typesPkg.Scope().Innermost(pos) +} + +type typ_distance struct { + pos token.Pos + typ types.Type +} + +// lookup type by ident, from scope or near instance +func lookup_types_ident(ident *ast.Ident, pos token.Pos, info *types.Info) types.Type { + var typ types.Type + if scope := lookup_types_scope(pos); scope != nil { + if obj := scope.Lookup(ident.Name); obj != nil { + typ = obj.Type() + } + if _, obj 
:= scope.LookupParent(ident.Name, pos); obj != nil { + typ = obj.Type() + } + } + // is typeparams lookup instance + if hasTypeParams(typ) { + return lookup_types_near_instance(ident, pos, info) + } + return typ +} + +func lookup_types_text(text string, typ types.Type, skips map[types.Type]bool) types.Type { +retry: + switch t := typ.(type) { + case *types.Array: + typ = t.Elem() + goto retry + case *types.Map: + typ = t.Elem() + goto retry + case *types.Pointer: + typ = t.Elem() + goto retry + case *types.Slice: + typ = t.Elem() + goto retry + case *types.Chan: + typ = t.Elem() + goto retry + case *types.Named: + if text == types.ExprString(toType(nil, typ)) { + return typ + } + if skips[t] { + return nil + } + skips[t] = true + typ = t.Underlying() + goto retry + case *types.Struct: + for i := 0; i < t.NumFields(); i++ { + if r := lookup_types_text(text, t.Field(i).Type(), skips); r != nil { + return r + } + } + } + return nil +} + +// lookup type by type, from type. +func lookup_types_expr(t ast.Expr, info *types.Info) types.Type { + text := types.ExprString(t) + skips := make(map[types.Type]bool) + for k, v := range info.Types { + if v.Type == nil { + continue + } + if text == types.ExprString(k) { + return v.Type + } + if t := lookup_types_text(text, v.Type, skips); t != nil { + return t + } + } + return nil +} + func type_to_decl(t ast.Expr, scope *scope) *decl { + if t == nil { + //TODO + return nil + } + tp := get_type_path(t) + d := lookup_path(tp, scope) + if d == nil { + if st, ok := t.(*ast.StructType); ok { + d = new_decl_full(types.ExprString(t), decl_type, 0, st, nil, -1, scope) + } + // typeparams targs struct type: Typ[struct{...}] + // if typ := g_daemon.autocomplete.lookup_types(t); typ != nil { + // switch st := typ.(type) { + // case *types.Struct: + // dt := toType(nil, typ) + // d = new_decl_full(typ.String(), decl_type, 0, dt, nil, -1, scope) + // case *TypeParam: + // dt := toType(nil, st.Constraint().Underlying()) + // d = 
new_decl_full(typ.String(), decl_type, 0, dt, nil, -1, scope) + // } + // } + } else if d.typeparams != nil { + // typeparams named type instance + retry: + if x, ok := t.(*ast.StarExpr); ok { + t = x.X + goto retry + } + if typ := g_daemon.autocomplete.lookup_types(t); typ != nil { + if named, ok := typ.(*types.Named); ok { + pkg := named.Obj().Pkg() + dt := toType(pkg, typ.Underlying()) + d = new_decl_full(types.ExprString(t), decl_type, d.flags, dt, nil, -1, scope) + // add methods + for _, sel := range typeutil.IntuitiveMethodSet(named, nil) { + ft := toType(pkg, sel.Type()) + method := sel.Obj().Name() + d.add_child(new_decl_full(method, decl_func, 0, ft, nil, -1, scope)) + } + } + } + } + + if d != nil && d.class == decl_var { + // weird variable declaration pointing to itself + return nil + } + return d +} + +func advance_type_to_decl(t ast.Expr, scope *scope) *decl { + if t == nil { + //TODO + return nil + } tp := get_type_path(t) d := lookup_path(tp, scope) if d != nil && d.class == decl_var { @@ -623,7 +804,7 @@ func advance_to_type(pred type_predicate, v ast.Expr, scope *scope) (ast.Expr, * return v, scope } - decl := type_to_decl(v, scope) + decl := advance_type_to_decl(v, scope) if decl == nil { return nil, nil } @@ -761,6 +942,16 @@ func infer_type(v ast.Expr, scope *scope, index int) (ast.Expr, *scope, bool) { if d.class == decl_package { return ast.NewIdent(t.Name), scope, false } + //check type, fix bug test.0055 + if i, ok := d.typ.(*ast.Ident); ok { + if i.Obj != nil && i.Obj.Decl != nil { + if typ, ok := i.Obj.Decl.(*ast.TypeSpec); ok { + if _, ok := typ.Type.(*ast.Ident); ok { + return infer_type(typ.Type, scope, -1) + } + } + } + } typ, scope := d.infer_type() return typ, scope, d.class == decl_type } @@ -876,6 +1067,23 @@ func infer_type(v ast.Expr, scope *scope, index int) (ast.Expr, *scope, bool) { // this is a function call or a type cast: // myFunc(1,2,3) or int16(myvar) it, s, is_type := infer_type(t.Fun, scope, -1) + if it == nil && 
funHasTypeArgs(t.Fun) { + if typ := lookup_types(t.Fun); typ != nil { + it = toType(nil, typ) + s = scope + is_type = false + } + } else if ct, ok := it.(*ast.FuncType); ok { + // ast.FuncType.TypeParams != nil + if funcHasTypeParams(ct) { + if typ := g_daemon.autocomplete.lookup_types(t.Fun); typ != nil { + it = toType(nil, typ) + s = scope + is_type = false + } + } + } + if it == nil { break } @@ -910,7 +1118,6 @@ func infer_type(v ast.Expr, scope *scope, index int) (ast.Expr, *scope, bool) { if it == nil { break } - if d := type_to_decl(it, s); d != nil { c := d.find_child_and_in_embedded(t.Sel.Name) if c != nil { @@ -943,7 +1150,6 @@ func infer_type(v ast.Expr, scope *scope, index int) (ast.Expr, *scope, bool) { return t, scope, true default: _ = reflect.TypeOf(v) - //fmt.Println(ty) } return nil, nil, false } @@ -981,6 +1187,7 @@ func (d *decl) infer_type() (ast.Expr, *scope) { var scope *scope d.typ, scope, _ = infer_type(d.value, d.scope, d.value_index) + return d.typ, scope } @@ -1035,6 +1242,11 @@ func (d *decl) find_child_and_in_embedded(name string) *decl { if d == nil { return nil } + if d.is_alias() { + if dd := d.type_dealias(); dd != nil { + return dd.find_child_and_in_embedded(name) + } + } if d.is_visited() { return nil @@ -1122,102 +1334,6 @@ func get_array_len(e ast.Expr) string { return "" } -func pretty_print_type_expr(out io.Writer, e ast.Expr, canonical_aliases map[string]string) { - switch t := e.(type) { - case *ast.StarExpr: - fmt.Fprintf(out, "*") - pretty_print_type_expr(out, t.X, canonical_aliases) - case *ast.Ident: - if strings.HasPrefix(t.Name, "$") { - // beautify anonymous types - switch t.Name[1] { - case 's': - fmt.Fprintf(out, "struct") - case 'i': - // ok, in most cases anonymous interface is an - // empty interface, I'll just pretend that - // it's always true - fmt.Fprintf(out, "interface{}") - } - } else if !*g_debug && strings.HasPrefix(t.Name, "!") { - // these are full package names for disambiguating and pretty - // 
printing packages within packages, e.g. - // !go/ast!ast vs. !github.com/nsf/my/ast!ast - // another ugly hack, if people are punished in hell for ugly hacks - // I'm screwed... - emarkIdx := strings.LastIndex(t.Name, "!") - path := t.Name[1:emarkIdx] - alias := canonical_aliases[path] - if alias == "" { - alias = t.Name[emarkIdx+1:] - } - fmt.Fprintf(out, alias) - } else { - fmt.Fprintf(out, t.Name) - } - case *ast.ArrayType: - al := "" - if t.Len != nil { - al = get_array_len(t.Len) - } - if al != "" { - fmt.Fprintf(out, "[%s]", al) - } else { - fmt.Fprintf(out, "[]") - } - pretty_print_type_expr(out, t.Elt, canonical_aliases) - case *ast.SelectorExpr: - pretty_print_type_expr(out, t.X, canonical_aliases) - fmt.Fprintf(out, ".%s", t.Sel.Name) - case *ast.FuncType: - fmt.Fprintf(out, "func(") - pretty_print_func_field_list(out, t.Params, canonical_aliases) - fmt.Fprintf(out, ")") - - buf := bytes.NewBuffer(make([]byte, 0, 256)) - nresults := pretty_print_func_field_list(buf, t.Results, canonical_aliases) - if nresults > 0 { - results := buf.String() - if strings.IndexAny(results, ", ") != -1 { - results = "(" + results + ")" - } - fmt.Fprintf(out, " %s", results) - } - case *ast.MapType: - fmt.Fprintf(out, "map[") - pretty_print_type_expr(out, t.Key, canonical_aliases) - fmt.Fprintf(out, "]") - pretty_print_type_expr(out, t.Value, canonical_aliases) - case *ast.InterfaceType: - fmt.Fprintf(out, "interface{}") - case *ast.Ellipsis: - fmt.Fprintf(out, "...") - pretty_print_type_expr(out, t.Elt, canonical_aliases) - case *ast.StructType: - fmt.Fprintf(out, "struct") - case *ast.ChanType: - switch t.Dir { - case ast.RECV: - fmt.Fprintf(out, "<-chan ") - case ast.SEND: - fmt.Fprintf(out, "chan<- ") - case ast.SEND | ast.RECV: - fmt.Fprintf(out, "chan ") - } - pretty_print_type_expr(out, t.Value, canonical_aliases) - case *ast.ParenExpr: - fmt.Fprintf(out, "(") - pretty_print_type_expr(out, t.X, canonical_aliases) - fmt.Fprintf(out, ")") - case *ast.BadExpr: - // TODO: 
probably I should check that in a separate function - // and simply discard declarations with BadExpr as a part of their - // type - default: - // the element has some weird type, just ignore it - } -} - func pretty_print_func_field_list(out io.Writer, f *ast.FieldList, canonical_aliases map[string]string) int { count := 0 if f == nil { diff --git a/declcache.go b/declcache.go index 215a5a8a..8fa5a46b 100644 --- a/declcache.go +++ b/declcache.go @@ -22,6 +22,7 @@ type package_import struct { alias string abspath string path string + vpath string } // Parses import declarations until the first non-import declaration and fills @@ -33,9 +34,9 @@ func collect_package_imports(filename string, decls []ast.Decl, context *package for _, spec := range gd.Specs { imp := spec.(*ast.ImportSpec) path, alias := path_and_alias(imp) - abspath, ok := abs_path_for_package(filename, path, context) + abspath, vpath, ok := abs_path_for_package(filename, path, context) if ok && alias != "_" { - pi = append(pi, package_import{alias, abspath, path}) + pi = append(pi, package_import{alias, abspath, path, vpath}) } } } else { @@ -119,13 +120,15 @@ func (f *decl_file_cache) process_data(data []byte) { func append_to_top_decls(decls map[string]*decl, decl ast.Decl, scope *scope) { foreach_decl(decl, func(data *foreach_decl_struct) { class := ast_decl_class(data.decl) + typeparams := ast_decl_typeparams(data.decl) for i, name := range data.names { typ, v, vi := data.type_value_index(i) d := new_decl_full(name.Name, class, ast_decl_flags(data.decl), typ, v, vi, scope) if d == nil { - return + continue } + d.typeparams = typeparams methodof := method_of(decl) if methodof != "" { @@ -149,17 +152,17 @@ func append_to_top_decls(decls map[string]*decl, decl ast.Decl, scope *scope) { }) } -func abs_path_for_package(filename, p string, context *package_lookup_context) (string, bool) { +func abs_path_for_package(filename, p string, context *package_lookup_context) (string, string, bool) { dir, _ := 
filepath.Split(filename) if len(p) == 0 { - return "", false + return "", "", false } if p[0] == '.' { - return fmt.Sprintf("%s.a", filepath.Join(dir, p)), true + return fmt.Sprintf("%s.a", filepath.Join(dir, p)), "", true } pkg, ok := find_go_dag_package(p, dir) if ok { - return pkg, true + return pkg, "", true } return find_global_file(p, context) } @@ -282,14 +285,15 @@ func log_build_context(context *package_lookup_context) { // find_global_file returns the file path of the compiled package corresponding to the specified // import, and a boolean stating whether such path is valid. // TODO: Return only one value, possibly empty string if not found. -func find_global_file(imp string, context *package_lookup_context) (string, bool) { +// pkgpath, update importpath, bool +func find_global_file(imp string, context *package_lookup_context) (string, string, bool) { // gocode synthetically generates the builtin package // "unsafe", since the "unsafe.a" package doesn't really exist. // Thus, when the user request for the package "unsafe" we // would return synthetic global file that would be used // just as a key name to find this synthetic package if imp == "unsafe" { - return "unsafe", true + return "unsafe", "", true } pkgfile := fmt.Sprintf("%s.a", imp) @@ -300,14 +304,14 @@ func find_global_file(imp string, context *package_lookup_context) (string, bool pkg_path := filepath.Join(p, pkgfile) if file_exists(pkg_path) { log_found_package_maybe(imp, pkg_path) - return pkg_path, true + return pkg_path, "", true } // Also check the relevant pkg/OS_ARCH dir for the libpath, if provided. 
pkgdir := fmt.Sprintf("%s_%s", context.GOOS, context.GOARCH) pkg_path = filepath.Join(p, "pkg", pkgdir, pkgfile) if file_exists(pkg_path) { log_found_package_maybe(imp, pkg_path) - return pkg_path, true + return pkg_path, "", true } } } @@ -322,7 +326,7 @@ func find_global_file(imp string, context *package_lookup_context) (string, bool pkg_path := filepath.Join(pkgdir, pkgfile) if file_exists(pkg_path) { log_found_package_maybe(imp, pkg_path) - return pkg_path, true + return pkg_path, "", true } } @@ -352,7 +356,7 @@ func find_global_file(imp string, context *package_lookup_context) (string, bool if !fi.IsDir() && filepath.Ext(fi.Name()) == ".a" { pkg_path := filepath.Join(root, impath, fi.Name()) log_found_package_maybe(imp, pkg_path) - return pkg_path, true + return pkg_path, "", true } } } @@ -376,8 +380,8 @@ func find_global_file(imp string, context *package_lookup_context) (string, bool try_autobuild(p) if file_exists(p.PkgObj) { log_found_package_maybe(imp, p.PkgObj) - return p.PkgObj, true } + return p.PkgObj, p.ImportPath, true } if package_path == "" { break @@ -391,12 +395,18 @@ func find_global_file(imp string, context *package_lookup_context) (string, bool } } + for _, v := range g_daemon.autocomplete.typesWalker.Imported { + if v.Path() == imp { + return imp, v.Path(), true + } + } + if p, err := context.Import(imp, "", build.AllowBinary|build.FindOnly); err == nil { try_autobuild(p) if file_exists(p.PkgObj) { log_found_package_maybe(imp, p.PkgObj) - return p.PkgObj, true } + return p.PkgObj, p.ImportPath, true } if *g_debug { @@ -404,7 +414,7 @@ func find_global_file(imp string, context *package_lookup_context) (string, bool log.Println("Gocode's build context is:") log_build_context(context) } - return "", false + return "", "", false } func package_name(file *ast.File) string { diff --git a/formatters.go b/formatters.go index 4a9738c9..d5645198 100644 --- a/formatters.go +++ b/formatters.go @@ -13,6 +13,10 @@ type formatter interface { 
write_candidates(candidates []candidate, num int) } +type formatter_ex interface { + formatter +} + //------------------------------------------------------------------------- // nice_formatter (just for testing, simple textual output) //------------------------------------------------------------------------- diff --git a/go.mod b/go.mod new file mode 100644 index 00000000..ae16e170 --- /dev/null +++ b/go.mod @@ -0,0 +1,8 @@ +module github.com/visualfc/gocode + +go 1.13 + +require ( + github.com/visualfc/gotools v1.5.3 + golang.org/x/tools v0.5.0 +) diff --git a/go.sum b/go.sum new file mode 100644 index 00000000..a7ae3a19 --- /dev/null +++ b/go.sum @@ -0,0 +1,41 @@ +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/visualfc/gomod v0.1.2 h1:7qfPmifcA8r/0ZTpTPZQqsm5aJUiQ/EeyHEENPyywDg= +github.com/visualfc/gomod v0.1.2/go.mod h1:rV5goiA/Ul6yT8X2eDnc/dl0dVy0cDHJLZVOuJ8PdmM= +github.com/visualfc/gotools v1.5.3 h1:22TyNM6i+/psP07vjveWy6PhhMGNtwWEbDL8PrW4jms= +github.com/visualfc/gotools v1.5.3/go.mod h1:VlP59ccCsSmRbFZTbuHgQDrTOi40EGKvyFK2Cq2Far8= +github.com/visualfc/goversion v1.1.0/go.mod h1:Gr3s6bW8NTomhheImwAttqno97Mw6pAnFn2dU8/EMa8= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.7.0 h1:LapD9S96VoQRhi/GrNTqeBJFrUjs5UHCAtTlgwA5oZA= +golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net 
v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/tools 
v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.5.0 h1:+bSpV5HIeWkuvgaMfI3UmKRThoTA5ODJTUd8T17NO+4= +golang.org/x/tools v0.5.0/go.mod h1:N+Kgy78s5I24c24dU8OfWNEotWjutIs8SnJvn5IDq+k= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/gocode.go b/gocode.go index b0dd5a27..e1ee5d43 100644 --- a/gocode.go +++ b/gocode.go @@ -11,13 +11,14 @@ import ( ) var ( - g_is_server = flag.Bool("s", false, "run a server instead of a client") - g_format = flag.String("f", "nice", "output format (vim | emacs | nice | csv | csv-with-package | json)") - g_input = flag.String("in", "", "use this file instead of stdin input") - g_sock = create_sock_flag("sock", "socket type (unix | tcp)") - g_addr = flag.String("addr", "127.0.0.1:37373", "address for tcp socket") - g_debug = flag.Bool("debug", false, "enable server-side debug mode") - g_profile = flag.Int("profile", 0, "port on which to expose profiling information for pprof; 0 to disable profiling") + g_is_server = flag.Bool("s", false, "run a server instead of a client") + g_format = flag.String("f", "nice", "output format (vim | emacs | nice | csv | csv-with-package | json)") + g_input = flag.String("in", "", "use this file instead of stdin input") + g_sock = create_sock_flag("sock", "socket type (unix | tcp)") + g_addr = flag.String("addr", "127.0.0.1:37372", "address for tcp socket") + g_debug = flag.Bool("debug", false, "enable server-side debug mode") + g_profile = flag.Int("profile", 0, "port on which to expose profiling information for pprof; 0 to disable profiling") + g_daemon_name = flag.String("daemon", "gocode-daemon-liteide", "unix socket daemon prefix name") ) func get_socket_filename() string { 
@@ -25,7 +26,7 @@ func get_socket_filename() string { if user == "" { user = "all" } - return filepath.Join(os.TempDir(), fmt.Sprintf("gocode-daemon.%s", user)) + return filepath.Join(os.TempDir(), fmt.Sprintf("%s.%s", *g_daemon_name, user)) } func show_usage() { diff --git a/internal/gcexportdata/example_test.go b/internal/gcexportdata/example_test.go new file mode 100644 index 00000000..a50bc40b --- /dev/null +++ b/internal/gcexportdata/example_test.go @@ -0,0 +1,126 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.7 +// +build gc + +package gcexportdata_test + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "go/types" + "log" + "os" + "path/filepath" + + "golang.org/x/tools/go/gcexportdata" +) + +// ExampleRead uses gcexportdata.Read to load type information for the +// "fmt" package from the fmt.a file produced by the gc compiler. +func ExampleRead() { + // Find the export data file. + filename, path := gcexportdata.Find("fmt", "") + if filename == "" { + log.Fatalf("can't find export data for fmt") + } + fmt.Printf("Package path: %s\n", path) + fmt.Printf("Export data: %s\n", filepath.Base(filename)) + + // Open and read the file. + f, err := os.Open(filename) + if err != nil { + log.Fatal(err) + } + defer f.Close() + r, err := gcexportdata.NewReader(f) + if err != nil { + log.Fatalf("reading export data %s: %v", filename, err) + } + + // Decode the export data. + fset := token.NewFileSet() + imports := make(map[string]*types.Package) + pkg, err := gcexportdata.Read(r, fset, imports, path) + if err != nil { + log.Fatal(err) + } + + // Print package information. + members := pkg.Scope().Names() + if members[0] == ".inittask" { + // An improvement to init handling in 1.13 added ".inittask". Remove so go >= 1.13 and go < 1.13 both pass. 
+ members = members[1:] + } + fmt.Printf("Package members: %s...\n", members[:5]) + println := pkg.Scope().Lookup("Println") + posn := fset.Position(println.Pos()) + posn.Line = 123 // make example deterministic + fmt.Printf("Println type: %s\n", println.Type()) + fmt.Printf("Println location: %s\n", slashify(posn)) + + // Output: + // + // Package path: fmt + // Export data: fmt.a + // Package members: [Errorf Formatter Fprint Fprintf Fprintln]... + // Println type: func(a ...interface{}) (n int, err error) + // Println location: $GOROOT/src/fmt/print.go:123:1 +} + +// ExampleNewImporter demonstrates usage of NewImporter to provide type +// information for dependencies when type-checking Go source code. +func ExampleNewImporter() { + const src = `package myrpc + +// choosing a package that doesn't change across releases +import "net/rpc" + +const serverError rpc.ServerError = "" +` + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "myrpc.go", src, 0) + if err != nil { + log.Fatal(err) + } + + packages := make(map[string]*types.Package) + imp := gcexportdata.NewImporter(fset, packages) + conf := types.Config{Importer: imp} + pkg, err := conf.Check("myrpc", fset, []*ast.File{f}, nil) + if err != nil { + log.Fatal(err) + } + + // object from imported package + pi := packages["net/rpc"].Scope().Lookup("ServerError") + fmt.Printf("type %s.%s %s // %s\n", + pi.Pkg().Path(), + pi.Name(), + pi.Type().Underlying(), + slashify(fset.Position(pi.Pos())), + ) + + // object in source package + twopi := pkg.Scope().Lookup("serverError") + fmt.Printf("const %s %s = %s // %s\n", + twopi.Name(), + twopi.Type(), + twopi.(*types.Const).Val(), + slashify(fset.Position(twopi.Pos())), + ) + + // Output: + // + // type net/rpc.ServerError string // $GOROOT/src/net/rpc/client.go:20:1 + // const serverError net/rpc.ServerError = "" // myrpc.go:6:7 +} + +func slashify(posn token.Position) token.Position { + posn.Filename = filepath.ToSlash(posn.Filename) // for MS Windows 
portability + return posn +} diff --git a/internal/gcexportdata/gcexportdata.go b/internal/gcexportdata/gcexportdata.go new file mode 100644 index 00000000..250de66f --- /dev/null +++ b/internal/gcexportdata/gcexportdata.go @@ -0,0 +1,109 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gcexportdata provides functions for locating, reading, and +// writing export data files containing type information produced by the +// gc compiler. This package supports go1.7 export data format and all +// later versions. +// +// Although it might seem convenient for this package to live alongside +// go/types in the standard library, this would cause version skew +// problems for developer tools that use it, since they must be able to +// consume the outputs of the gc compiler both before and after a Go +// update such as from Go 1.7 to Go 1.8. Because this package lives in +// golang.org/x/tools, sites can update their version of this repo some +// time before the Go 1.8 release and rebuild and redeploy their +// developer tools, which will then be able to consume both Go 1.7 and +// Go 1.8 export data files, so they will work before and after the +// Go update. (See discussion at https://golang.org/issue/15651.) +// +package gcexportdata + +import ( + "bufio" + "bytes" + "fmt" + "go/token" + "go/types" + "io" + "io/ioutil" + + "github.com/visualfc/gocode/internal/gcimporter" +) + +// Find returns the name of an object (.o) or archive (.a) file +// containing type information for the specified import path, +// using the workspace layout conventions of go/build. +// If no file was found, an empty filename is returned. +// +// A relative srcDir is interpreted relative to the current working directory. +// +// Find also returns the package's resolved (canonical) import path, +// reflecting the effects of srcDir and vendoring on importPath. 
+func Find(importPath, srcDir string) (filename, path string) { + return gcimporter.FindPkg(importPath, srcDir) +} + +// NewReader returns a reader for the export data section of an object +// (.o) or archive (.a) file read from r. The new reader may provide +// additional trailing data beyond the end of the export data. +func NewReader(r io.Reader) (io.Reader, error) { + buf := bufio.NewReader(r) + _, err := gcimporter.FindExportData(buf) + // If we ever switch to a zip-like archive format with the ToC + // at the end, we can return the correct portion of export data, + // but for now we must return the entire rest of the file. + return buf, err +} + +// Read reads export data from in, decodes it, and returns type +// information for the package. +// The package name is specified by path. +// File position information is added to fset. +// +// Read may inspect and add to the imports map to ensure that references +// within the export data to other packages are consistent. The caller +// must ensure that imports[path] does not exist, or exists but is +// incomplete (see types.Package.Complete), and Read inserts the +// resulting package into this map entry. +// +// On return, the state of the reader is undefined. +func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) { + data, err := ioutil.ReadAll(in) + if err != nil { + return nil, fmt.Errorf("reading export data for %q: %v", path, err) + } + + if bytes.HasPrefix(data, []byte("!")) { + return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path) + } + + // The App Engine Go runtime v1.6 uses the old export data format. + // TODO(adonovan): delete once v1.7 has been around for a while. 
+ if bytes.HasPrefix(data, []byte("package ")) { + return gcimporter.ImportData(imports, path, path, bytes.NewReader(data)) + } + + // The indexed export format starts with an 'i'; the older + // binary export format starts with a 'c', 'd', or 'v' + // (from "version"). Select appropriate importer. + if len(data) > 0 && data[0] == 'i' { + _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path) + return pkg, err + } + + _, pkg, err := gcimporter.BImportData(fset, imports, data, path) + return pkg, err +} + +// Write writes encoded type information for the specified package to out. +// The FileSet provides file position information for named objects. +func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error { + b, err := gcimporter.BExportData(fset, pkg) + if err != nil { + return err + } + _, err = out.Write(b) + return err +} diff --git a/internal/gcexportdata/gcexportdata_test.go b/internal/gcexportdata/gcexportdata_test.go new file mode 100644 index 00000000..69133db9 --- /dev/null +++ b/internal/gcexportdata/gcexportdata_test.go @@ -0,0 +1,41 @@ +package gcexportdata_test + +import ( + "go/token" + "go/types" + "log" + "os" + "testing" + + "golang.org/x/tools/go/gcexportdata" +) + +// Test to ensure that gcexportdata can read files produced by App +// Engine Go runtime v1.6. +func TestAppEngine16(t *testing.T) { + // Open and read the file. + f, err := os.Open("testdata/errors-ae16.a") + if err != nil { + t.Fatal(err) + } + defer f.Close() + r, err := gcexportdata.NewReader(f) + if err != nil { + log.Fatalf("reading export data: %v", err) + } + + // Decode the export data. + fset := token.NewFileSet() + imports := make(map[string]*types.Package) + pkg, err := gcexportdata.Read(r, fset, imports, "errors") + if err != nil { + log.Fatal(err) + } + + // Print package information. 
+ got := pkg.Scope().Lookup("New").Type().String() + want := "func(text string) error" + if got != want { + t.Errorf("New.Type = %s, want %s", got, want) + } +} diff --git a/internal/gcexportdata/importer.go b/internal/gcexportdata/importer.go new file mode 100644 index 00000000..efe221e7 --- /dev/null +++ b/internal/gcexportdata/importer.go @@ -0,0 +1,73 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gcexportdata + +import ( + "fmt" + "go/token" + "go/types" + "os" +) + +// NewImporter returns a new instance of the types.Importer interface +// that reads type information from export data files written by gc. +// The Importer also satisfies types.ImporterFrom. +// +// Export data files are located using "go build" workspace conventions +// and the build.Default context. +// +// Use this importer instead of go/importer.For("gc", ...) to avoid the +// version-skew problems described in the documentation of this package, +// or to control the FileSet or access the imports map populated during +// package loading. +// +func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom { + return importer{fset, imports} +} + +type importer struct { + fset *token.FileSet + imports map[string]*types.Package +} + +func (imp importer) Import(importPath string) (*types.Package, error) { + return imp.ImportFrom(importPath, "", 0) +} + +func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) { + filename, path := Find(importPath, srcDir) + if filename == "" { + if importPath == "unsafe" { + // Even for unsafe, call Find first in case + // the package was vendored. 
+ return types.Unsafe, nil + } + return nil, fmt.Errorf("can't find import: %s", importPath) + } + + if pkg, ok := imp.imports[path]; ok && pkg.Complete() { + return pkg, nil // cache hit + } + + // open file + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer func() { + f.Close() + if err != nil { + // add file name to error + err = fmt.Errorf("reading export data: %s: %v", filename, err) + } + }() + + r, err := NewReader(f) + if err != nil { + return nil, err + } + + return Read(r, imp.fset, imp.imports, path) +} diff --git a/internal/gcexportdata/main.go b/internal/gcexportdata/main.go new file mode 100644 index 00000000..2713dce6 --- /dev/null +++ b/internal/gcexportdata/main.go @@ -0,0 +1,99 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build ignore + +// The gcexportdata command is a diagnostic tool that displays the +// contents of gc export data files. +package main + +import ( + "flag" + "fmt" + "go/token" + "go/types" + "log" + "os" + + "golang.org/x/tools/go/gcexportdata" + "golang.org/x/tools/go/types/typeutil" +) + +var packageFlag = flag.String("package", "", "alternative package to print") + +func main() { + log.SetPrefix("gcexportdata: ") + log.SetFlags(0) + flag.Usage = func() { + fmt.Fprintln(os.Stderr, "usage: gcexportdata [-package path] file.a") + } + flag.Parse() + if flag.NArg() != 1 { + flag.Usage() + os.Exit(2) + } + filename := flag.Args()[0] + + f, err := os.Open(filename) + if err != nil { + log.Fatal(err) + } + + r, err := gcexportdata.NewReader(f) + if err != nil { + log.Fatalf("%s: %s", filename, err) + } + + // Decode the package. 
+ const primary = "" + imports := make(map[string]*types.Package) + fset := token.NewFileSet() + pkg, err := gcexportdata.Read(r, fset, imports, primary) + if err != nil { + log.Fatalf("%s: %s", filename, err) + } + + // Optionally select an indirectly mentioned package. + if *packageFlag != "" { + pkg = imports[*packageFlag] + if pkg == nil { + fmt.Fprintf(os.Stderr, "export data file %s does not mention %s; has:\n", + filename, *packageFlag) + for p := range imports { + if p != primary { + fmt.Fprintf(os.Stderr, "\t%s\n", p) + } + } + os.Exit(1) + } + } + + // Print all package-level declarations, including non-exported ones. + fmt.Printf("package %s\n", pkg.Name()) + for _, imp := range pkg.Imports() { + fmt.Printf("import %q\n", imp.Path()) + } + qual := func(p *types.Package) string { + if pkg == p { + return "" + } + return p.Name() + } + scope := pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + fmt.Printf("%s: %s\n", + fset.Position(obj.Pos()), + types.ObjectString(obj, qual)) + + // For types, print each method. + if _, ok := obj.(*types.TypeName); ok { + for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) { + fmt.Printf("%s: %s\n", + fset.Position(method.Obj().Pos()), + types.SelectionString(method, qual)) + } + } + } +} diff --git a/internal/gcimporter/bexport.go b/internal/gcimporter/bexport.go new file mode 100644 index 00000000..06bafe3a --- /dev/null +++ b/internal/gcimporter/bexport.go @@ -0,0 +1,886 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Binary package export. +// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go; +// see that file for specification of the format. 
+
+package gcimporter
+
+import (
+ "bytes"
+ "encoding/binary"
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "math"
+ "math/big"
+ "sort"
+ "strings"
+)
+
+// If debugFormat is set, each integer and string value is preceded by a marker
+// and position information in the encoding. This mechanism permits an importer
+// to recognize immediately when it is out of sync. The importer recognizes this
+// mode automatically (i.e., it can import export data produced with debugging
+// support even if debugFormat is not set at the time of import). This mode will
+// lead to massively larger export data (by a factor of 2 to 3) and should only
+// be enabled during development and debugging.
+//
+// NOTE: This flag is the first flag to enable if importing dies because of
+// (suspected) format errors, and whenever a change is made to the format.
+const debugFormat = false // default: false
+
+// If trace is set, debugging output is printed to std out.
+const trace = false // default: false
+
+// Current export format version. Increase with each format change.
+// Note: The latest binary (non-indexed) export format is at version 6.
+//
+// This exporter is still at level 4, but it doesn't matter since
+// the binary importer can handle older versions just fine.
+//
+// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
+// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE
+// 4: type name objects support type aliases, uses aliasTag
+// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
+// 2: removed unused bool in ODCL export (compiler only)
+// 1: header format change (more regular), export package for _ struct fields
+// 0: Go1.7 encoding
+const exportVersion = 4
+
+// trackAllTypes enables cycle tracking for all types, not just named
+// types. The existing compiler invariants assume that unnamed types
+// that are not completely set up are not used, or else there are spurious
+// errors.
+// If disabled, only named types are tracked, possibly leading to slightly
+// less efficient encoding in rare cases. It also prevents the export of
+// some corner-case type declarations (but those are not handled correctly
+// with the textual export format either).
+// TODO(gri) enable and remove once issues caused by it are fixed
+const trackAllTypes = false
+
+type exporter struct {
+ fset *token.FileSet
+ out bytes.Buffer
+
+ // object -> index maps, indexed in order of serialization
+ strIndex map[string]int
+ pkgIndex map[*types.Package]int
+ typIndex map[types.Type]int
+
+ // position encoding
+ posInfoFormat bool
+ prevFile string
+ prevLine int
+
+ // debugging support
+ written int // bytes written
+ indent int // for trace
+}
+
+// internalError represents an error generated inside this package.
+type internalError string
+
+func (e internalError) Error() string { return "gcimporter: " + string(e) }
+
+func internalErrorf(format string, args ...interface{}) error {
+ return internalError(fmt.Sprintf(format, args...))
+}
+
+// BExportData returns binary export data for pkg.
+// If no file set is provided, position info will be missing.
+func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
+ defer func() {
+ if e := recover(); e != nil {
+ if ierr, ok := e.(internalError); ok {
+ err = ierr
+ return
+ }
+ // Not an internal error; panic again.
+ panic(e)
+ }
+ }()
+
+ p := exporter{
+ fset: fset,
+ strIndex: map[string]int{"": 0}, // empty string is mapped to 0
+ pkgIndex: make(map[*types.Package]int),
+ typIndex: make(map[types.Type]int),
+ posInfoFormat: true, // TODO(gri) might become a flag, eventually
+ }
+
+ // write version info
+ // The version string must start with "version %d" where %d is the version
+ // number. Additional debugging information may follow after a blank; that
+ // text is ignored by the importer.
+ p.rawStringln(fmt.Sprintf("version %d", exportVersion)) + var debug string + if debugFormat { + debug = "debug" + } + p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly + p.bool(trackAllTypes) + p.bool(p.posInfoFormat) + + // --- generic export data --- + + // populate type map with predeclared "known" types + for index, typ := range predeclared() { + p.typIndex[typ] = index + } + if len(p.typIndex) != len(predeclared()) { + return nil, internalError("duplicate entries in type map?") + } + + // write package data + p.pkg(pkg, true) + if trace { + p.tracef("\n") + } + + // write objects + objcount := 0 + scope := pkg.Scope() + for _, name := range scope.Names() { + if !ast.IsExported(name) { + continue + } + if trace { + p.tracef("\n") + } + p.obj(scope.Lookup(name)) + objcount++ + } + + // indicate end of list + if trace { + p.tracef("\n") + } + p.tag(endTag) + + // for self-verification only (redundant) + p.int(objcount) + + if trace { + p.tracef("\n") + } + + // --- end of export data --- + + return p.out.Bytes(), nil +} + +func (p *exporter) pkg(pkg *types.Package, emptypath bool) { + if pkg == nil { + panic(internalError("unexpected nil pkg")) + } + + // if we saw the package before, write its index (>= 0) + if i, ok := p.pkgIndex[pkg]; ok { + p.index('P', i) + return + } + + // otherwise, remember the package, write the package tag (< 0) and package data + if trace { + p.tracef("P%d = { ", len(p.pkgIndex)) + defer p.tracef("} ") + } + p.pkgIndex[pkg] = len(p.pkgIndex) + + p.tag(packageTag) + p.string(pkg.Name()) + if emptypath { + p.string("") + } else { + p.string(pkg.Path()) + } +} + +func (p *exporter) obj(obj types.Object) { + switch obj := obj.(type) { + case *types.Const: + p.tag(constTag) + p.pos(obj) + p.qualifiedName(obj) + p.typ(obj.Type()) + p.value(obj.Val()) + + case *types.TypeName: + if obj.IsAlias() { + p.tag(aliasTag) + p.pos(obj) + p.qualifiedName(obj) + } else { + p.tag(typeTag) + } 
+ p.typ(obj.Type()) + + case *types.Var: + p.tag(varTag) + p.pos(obj) + p.qualifiedName(obj) + p.typ(obj.Type()) + + case *types.Func: + sig := obj.Type().(*types.Signature) + tp := typeParamsForSig(sig) + if tp != nil { + p.tag(func2Tag) + p.paramList(typeParamsToTuple(tp), false) + } else { + p.tag(funcTag) + } + p.pos(obj) + p.qualifiedName(obj) + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) + + default: + panic(internalErrorf("unexpected object %v (%T)", obj, obj)) + } +} + +func (p *exporter) pos(obj types.Object) { + if !p.posInfoFormat { + return + } + + file, line := p.fileLine(obj) + if file == p.prevFile { + // common case: write line delta + // delta == 0 means different file or no line change + delta := line - p.prevLine + p.int(delta) + if delta == 0 { + p.int(-1) // -1 means no file change + } + } else { + // different file + p.int(0) + // Encode filename as length of common prefix with previous + // filename, followed by (possibly empty) suffix. Filenames + // frequently share path prefixes, so this can save a lot + // of space and make export data size less dependent on file + // path length. The suffix is unlikely to be empty because + // file names tend to end in ".go". 
+ n := commonPrefixLen(p.prevFile, file) + p.int(n) // n >= 0 + p.string(file[n:]) // write suffix only + p.prevFile = file + p.int(line) + } + p.prevLine = line +} + +func (p *exporter) fileLine(obj types.Object) (file string, line int) { + if p.fset != nil { + pos := p.fset.Position(obj.Pos()) + file = pos.Filename + line = pos.Line + } + return +} + +func commonPrefixLen(a, b string) int { + if len(a) > len(b) { + a, b = b, a + } + // len(a) <= len(b) + i := 0 + for i < len(a) && a[i] == b[i] { + i++ + } + return i +} + +func (p *exporter) qualifiedName(obj types.Object) { + p.string(obj.Name()) + p.pkg(obj.Pkg(), false) +} + +func (p *exporter) typ(t types.Type) { + if t == nil { + panic(internalError("nil type")) + } + + // check named.Origin + t = originType(t) + + // Possible optimization: Anonymous pointer types *T where + // T is a named type are common. We could canonicalize all + // such types *T to a single type PT = *T. This would lead + // to at most one *T entry in typIndex, and all future *T's + // would be encoded as the respective index directly. Would + // save 1 byte (pointerTag) per *T and reduce the typIndex + // size (at the cost of a canonicalization map). We can do + // this later, without encoding format change. 
+ + // if we saw the type before, write its index (>= 0) + + if i, ok := p.typIndex[t]; ok { + p.index('T', i) + return + } + + // otherwise, remember the type, write the type tag (< 0) and type data + if trackAllTypes { + if trace { + p.tracef("T%d = {>\n", len(p.typIndex)) + defer p.tracef("<\n} ") + } + p.typIndex[t] = len(p.typIndex) + } + + switch t := t.(type) { + case *types.Named: + if !trackAllTypes { + // if we don't track all types, track named types now + p.typIndex[t] = len(p.typIndex) + } + + tp := typeParamsForNamed(t) + if tp != nil { + p.tag(named2Tag) + p.paramList(typeParamsToTuple(tp), false) + } else { + p.tag(namedTag) + } + p.pos(t.Obj()) + p.qualifiedName(t.Obj()) + p.typ(t.Underlying()) + if !types.IsInterface(t) { + p.assocMethods(t) + } + + case *types.Array: + p.tag(arrayTag) + p.int64(t.Len()) + p.typ(t.Elem()) + + case *types.Slice: + p.tag(sliceTag) + p.typ(t.Elem()) + + case *dddSlice: + p.tag(dddTag) + p.typ(t.elem) + + case *types.Struct: + p.tag(structTag) + p.fieldList(t) + + case *types.Pointer: + p.tag(pointerTag) + p.typ(t.Elem()) + + case *types.Signature: + p.tag(signatureTag) + p.paramList(t.Params(), t.Variadic()) + p.paramList(t.Results(), false) + + case *types.Interface: + p.tag(interfaceTag) + p.iface(t) + + case *types.Map: + p.tag(mapTag) + p.typ(t.Key()) + p.typ(t.Elem()) + + case *types.Chan: + p.tag(chanTag) + p.int(int(3 - t.Dir())) // hack + p.typ(t.Elem()) + + case *TypeParam: + p.tag(typeParamTag) + p.typ(t.Constraint()) + case *Union: + p.tag(unionTag) + n := t.Len() + p.int(n) + for i := 0; i < n; i++ { + term := t.Term(i) + p.bool(term.Tilde()) + p.typ(term.Type()) + } + default: + panic(internalErrorf("unexpected type %T: %s", t, t)) + } +} + +func (p *exporter) assocMethods(named *types.Named) { + // Sort methods (for determinism). 
+ var methods []*types.Func + for i := 0; i < named.NumMethods(); i++ { + methods = append(methods, named.Method(i)) + } + sort.Sort(methodsByName(methods)) + + p.int(len(methods)) + + if trace && methods != nil { + p.tracef("associated methods {>\n") + } + + for i, m := range methods { + if trace && i > 0 { + p.tracef("\n") + } + + p.pos(m) + name := m.Name() + p.string(name) + if !exported(name) { + p.pkg(m.Pkg(), false) + } + + sig := m.Type().(*types.Signature) + p.paramList(types.NewTuple(sig.Recv()), false) + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) + p.int(0) // dummy value for go:nointerface pragma - ignored by importer + } + + if trace && methods != nil { + p.tracef("<\n} ") + } +} + +type methodsByName []*types.Func + +func (x methodsByName) Len() int { return len(x) } +func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } +func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() } + +func (p *exporter) fieldList(t *types.Struct) { + if trace && t.NumFields() > 0 { + p.tracef("fields {>\n") + defer p.tracef("<\n} ") + } + + p.int(t.NumFields()) + for i := 0; i < t.NumFields(); i++ { + if trace && i > 0 { + p.tracef("\n") + } + p.field(t.Field(i)) + p.string(t.Tag(i)) + } +} + +func (p *exporter) field(f *types.Var) { + if !f.IsField() { + panic(internalError("field expected")) + } + + p.pos(f) + p.fieldName(f) + p.typ(f.Type()) +} + +func (p *exporter) iface(t *types.Interface) { + // TODO(gri): enable importer to load embedded interfaces, + // then emit Embeddeds and ExplicitMethods separately here. 
+ n := t.NumEmbeddeds() + p.int(n) + for i := 0; i < n; i++ { + p.typ(t.EmbeddedType(i)) + } + + n = t.NumMethods() + if trace && n > 0 { + p.tracef("methods {>\n") + defer p.tracef("<\n} ") + } + p.int(n) + for i := 0; i < n; i++ { + if trace && i > 0 { + p.tracef("\n") + } + p.method(t.Method(i)) + } +} + +func (p *exporter) method(m *types.Func) { + sig := m.Type().(*types.Signature) + if sig.Recv() == nil { + panic(internalError("method expected")) + } + + p.pos(m) + p.string(m.Name()) + if m.Name() != "_" && !ast.IsExported(m.Name()) { + p.pkg(m.Pkg(), false) + } + + // interface method; no need to encode receiver. + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) +} + +func (p *exporter) fieldName(f *types.Var) { + name := f.Name() + + if f.Anonymous() { + // anonymous field - we distinguish between 3 cases: + // 1) field name matches base type name and is exported + // 2) field name matches base type name and is not exported + // 3) field name doesn't match base type name (alias name) + bname := basetypeName(f.Type()) + if name == bname { + if ast.IsExported(name) { + name = "" // 1) we don't need to know the field name or package + } else { + name = "?" // 2) use unexported name "?" 
to force package export + } + } else { + // 3) indicate alias and export name as is + // (this requires an extra "@" but this is a rare case) + p.string("@") + } + } + + p.string(name) + if name != "" && !ast.IsExported(name) { + p.pkg(f.Pkg(), false) + } +} + +func basetypeName(typ types.Type) string { + switch typ := deref(typ).(type) { + case *types.Basic: + return typ.Name() + case *types.Named: + return typ.Obj().Name() + default: + return "" // unnamed type + } +} + +func (p *exporter) paramList(params *types.Tuple, variadic bool) { + // use negative length to indicate unnamed parameters + // (look at the first parameter only since either all + // names are present or all are absent) + n := params.Len() + if n > 0 && params.At(0).Name() == "" { + n = -n + } + p.int(n) + for i := 0; i < params.Len(); i++ { + q := params.At(i) + t := q.Type() + if variadic && i == params.Len()-1 { + t = &dddSlice{t.(*types.Slice).Elem()} + } + p.typ(t) + if n > 0 { + name := q.Name() + p.string(name) + if name != "_" { + p.pkg(q.Pkg(), false) + } + } + p.string("") // no compiler-specific info + } +} + +func (p *exporter) value(x constant.Value) { + if trace { + p.tracef("= ") + } + + switch x.Kind() { + case constant.Bool: + tag := falseTag + if constant.BoolVal(x) { + tag = trueTag + } + p.tag(tag) + + case constant.Int: + if v, exact := constant.Int64Val(x); exact { + // common case: x fits into an int64 - use compact encoding + p.tag(int64Tag) + p.int64(v) + return + } + // uncommon case: large x - use float encoding + // (powers of 2 will be encoded efficiently with exponent) + p.tag(floatTag) + p.float(constant.ToFloat(x)) + + case constant.Float: + p.tag(floatTag) + p.float(x) + + case constant.Complex: + p.tag(complexTag) + p.float(constant.Real(x)) + p.float(constant.Imag(x)) + + case constant.String: + p.tag(stringTag) + p.string(constant.StringVal(x)) + + case constant.Unknown: + // package contains type errors + p.tag(unknownTag) + + default: + 
panic(internalErrorf("unexpected value %v (%T)", x, x)) + } +} + +func (p *exporter) float(x constant.Value) { + if x.Kind() != constant.Float { + panic(internalErrorf("unexpected constant %v, want float", x)) + } + // extract sign (there is no -0) + sign := constant.Sign(x) + if sign == 0 { + // x == 0 + p.int(0) + return + } + // x != 0 + + var f big.Float + if v, exact := constant.Float64Val(x); exact { + // float64 + f.SetFloat64(v) + } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { + // TODO(gri): add big.Rat accessor to constant.Value. + r := valueToRat(num) + f.SetRat(r.Quo(r, valueToRat(denom))) + } else { + // Value too large to represent as a fraction => inaccessible. + // TODO(gri): add big.Float accessor to constant.Value. + f.SetFloat64(math.MaxFloat64) // FIXME + } + + // extract exponent such that 0.5 <= m < 1.0 + var m big.Float + exp := f.MantExp(&m) + + // extract mantissa as *big.Int + // - set exponent large enough so mant satisfies mant.IsInt() + // - get *big.Int from mant + m.SetMantExp(&m, int(m.MinPrec())) + mant, acc := m.Int(nil) + if acc != big.Exact { + panic(internalError("internal error")) + } + + p.int(sign) + p.int(exp) + p.string(string(mant.Bytes())) +} + +func valueToRat(x constant.Value) *big.Rat { + // Convert little-endian to big-endian. + // I can't believe this is necessary. 
+ bytes := constant.Bytes(x) + for i := 0; i < len(bytes)/2; i++ { + bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] + } + return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) +} + +func (p *exporter) bool(b bool) bool { + if trace { + p.tracef("[") + defer p.tracef("= %v] ", b) + } + + x := 0 + if b { + x = 1 + } + p.int(x) + return b +} + +// ---------------------------------------------------------------------------- +// Low-level encoders + +func (p *exporter) index(marker byte, index int) { + if index < 0 { + panic(internalError("invalid index < 0")) + } + if debugFormat { + p.marker('t') + } + if trace { + p.tracef("%c%d ", marker, index) + } + p.rawInt64(int64(index)) +} + +func (p *exporter) tag(tag int) { + if tag >= 0 { + panic(internalError("invalid tag >= 0")) + } + if debugFormat { + p.marker('t') + } + if trace { + p.tracef("%s ", tagString[-tag]) + } + p.rawInt64(int64(tag)) +} + +func (p *exporter) int(x int) { + p.int64(int64(x)) +} + +func (p *exporter) int64(x int64) { + if debugFormat { + p.marker('i') + } + if trace { + p.tracef("%d ", x) + } + p.rawInt64(x) +} + +func (p *exporter) string(s string) { + if debugFormat { + p.marker('s') + } + if trace { + p.tracef("%q ", s) + } + // if we saw the string before, write its index (>= 0) + // (the empty string is mapped to 0) + if i, ok := p.strIndex[s]; ok { + p.rawInt64(int64(i)) + return + } + // otherwise, remember string and write its negative length and bytes + p.strIndex[s] = len(p.strIndex) + p.rawInt64(-int64(len(s))) + for i := 0; i < len(s); i++ { + p.rawByte(s[i]) + } +} + +// marker emits a marker byte and position information which makes +// it easy for a reader to detect if it is "out of sync". Used for +// debugFormat format only. +func (p *exporter) marker(m byte) { + p.rawByte(m) + // Enable this for help tracking down the location + // of an incorrect marker when running in debugFormat. 
+ if false && trace { + p.tracef("#%d ", p.written) + } + p.rawInt64(int64(p.written)) +} + +// rawInt64 should only be used by low-level encoders. +func (p *exporter) rawInt64(x int64) { + var tmp [binary.MaxVarintLen64]byte + n := binary.PutVarint(tmp[:], x) + for i := 0; i < n; i++ { + p.rawByte(tmp[i]) + } +} + +// rawStringln should only be used to emit the initial version string. +func (p *exporter) rawStringln(s string) { + for i := 0; i < len(s); i++ { + p.rawByte(s[i]) + } + p.rawByte('\n') +} + +// rawByte is the bottleneck interface to write to p.out. +// rawByte escapes b as follows (any encoding does that +// hides '$'): +// +// '$' => '|' 'S' +// '|' => '|' '|' +// +// Necessary so other tools can find the end of the +// export data by searching for "$$". +// rawByte should only be used by low-level encoders. +func (p *exporter) rawByte(b byte) { + switch b { + case '$': + // write '$' as '|' 'S' + b = 'S' + fallthrough + case '|': + // write '|' as '|' '|' + p.out.WriteByte('|') + p.written++ + } + p.out.WriteByte(b) + p.written++ +} + +// tracef is like fmt.Printf but it rewrites the format string +// to take care of indentation. +func (p *exporter) tracef(format string, args ...interface{}) { + if strings.ContainsAny(format, "<>\n") { + var buf bytes.Buffer + for i := 0; i < len(format); i++ { + // no need to deal with runes + ch := format[i] + switch ch { + case '>': + p.indent++ + continue + case '<': + p.indent-- + continue + } + buf.WriteByte(ch) + if ch == '\n' { + for j := p.indent; j > 0; j-- { + buf.WriteString(". ") + } + } + } + format = buf.String() + } + fmt.Printf(format, args...) +} + +// Debugging support. 
+// (tagString is only used when tracing is enabled) +var tagString = [...]string{ + // Packages + -packageTag: "package", + + // Types + -namedTag: "named type", + -arrayTag: "array", + -sliceTag: "slice", + -dddTag: "ddd", + -structTag: "struct", + -pointerTag: "pointer", + -signatureTag: "signature", + -interfaceTag: "interface", + -mapTag: "map", + -chanTag: "chan", + + // Values + -falseTag: "false", + -trueTag: "true", + -int64Tag: "int64", + -floatTag: "float", + -fractionTag: "fraction", + -complexTag: "complex", + -stringTag: "string", + -unknownTag: "unknown", + + // Type aliases + -aliasTag: "alias", +} diff --git a/internal/gcimporter/bexport_test.go b/internal/gcimporter/bexport_test.go new file mode 100644 index 00000000..82a0806a --- /dev/null +++ b/internal/gcimporter/bexport_test.go @@ -0,0 +1,419 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gcimporter_test + +import ( + "fmt" + "go/ast" + "go/build" + "go/constant" + "go/parser" + "go/token" + "go/types" + "reflect" + "runtime" + "strings" + "testing" + + "github.com/visualfc/gocode/internal/gcimporter" + "golang.org/x/tools/go/buildutil" + "golang.org/x/tools/go/loader" +) + +var isRace = false + +func TestBExportData_stdlib(t *testing.T) { + if runtime.Compiler == "gccgo" { + t.Skip("gccgo standard library is inaccessible") + } + if runtime.GOOS == "android" { + t.Skipf("incomplete std lib on %s", runtime.GOOS) + } + if isRace { + t.Skipf("stdlib tests take too long in race mode and flake on builders") + } + + // Load, parse and type-check the program. 
+ ctxt := build.Default // copy + ctxt.GOPATH = "" // disable GOPATH + conf := loader.Config{ + Build: &ctxt, + AllowErrors: true, + } + for _, path := range buildutil.AllPackages(conf.Build) { + conf.Import(path) + } + + // Create a package containing type and value errors to ensure + // they are properly encoded/decoded. + f, err := conf.ParseFile("haserrors/haserrors.go", `package haserrors +const UnknownValue = "" + 0 +type UnknownType undefined +`) + if err != nil { + t.Fatal(err) + } + conf.CreateFromFiles("haserrors", f) + + prog, err := conf.Load() + if err != nil { + t.Fatalf("Load failed: %v", err) + } + + numPkgs := len(prog.AllPackages) + if want := 248; numPkgs < want { + t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want) + } + + for pkg, info := range prog.AllPackages { + if info.Files == nil { + continue // empty directory + } + exportdata, err := gcimporter.BExportData(conf.Fset, pkg) + if err != nil { + t.Fatal(err) + } + + imports := make(map[string]*types.Package) + fset2 := token.NewFileSet() + n, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg.Path()) + if err != nil { + t.Errorf("BImportData(%s): %v", pkg.Path(), err) + continue + } + if n != len(exportdata) { + t.Errorf("BImportData(%s) decoded %d bytes, want %d", + pkg.Path(), n, len(exportdata)) + } + + // Compare the packages' corresponding members. 
+ for _, name := range pkg.Scope().Names() { + if !ast.IsExported(name) { + continue + } + obj1 := pkg.Scope().Lookup(name) + obj2 := pkg2.Scope().Lookup(name) + if obj2 == nil { + t.Errorf("%s.%s not found, want %s", pkg.Path(), name, obj1) + continue + } + + fl1 := fileLine(conf.Fset, obj1) + fl2 := fileLine(fset2, obj2) + if fl1 != fl2 { + t.Errorf("%s.%s: got posn %s, want %s", + pkg.Path(), name, fl2, fl1) + } + + if err := equalObj(obj1, obj2); err != nil { + t.Errorf("%s.%s: %s\ngot: %s\nwant: %s", + pkg.Path(), name, err, obj2, obj1) + } + } + } +} + +func fileLine(fset *token.FileSet, obj types.Object) string { + posn := fset.Position(obj.Pos()) + return fmt.Sprintf("%s:%d", posn.Filename, posn.Line) +} + +// equalObj reports how x and y differ. They are assumed to belong to +// different universes so cannot be compared directly. +func equalObj(x, y types.Object) error { + if reflect.TypeOf(x) != reflect.TypeOf(y) { + return fmt.Errorf("%T vs %T", x, y) + } + xt := x.Type() + yt := y.Type() + switch x.(type) { + case *types.Var, *types.Func: + // ok + case *types.Const: + xval := x.(*types.Const).Val() + yval := y.(*types.Const).Val() + // Use string comparison for floating-point values since rounding is permitted. + if constant.Compare(xval, token.NEQ, yval) && + !(xval.Kind() == constant.Float && xval.String() == yval.String()) { + return fmt.Errorf("unequal constants %s vs %s", xval, yval) + } + case *types.TypeName: + xt = xt.Underlying() + yt = yt.Underlying() + default: + return fmt.Errorf("unexpected %T", x) + } + return equalType(xt, yt) +} + +func equalType(x, y types.Type) error { + if reflect.TypeOf(x) != reflect.TypeOf(y) { + return fmt.Errorf("unequal kinds: %T vs %T", x, y) + } + switch x := x.(type) { + case *types.Interface: + y := y.(*types.Interface) + // TODO(gri): enable separate emission of Embedded interfaces + // and ExplicitMethods then use this logic. 
+ // if x.NumEmbeddeds() != y.NumEmbeddeds() { + // return fmt.Errorf("unequal number of embedded interfaces: %d vs %d", + // x.NumEmbeddeds(), y.NumEmbeddeds()) + // } + // for i := 0; i < x.NumEmbeddeds(); i++ { + // xi := x.Embedded(i) + // yi := y.Embedded(i) + // if xi.String() != yi.String() { + // return fmt.Errorf("mismatched %th embedded interface: %s vs %s", + // i, xi, yi) + // } + // } + // if x.NumExplicitMethods() != y.NumExplicitMethods() { + // return fmt.Errorf("unequal methods: %d vs %d", + // x.NumExplicitMethods(), y.NumExplicitMethods()) + // } + // for i := 0; i < x.NumExplicitMethods(); i++ { + // xm := x.ExplicitMethod(i) + // ym := y.ExplicitMethod(i) + // if xm.Name() != ym.Name() { + // return fmt.Errorf("mismatched %th method: %s vs %s", i, xm, ym) + // } + // if err := equalType(xm.Type(), ym.Type()); err != nil { + // return fmt.Errorf("mismatched %s method: %s", xm.Name(), err) + // } + // } + if x.NumMethods() != y.NumMethods() { + return fmt.Errorf("unequal methods: %d vs %d", + x.NumMethods(), y.NumMethods()) + } + for i := 0; i < x.NumMethods(); i++ { + xm := x.Method(i) + ym := y.Method(i) + if xm.Name() != ym.Name() { + return fmt.Errorf("mismatched %dth method: %s vs %s", i, xm, ym) + } + if err := equalType(xm.Type(), ym.Type()); err != nil { + return fmt.Errorf("mismatched %s method: %s", xm.Name(), err) + } + } + case *types.Array: + y := y.(*types.Array) + if x.Len() != y.Len() { + return fmt.Errorf("unequal array lengths: %d vs %d", x.Len(), y.Len()) + } + if err := equalType(x.Elem(), y.Elem()); err != nil { + return fmt.Errorf("array elements: %s", err) + } + case *types.Basic: + y := y.(*types.Basic) + if x.Kind() != y.Kind() { + return fmt.Errorf("unequal basic types: %s vs %s", x, y) + } + case *types.Chan: + y := y.(*types.Chan) + if x.Dir() != y.Dir() { + return fmt.Errorf("unequal channel directions: %d vs %d", x.Dir(), y.Dir()) + } + if err := equalType(x.Elem(), y.Elem()); err != nil { + return 
fmt.Errorf("channel elements: %s", err) + } + case *types.Map: + y := y.(*types.Map) + if err := equalType(x.Key(), y.Key()); err != nil { + return fmt.Errorf("map keys: %s", err) + } + if err := equalType(x.Elem(), y.Elem()); err != nil { + return fmt.Errorf("map values: %s", err) + } + case *types.Named: + y := y.(*types.Named) + if x.String() != y.String() { + return fmt.Errorf("unequal named types: %s vs %s", x, y) + } + case *types.Pointer: + y := y.(*types.Pointer) + if err := equalType(x.Elem(), y.Elem()); err != nil { + return fmt.Errorf("pointer elements: %s", err) + } + case *types.Signature: + y := y.(*types.Signature) + if err := equalType(x.Params(), y.Params()); err != nil { + return fmt.Errorf("parameters: %s", err) + } + if err := equalType(x.Results(), y.Results()); err != nil { + return fmt.Errorf("results: %s", err) + } + if x.Variadic() != y.Variadic() { + return fmt.Errorf("unequal varidicity: %t vs %t", + x.Variadic(), y.Variadic()) + } + if (x.Recv() != nil) != (y.Recv() != nil) { + return fmt.Errorf("unequal receivers: %s vs %s", x.Recv(), y.Recv()) + } + if x.Recv() != nil { + // TODO(adonovan): fix: this assertion fires for interface methods. + // The type of the receiver of an interface method is a named type + // if the Package was loaded from export data, or an unnamed (interface) + // type if the Package was produced by type-checking ASTs. 
+ // if err := equalType(x.Recv().Type(), y.Recv().Type()); err != nil { + // return fmt.Errorf("receiver: %s", err) + // } + } + case *types.Slice: + y := y.(*types.Slice) + if err := equalType(x.Elem(), y.Elem()); err != nil { + return fmt.Errorf("slice elements: %s", err) + } + case *types.Struct: + y := y.(*types.Struct) + if x.NumFields() != y.NumFields() { + return fmt.Errorf("unequal struct fields: %d vs %d", + x.NumFields(), y.NumFields()) + } + for i := 0; i < x.NumFields(); i++ { + xf := x.Field(i) + yf := y.Field(i) + if xf.Name() != yf.Name() { + return fmt.Errorf("mismatched fields: %s vs %s", xf, yf) + } + if err := equalType(xf.Type(), yf.Type()); err != nil { + return fmt.Errorf("struct field %s: %s", xf.Name(), err) + } + if x.Tag(i) != y.Tag(i) { + return fmt.Errorf("struct field %s has unequal tags: %q vs %q", + xf.Name(), x.Tag(i), y.Tag(i)) + } + } + case *types.Tuple: + y := y.(*types.Tuple) + if x.Len() != y.Len() { + return fmt.Errorf("unequal tuple lengths: %d vs %d", x.Len(), y.Len()) + } + for i := 0; i < x.Len(); i++ { + if err := equalType(x.At(i).Type(), y.At(i).Type()); err != nil { + return fmt.Errorf("tuple element %d: %s", i, err) + } + } + } + return nil +} + +// TestVeryLongFile tests the position of an import object declared in +// a very long input file. Line numbers greater than maxlines are +// reported as line 1, not garbage or token.NoPos. 
+func TestVeryLongFile(t *testing.T) { + // parse and typecheck + longFile := "package foo" + strings.Repeat("\n", 123456) + "var X int" + fset1 := token.NewFileSet() + f, err := parser.ParseFile(fset1, "foo.go", longFile, 0) + if err != nil { + t.Fatal(err) + } + var conf types.Config + pkg, err := conf.Check("foo", fset1, []*ast.File{f}, nil) + if err != nil { + t.Fatal(err) + } + + // export + exportdata, err := gcimporter.BExportData(fset1, pkg) + if err != nil { + t.Fatal(err) + } + + // import + imports := make(map[string]*types.Package) + fset2 := token.NewFileSet() + _, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg.Path()) + if err != nil { + t.Fatalf("BImportData(%s): %v", pkg.Path(), err) + } + + // compare + posn1 := fset1.Position(pkg.Scope().Lookup("X").Pos()) + posn2 := fset2.Position(pkg2.Scope().Lookup("X").Pos()) + if want := "foo.go:1:1"; posn2.String() != want { + t.Errorf("X position = %s, want %s (orig was %s)", + posn2, want, posn1) + } +} + +const src = ` +package p + +type ( + T0 = int32 + T1 = struct{} + T2 = struct{ T1 } + Invalid = foo // foo is undeclared +) +` + +func checkPkg(t *testing.T, pkg *types.Package, label string) { + T1 := types.NewStruct(nil, nil) + T2 := types.NewStruct([]*types.Var{types.NewField(0, pkg, "T1", T1, true)}, nil) + + for _, test := range []struct { + name string + typ types.Type + }{ + {"T0", types.Typ[types.Int32]}, + {"T1", T1}, + {"T2", T2}, + {"Invalid", types.Typ[types.Invalid]}, + } { + obj := pkg.Scope().Lookup(test.name) + if obj == nil { + t.Errorf("%s: %s not found", label, test.name) + continue + } + tname, _ := obj.(*types.TypeName) + if tname == nil { + t.Errorf("%s: %v not a type name", label, obj) + continue + } + if !tname.IsAlias() { + t.Errorf("%s: %v: not marked as alias", label, tname) + continue + } + if got := tname.Type(); !types.Identical(got, test.typ) { + t.Errorf("%s: %v: got %v; want %v", label, tname, got, test.typ) + } + } +} + +func TestTypeAliases(t 
*testing.T) {
+	// parse and typecheck
+	fset1 := token.NewFileSet()
+	f, err := parser.ParseFile(fset1, "p.go", src, 0)
+	if err != nil {
+		t.Fatal(err)
+	}
+	var conf types.Config
+	pkg1, err := conf.Check("p", fset1, []*ast.File{f}, nil)
+	if err == nil {
+		// foo is undeclared in src; we should see an error
+		t.Fatal("invalid source type-checked without error")
+	}
+	if pkg1 == nil {
+		// despite incorrect src we should see a (partially) type-checked package
+		t.Fatal("nil package returned")
+	}
+	checkPkg(t, pkg1, "export")
+
+	// export
+	exportdata, err := gcimporter.BExportData(fset1, pkg1)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	// import
+	imports := make(map[string]*types.Package)
+	fset2 := token.NewFileSet()
+	_, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg1.Path())
+	if err != nil {
+		t.Fatalf("BImportData(%s): %v", pkg1.Path(), err)
+	}
+	checkPkg(t, pkg2, "import")
+}
diff --git a/internal/gcimporter/bimport.go b/internal/gcimporter/bimport.go
new file mode 100644
index 00000000..0bdb47b1
--- /dev/null
+++ b/internal/gcimporter/bimport.go
@@ -0,0 +1,1044 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go.
+ +package gcimporter + +import ( + "encoding/binary" + "fmt" + "go/constant" + "go/token" + "go/types" + "sort" + "strconv" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +type importer struct { + imports map[string]*types.Package + data []byte + importpath string + buf []byte // for reading strings + version int // export format version + + // object lists + strList []string // in order of appearance + pathList []string // in order of appearance + pkgList []*types.Package // in order of appearance + typList []types.Type // in order of appearance + interfaceList []*types.Interface // for delayed completion only + trackAllTypes bool + + // position encoding + posInfoFormat bool + prevFile string + prevLine int + fake fakeFileSet + + // debugging support + debugFormat bool + read int // bytes read +} + +// BImportData imports a package from the serialized package data +// and returns the number of bytes consumed and a reference to the package. +// If the export data version is not recognized or the format is otherwise +// compromised, an error is returned. +func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { + // catch panics and return them as errors + const currentVersion = 6 + version := -1 // unknown version + defer func() { + if e := recover(); e != nil { + // Return a (possibly nil or incomplete) package unchanged (see #16088). 
+ if version > currentVersion { + err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e) + } else { + err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e) + } + } + }() + + p := importer{ + imports: imports, + data: data, + importpath: path, + version: version, + strList: []string{""}, // empty string is mapped to 0 + pathList: []string{""}, // empty string is mapped to 0 + fake: fakeFileSet{ + fset: fset, + files: make(map[string]*token.File), + }, + } + + // read version info + var versionstr string + if b := p.rawByte(); b == 'c' || b == 'd' { + // Go1.7 encoding; first byte encodes low-level + // encoding format (compact vs debug). + // For backward-compatibility only (avoid problems with + // old installed packages). Newly compiled packages use + // the extensible format string. + // TODO(gri) Remove this support eventually; after Go1.8. + if b == 'd' { + p.debugFormat = true + } + p.trackAllTypes = p.rawByte() == 'a' + p.posInfoFormat = p.int() != 0 + versionstr = p.string() + if versionstr == "v1" { + version = 0 + } + } else { + // Go1.8 extensible encoding + // read version string and extract version number (ignore anything after the version number) + versionstr = p.rawStringln(b) + if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" { + if v, err := strconv.Atoi(s[1]); err == nil && v > 0 { + version = v + } + } + } + p.version = version + + // read version specific flags - extend as necessary + switch p.version { + // case currentVersion: + // ... 
+ // fallthrough + case currentVersion, 5, 4, 3, 2, 1: + p.debugFormat = p.rawStringln(p.rawByte()) == "debug" + p.trackAllTypes = p.int() != 0 + p.posInfoFormat = p.int() != 0 + case 0: + // Go1.7 encoding format - nothing to do here + default: + errorf("unknown bexport format version %d (%q)", p.version, versionstr) + } + + // --- generic export data --- + + // populate typList with predeclared "known" types + p.typList = append(p.typList, predeclared()...) + + // read package data + pkg = p.pkg() + + // read objects of phase 1 only (see cmd/compile/internal/gc/bexport.go) + objcount := 0 + for { + tag := p.tagOrIndex() + if tag == endTag { + break + } + p.obj(tag) + objcount++ + } + + // self-verification + if count := p.int(); count != objcount { + errorf("got %d objects; want %d", objcount, count) + } + + // ignore compiler-specific import data + + // complete interfaces + // TODO(gri) re-investigate if we still need to do this in a delayed fashion + for _, typ := range p.interfaceList { + typ.Complete() + } + + // record all referenced packages as imports + list := append(([]*types.Package)(nil), p.pkgList[1:]...) 
+ sort.Sort(byPath(list)) + pkg.SetImports(list) + + // package was imported completely and without errors + pkg.MarkComplete() + + return p.read, pkg, nil +} + +func errorf(format string, args ...interface{}) { + panic(fmt.Sprintf(format, args...)) +} + +func (p *importer) pkg() *types.Package { + // if the package was seen before, i is its index (>= 0) + i := p.tagOrIndex() + if i >= 0 { + return p.pkgList[i] + } + + // otherwise, i is the package tag (< 0) + if i != packageTag { + errorf("unexpected package tag %d version %d", i, p.version) + } + + // read package data + name := p.string() + var path string + if p.version >= 5 { + path = p.path() + } else { + path = p.string() + } + if p.version >= 6 { + p.int() // package height; unused by go/types + } + + // we should never see an empty package name + if name == "" { + errorf("empty package name in import") + } + + // an empty path denotes the package we are currently importing; + // it must be the first package we see + if (path == "") != (len(p.pkgList) == 0) { + errorf("package path %q for pkg index %d", path, len(p.pkgList)) + } + + // if the package was imported before, use that one; otherwise create a new one + if path == "" { + path = p.importpath + } + pkg := p.imports[path] + if pkg == nil { + pkg = types.NewPackage(path, name) + p.imports[path] = pkg + } else if pkg.Name() != name { + errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path) + } + p.pkgList = append(p.pkgList, pkg) + + return pkg +} + +// objTag returns the tag value for each object kind. 
+func objTag(obj types.Object) int {
+	switch obj.(type) {
+	case *types.Const:
+		return constTag
+	case *types.TypeName:
+		return typeTag
+	case *types.Var:
+		return varTag
+	case *types.Func:
+		return funcTag
+	default:
+		errorf("unexpected object: %v (%T)", obj, obj) // panics
+		panic("unreachable")
+	}
+}
+
+func sameObj(a, b types.Object) bool {
+	// Because unnamed types are not canonicalized, we cannot simply compare types for
+	// (pointer) identity.
+	// Ideally we'd check equality of constant values as well, but this is good enough.
+	return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type())
+}
+
+func (p *importer) declare(obj types.Object) {
+	pkg := obj.Pkg()
+	if alt := pkg.Scope().Insert(obj); alt != nil {
+		// This can only trigger if we import a (non-type) object a second time.
+		// Excluding type aliases, this cannot happen because 1) we only import a package
+		// once; and 2) we ignore compiler-specific export data which may contain
+		// functions whose inlined function bodies refer to other functions that
+		// were already imported.
+		// However, type aliases require reexporting the original type, so we need
+		// to allow it (see also the comment in cmd/compile/internal/gc/bimport.go,
+		// method importer.obj, switch case importing functions).
+		// TODO(gri) review/update this comment once the gc compiler handles type aliases.
+ if !sameObj(obj, alt) { + errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt) + } + } +} + +func (p *importer) obj(tag int) { + switch tag { + case constTag: + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil, nil) + val := p.value() + p.declare(types.NewConst(pos, pkg, name, typ, val)) + + case aliasTag: + // TODO(gri) verify type alias hookup is correct + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil, nil) + p.declare(types.NewTypeName(pos, pkg, name, typ)) + + case typeTag: + p.typ(nil, nil) + + case varTag: + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil, nil) + p.declare(types.NewVar(pos, pkg, name, typ)) + + case funcTag: + pos := p.pos() + pkg, name := p.qualifiedName() + params, isddd := p.paramList() + result, _ := p.paramList() + sig := types.NewSignature(nil, params, result, isddd) + p.declare(types.NewFunc(pos, pkg, name, sig)) + + default: + errorf("unexpected object tag %d", tag) + } +} + +const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go + +func (p *importer) pos() token.Pos { + if !p.posInfoFormat { + return token.NoPos + } + + file := p.prevFile + line := p.prevLine + delta := p.int() + line += delta + if p.version >= 5 { + if delta == deltaNewFile { + if n := p.int(); n >= 0 { + // file changed + file = p.path() + line = n + } + } + } else { + if delta == 0 { + if n := p.int(); n >= 0 { + // file changed + file = p.prevFile[:n] + p.string() + line = p.int() + } + } + } + p.prevFile = file + p.prevLine = line + + return p.fake.pos(file, line) +} + +// Synthesize a token.Pos +type fakeFileSet struct { + fset *token.FileSet + files map[string]*token.File +} + +func (s *fakeFileSet) pos(file string, line int) token.Pos { + // Since we don't know the set of needed file positions, we + // reserve maxlines positions per file. 
+ const maxlines = 64 * 1024 + f := s.files[file] + if f == nil { + f = s.fset.AddFile(file, -1, maxlines) + s.files[file] = f + // Allocate the fake linebreak indices on first use. + // TODO(adonovan): opt: save ~512KB using a more complex scheme? + fakeLinesOnce.Do(func() { + fakeLines = make([]int, maxlines) + for i := range fakeLines { + fakeLines[i] = i + } + }) + f.SetLines(fakeLines) + } + + if line > maxlines { + line = 1 + } + + // Treat the file as if it contained only newlines + // and column=1: use the line number as the offset. + return f.Pos(line - 1) +} + +var ( + fakeLines []int + fakeLinesOnce sync.Once +) + +func (p *importer) qualifiedName() (pkg *types.Package, name string) { + name = p.string() + pkg = p.pkg() + return +} + +func (p *importer) record(t types.Type) { + p.typList = append(p.typList, t) +} + +// A dddSlice is a types.Type representing ...T parameters. +// It only appears for parameter types and does not escape +// the importer. +type dddSlice struct { + elem types.Type +} + +func (t *dddSlice) Underlying() types.Type { return t } +func (t *dddSlice) String() string { return "..." + t.elem.String() } + +// parent is the package which declared the type; parent == nil means +// the package currently imported. The parent package is needed for +// exported struct fields and interface methods which don't contain +// explicit package information in the export data. +// +// A non-nil tname is used as the "owner" of the result type; i.e., +// the result type is the underlying type of tname. tname is used +// to give interface methods a named receiver type where possible. 
+func (p *importer) typ(parent *types.Package, tname *types.Named) types.Type { + // if the type was seen before, i is its index (>= 0) + i := p.tagOrIndex() + if i >= 0 { + return p.typList[i] + } + + // otherwise, i is the type tag (< 0) + switch i { + case namedTag: + // read type object + pos := p.pos() + parent, name := p.qualifiedName() + scope := parent.Scope() + obj := scope.Lookup(name) + + // if the object doesn't exist yet, create and insert it + if obj == nil { + obj = types.NewTypeName(pos, parent, name, nil) + scope.Insert(obj) + } + + if _, ok := obj.(*types.TypeName); !ok { + errorf("pkg = %s, name = %s => %s", parent, name, obj) + } + + // associate new named type with obj if it doesn't exist yet + t0 := types.NewNamed(obj.(*types.TypeName), nil, nil) + + // but record the existing type, if any + tname := obj.Type().(*types.Named) // tname is either t0 or the existing type + p.record(tname) + + // read underlying type + t0.SetUnderlying(p.typ(parent, t0)) + + // interfaces don't have associated methods + if types.IsInterface(t0) { + return tname + } + + // read associated methods + for i := p.int(); i > 0; i-- { + // TODO(gri) replace this with something closer to fieldName + pos := p.pos() + name := p.string() + if !exported(name) { + p.pkg() + } + + recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver? 
+ params, isddd := p.paramList() + result, _ := p.paramList() + p.int() // go:nointerface pragma - discarded + + sig := types.NewSignature(recv.At(0), params, result, isddd) + t0.AddMethod(types.NewFunc(pos, parent, name, sig)) + } + + return tname + + case arrayTag: + t := new(types.Array) + if p.trackAllTypes { + p.record(t) + } + + n := p.int64() + *t = *types.NewArray(p.typ(parent, nil), n) + return t + + case sliceTag: + t := new(types.Slice) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewSlice(p.typ(parent, nil)) + return t + + case dddTag: + t := new(dddSlice) + if p.trackAllTypes { + p.record(t) + } + + t.elem = p.typ(parent, nil) + return t + + case structTag: + t := new(types.Struct) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewStruct(p.fieldList(parent)) + return t + + case pointerTag: + t := new(types.Pointer) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewPointer(p.typ(parent, nil)) + return t + + case signatureTag: + t := new(types.Signature) + if p.trackAllTypes { + p.record(t) + } + + params, isddd := p.paramList() + result, _ := p.paramList() + *t = *types.NewSignature(nil, params, result, isddd) + return t + + case interfaceTag: + // Create a dummy entry in the type list. This is safe because we + // cannot expect the interface type to appear in a cycle, as any + // such cycle must contain a named type which would have been + // first defined earlier. + // TODO(gri) Is this still true now that we have type aliases? + // See issue #23225. 
+ n := len(p.typList) + if p.trackAllTypes { + p.record(nil) + } + + var embeddeds []types.Type + for n := p.int(); n > 0; n-- { + p.pos() + embeddeds = append(embeddeds, p.typ(parent, nil)) + } + + t := newInterface(p.methodList(parent, tname), embeddeds) + p.interfaceList = append(p.interfaceList, t) + if p.trackAllTypes { + p.typList[n] = t + } + return t + + case mapTag: + t := new(types.Map) + if p.trackAllTypes { + p.record(t) + } + + key := p.typ(parent, nil) + val := p.typ(parent, nil) + *t = *types.NewMap(key, val) + return t + + case chanTag: + t := new(types.Chan) + if p.trackAllTypes { + p.record(t) + } + + dir := chanDir(p.int()) + val := p.typ(parent, nil) + *t = *types.NewChan(dir, val) + return t + + default: + errorf("unexpected type tag %d", i) // panics + panic("unreachable") + } +} + +func chanDir(d int) types.ChanDir { + // tag values must match the constants in cmd/compile/internal/gc/go.go + switch d { + case 1 /* Crecv */ : + return types.RecvOnly + case 2 /* Csend */ : + return types.SendOnly + case 3 /* Cboth */ : + return types.SendRecv + default: + errorf("unexpected channel dir %d", d) + return 0 + } +} + +func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) { + if n := p.int(); n > 0 { + fields = make([]*types.Var, n) + tags = make([]string, n) + for i := range fields { + fields[i], tags[i] = p.field(parent) + } + } + return +} + +func (p *importer) field(parent *types.Package) (*types.Var, string) { + pos := p.pos() + pkg, name, alias := p.fieldName(parent) + typ := p.typ(parent, nil) + tag := p.string() + + anonymous := false + if name == "" { + // anonymous field - typ must be T or *T and T must be a type name + switch typ := deref(typ).(type) { + case *types.Basic: // basic types are named types + pkg = nil // // objects defined in Universe scope have no package + name = typ.Name() + case *types.Named: + name = typ.Obj().Name() + default: + errorf("named base type expected") + } + anonymous = 
true + } else if alias { + // anonymous field: we have an explicit name because it's an alias + anonymous = true + } + + return types.NewField(pos, pkg, name, typ, anonymous), tag +} + +func (p *importer) methodList(parent *types.Package, baseType *types.Named) (methods []*types.Func) { + if n := p.int(); n > 0 { + methods = make([]*types.Func, n) + for i := range methods { + methods[i] = p.method(parent, baseType) + } + } + return +} + +func (p *importer) method(parent *types.Package, baseType *types.Named) *types.Func { + pos := p.pos() + pkg, name, _ := p.fieldName(parent) + // If we don't have a baseType, use a nil receiver. + // A receiver using the actual interface type (which + // we don't know yet) will be filled in when we call + // types.Interface.Complete. + var recv *types.Var + if baseType != nil { + recv = types.NewVar(token.NoPos, parent, "", baseType) + } + params, isddd := p.paramList() + result, _ := p.paramList() + sig := types.NewSignature(recv, params, result, isddd) + return types.NewFunc(pos, pkg, name, sig) +} + +func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) { + name = p.string() + pkg = parent + if pkg == nil { + // use the imported package instead + pkg = p.pkgList[0] + } + if p.version == 0 && name == "_" { + // version 0 didn't export a package for _ fields + return + } + switch name { + case "": + // 1) field name matches base type name and is exported: nothing to do + case "?": + // 2) field name matches base type name and is not exported: need package + name = "" + pkg = p.pkg() + case "@": + // 3) field name doesn't match type name (alias) + name = p.string() + alias = true + fallthrough + default: + if !exported(name) { + pkg = p.pkg() + } + } + return +} + +func (p *importer) paramList() (*types.Tuple, bool) { + n := p.int() + if n == 0 { + return nil, false + } + // negative length indicates unnamed parameters + named := true + if n < 0 { + n = -n + named = false + } + // n > 0 + 
params := make([]*types.Var, n) + isddd := false + for i := range params { + params[i], isddd = p.param(named) + } + return types.NewTuple(params...), isddd +} + +func (p *importer) param(named bool) (*types.Var, bool) { + t := p.typ(nil, nil) + td, isddd := t.(*dddSlice) + if isddd { + t = types.NewSlice(td.elem) + } + + var pkg *types.Package + var name string + if named { + name = p.string() + if name == "" { + errorf("expected named parameter") + } + if name != "_" { + pkg = p.pkg() + } + if i := strings.Index(name, "·"); i > 0 { + name = name[:i] // cut off gc-specific parameter numbering + } + } + + // read and discard compiler-specific info + p.string() + + return types.NewVar(token.NoPos, pkg, name, t), isddd +} + +func exported(name string) bool { + ch, _ := utf8.DecodeRuneInString(name) + return unicode.IsUpper(ch) +} + +func (p *importer) value() constant.Value { + switch tag := p.tagOrIndex(); tag { + case falseTag: + return constant.MakeBool(false) + case trueTag: + return constant.MakeBool(true) + case int64Tag: + return constant.MakeInt64(p.int64()) + case floatTag: + return p.float() + case complexTag: + re := p.float() + im := p.float() + return constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + case stringTag: + return constant.MakeString(p.string()) + case unknownTag: + return constant.MakeUnknown() + default: + errorf("unexpected value tag %d", tag) // panics + panic("unreachable") + } +} + +func (p *importer) float() constant.Value { + sign := p.int() + if sign == 0 { + return constant.MakeInt64(0) + } + + exp := p.int() + mant := []byte(p.string()) // big endian + + // remove leading 0's if any + for len(mant) > 0 && mant[0] == 0 { + mant = mant[1:] + } + + // convert to little endian + // TODO(gri) go/constant should have a more direct conversion function + // (e.g., once it supports a big.Float based implementation) + for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 { + mant[i], mant[j] = mant[j], mant[i] + } + + // adjust exponent 
(constant.MakeFromBytes creates an integer value, + // but mant represents the mantissa bits such that 0.5 <= mant < 1.0) + exp -= len(mant) << 3 + if len(mant) > 0 { + for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 { + exp++ + } + } + + x := constant.MakeFromBytes(mant) + switch { + case exp < 0: + d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp)) + x = constant.BinaryOp(x, token.QUO, d) + case exp > 0: + x = constant.Shift(x, token.SHL, uint(exp)) + } + + if sign < 0 { + x = constant.UnaryOp(token.SUB, x, 0) + } + return x +} + +// ---------------------------------------------------------------------------- +// Low-level decoders + +func (p *importer) tagOrIndex() int { + if p.debugFormat { + p.marker('t') + } + + return int(p.rawInt64()) +} + +func (p *importer) int() int { + x := p.int64() + if int64(int(x)) != x { + errorf("exported integer too large") + } + return int(x) +} + +func (p *importer) int64() int64 { + if p.debugFormat { + p.marker('i') + } + + return p.rawInt64() +} + +func (p *importer) path() string { + if p.debugFormat { + p.marker('p') + } + // if the path was seen before, i is its index (>= 0) + // (the empty string is at index 0) + i := p.rawInt64() + if i >= 0 { + return p.pathList[i] + } + // otherwise, i is the negative path length (< 0) + a := make([]string, -i) + for n := range a { + a[n] = p.string() + } + s := strings.Join(a, "/") + p.pathList = append(p.pathList, s) + return s +} + +func (p *importer) string() string { + if p.debugFormat { + p.marker('s') + } + // if the string was seen before, i is its index (>= 0) + // (the empty string is at index 0) + i := p.rawInt64() + if i >= 0 { + return p.strList[i] + } + // otherwise, i is the negative string length (< 0) + if n := int(-i); n <= cap(p.buf) { + p.buf = p.buf[:n] + } else { + p.buf = make([]byte, n) + } + for i := range p.buf { + p.buf[i] = p.rawByte() + } + s := string(p.buf) + p.strList = append(p.strList, s) + return s +} + +func (p *importer) 
marker(want byte) { + if got := p.rawByte(); got != want { + errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read) + } + + pos := p.read + if n := int(p.rawInt64()); n != pos { + errorf("incorrect position: got %d; want %d", n, pos) + } +} + +// rawInt64 should only be used by low-level decoders. +func (p *importer) rawInt64() int64 { + i, err := binary.ReadVarint(p) + if err != nil { + errorf("read error: %v", err) + } + return i +} + +// rawStringln should only be used to read the initial version string. +func (p *importer) rawStringln(b byte) string { + p.buf = p.buf[:0] + for b != '\n' { + p.buf = append(p.buf, b) + b = p.rawByte() + } + return string(p.buf) +} + +// needed for binary.ReadVarint in rawInt64 +func (p *importer) ReadByte() (byte, error) { + return p.rawByte(), nil +} + +// byte is the bottleneck interface for reading p.data. +// It unescapes '|' 'S' to '$' and '|' '|' to '|'. +// rawByte should only be used by low-level decoders. +func (p *importer) rawByte() byte { + b := p.data[0] + r := 1 + if b == '|' { + b = p.data[1] + r = 2 + switch b { + case 'S': + b = '$' + case '|': + // nothing to do + default: + errorf("unexpected escape sequence in export data") + } + } + p.data = p.data[r:] + p.read += r + return b + +} + +// ---------------------------------------------------------------------------- +// Export format + +// Tags. Must be < 0. 
+const ( + // Objects + packageTag = -(iota + 1) + constTag + typeTag + varTag + funcTag + endTag + + // Types + namedTag + arrayTag + sliceTag + dddTag + structTag + pointerTag + signatureTag + interfaceTag + mapTag + chanTag + + // Values + falseTag + trueTag + int64Tag + floatTag + fractionTag // not used by gc + complexTag + stringTag + nilTag // only used by gc (appears in exported inlined function bodies) + unknownTag // not used by gc (only appears in packages with errors) + + // Type aliases + aliasTag + + typeParamTag + unionTag // types.Union + named2Tag // has typeparams + func2Tag // has typeparams + signature2Tag // has typeparams + interface2Tag // has embeddeds +) + +var predecl []types.Type // initialized lazily + +func predeclared() []types.Type { + if predecl == nil { + // initialize lazily to be sure that all + // elements have been initialized before + predecl = []types.Type{ // basic types + types.Typ[types.Bool], + types.Typ[types.Int], + types.Typ[types.Int8], + types.Typ[types.Int16], + types.Typ[types.Int32], + types.Typ[types.Int64], + types.Typ[types.Uint], + types.Typ[types.Uint8], + types.Typ[types.Uint16], + types.Typ[types.Uint32], + types.Typ[types.Uint64], + types.Typ[types.Uintptr], + types.Typ[types.Float32], + types.Typ[types.Float64], + types.Typ[types.Complex64], + types.Typ[types.Complex128], + types.Typ[types.String], + + // basic type aliases + types.Universe.Lookup("byte").Type(), + types.Universe.Lookup("rune").Type(), + + // error + types.Universe.Lookup("error").Type(), + + // untyped types + types.Typ[types.UntypedBool], + types.Typ[types.UntypedInt], + types.Typ[types.UntypedRune], + types.Typ[types.UntypedFloat], + types.Typ[types.UntypedComplex], + types.Typ[types.UntypedString], + types.Typ[types.UntypedNil], + + // package unsafe + types.Typ[types.UnsafePointer], + + // invalid type + types.Typ[types.Invalid], // only appears in packages with errors + + // used internally by gc; never used by this package or in .a 
files + anyType{}, + comparableType, + } + } + return predecl +} + +type anyType struct{} + +func (t anyType) Underlying() types.Type { return t } +func (t anyType) String() string { return "any" } diff --git a/internal/gcimporter/exportdata.go b/internal/gcimporter/exportdata.go new file mode 100644 index 00000000..f33dc561 --- /dev/null +++ b/internal/gcimporter/exportdata.go @@ -0,0 +1,93 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go. + +// This file implements FindExportData. + +package gcimporter + +import ( + "bufio" + "fmt" + "io" + "strconv" + "strings" +) + +func readGopackHeader(r *bufio.Reader) (name string, size int, err error) { + // See $GOROOT/include/ar.h. + hdr := make([]byte, 16+12+6+6+8+10+2) + _, err = io.ReadFull(r, hdr) + if err != nil { + return + } + // leave for debugging + if false { + fmt.Printf("header: %s", hdr) + } + s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10])) + size, err = strconv.Atoi(s) + if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' { + err = fmt.Errorf("invalid archive header") + return + } + name = strings.TrimSpace(string(hdr[:16])) + return +} + +// FindExportData positions the reader r at the beginning of the +// export data section of an underlying GC-created object/archive +// file by reading from it. The reader must be positioned at the +// start of the file before calling this function. The hdr result +// is the string before the export data, either "$$" or "$$B". +// +func FindExportData(r *bufio.Reader) (hdr string, err error) { + // Read first line to make sure this is an object file. + line, err := r.ReadSlice('\n') + if err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + + if string(line) == "!\n" { + // Archive file. Scan to __.PKGDEF. 
+ var name string + if name, _, err = readGopackHeader(r); err != nil { + return + } + + // First entry should be __.PKGDEF. + if name != "__.PKGDEF" { + err = fmt.Errorf("go archive is missing __.PKGDEF") + return + } + + // Read first line of __.PKGDEF data, so that line + // is once again the first line of the input. + if line, err = r.ReadSlice('\n'); err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + } + + // Now at __.PKGDEF in archive or still at beginning of file. + // Either way, line should begin with "go object ". + if !strings.HasPrefix(string(line), "go object ") { + err = fmt.Errorf("not a Go object file") + return + } + + // Skip over object header to export data. + // Begins after first line starting with $$. + for line[0] != '$' { + if line, err = r.ReadSlice('\n'); err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + } + hdr = string(line) + + return +} diff --git a/internal/gcimporter/gcimporter.go b/internal/gcimporter/gcimporter.go new file mode 100644 index 00000000..31238f0d --- /dev/null +++ b/internal/gcimporter/gcimporter.go @@ -0,0 +1,1078 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a modified copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go, +// but it also contains the original source-based importer code for Go1.6. +// Once we stop supporting 1.6, we can remove that code. + +// Package gcimporter provides various functions for reading +// gc-generated object files that can be used to implement the +// Importer interface defined by the Go 1.5 standard library package. 
+package gcimporter + +import ( + "bufio" + "errors" + "fmt" + "go/build" + "go/constant" + "go/token" + "go/types" + "io" + "io/ioutil" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + "text/scanner" +) + +// debugging/development support +const debug = false + +var pkgExts = [...]string{".a", ".o"} + +// FindPkg returns the filename and unique package id for an import +// path based on package information provided by build.Import (using +// the build.Default build.Context). A relative srcDir is interpreted +// relative to the current working directory. +// If no file was found, an empty filename is returned. +// +func FindPkg(path, srcDir string) (filename, id string) { + if path == "" { + return + } + + var noext string + switch { + default: + // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x" + // Don't require the source files to be present. + if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282 + srcDir = abs + } + bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary) + if bp.PkgObj == "" { + id = path // make sure we have an id to print in error message + return + } + noext = strings.TrimSuffix(bp.PkgObj, ".a") + id = bp.ImportPath + + case build.IsLocalImport(path): + // "./x" -> "/this/directory/x.ext", "/this/directory/x" + noext = filepath.Join(srcDir, path) + id = noext + + case filepath.IsAbs(path): + // for completeness only - go/build.Import + // does not support absolute imports + // "/x" -> "/x.ext", "/x" + noext = path + id = path + } + + if false { // for debugging + if path != id { + fmt.Printf("%s -> %s\n", path, id) + } + } + + // try extensions + for _, ext := range pkgExts { + filename = noext + ext + if f, err := os.Stat(filename); err == nil && !f.IsDir() { + return + } + } + + filename = "" // not found + return +} + +// ImportData imports a package by reading the gc-generated export data, +// adds the corresponding package object to the packages map indexed by id, +// and returns the object. 
+//
+// The packages map must contain all packages already imported. The data
+// reader position must be the beginning of the export data section. The
+// filename is only used in error messages.
+//
+// If packages[id] contains the completely imported package, that package
+// can be used directly, and there is no need to call this function (but
+// there is also no harm but for extra time used).
+//
+func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) {
+	// support for parser error handling
+	defer func() {
+		switch r := recover().(type) {
+		case nil:
+			// nothing to do
+		case importError:
+			err = r
+		default:
+			panic(r) // internal error
+		}
+	}()
+
+	var p parser
+	p.init(filename, id, data, packages)
+	pkg = p.parseExport()
+
+	return
+}
+
+// Import imports a gc-generated package given its import path and srcDir, adds
+// the corresponding package object to the packages map, and returns the object.
+// The packages map must contain all packages already imported.
+//
+func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
+	var rc io.ReadCloser
+	var filename, id string
+	if lookup != nil {
+		// With custom lookup specified, assume that caller has
+		// converted path to a canonical import path for use in the map.
+		if path == "unsafe" {
+			return types.Unsafe, nil
+		}
+		id = path
+
+		// No need to re-import if the package was imported completely before.
+ if pkg = packages[id]; pkg != nil && pkg.Complete() { + return + } + f, err := lookup(path) + if err != nil { + return nil, err + } + rc = f + } else { + filename, id = FindPkg(path, srcDir) + if filename == "" { + if path == "unsafe" { + return types.Unsafe, nil + } + return nil, fmt.Errorf("can't find import: %q", id) + } + + // no need to re-import if the package was imported completely before + if pkg = packages[id]; pkg != nil && pkg.Complete() { + return + } + + // open file + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer func() { + if err != nil { + // add file name to error + err = fmt.Errorf("%s: %v", filename, err) + } + }() + rc = f + } + defer rc.Close() + + var hdr string + buf := bufio.NewReader(rc) + if hdr, err = FindExportData(buf); err != nil { + return + } + + switch hdr { + case "$$\n": + // Work-around if we don't have a filename; happens only if lookup != nil. + // Either way, the filename is only needed for importer error messages, so + // this is fine. + if filename == "" { + filename = path + } + return ImportData(packages, filename, id, buf) + + case "$$B\n": + var data []byte + data, err = ioutil.ReadAll(buf) + if err != nil { + break + } + + // TODO(gri): allow clients of go/importer to provide a FileSet. + // Or, define a new standard go/types/gcexportdata package. + fset := token.NewFileSet() + + // The indexed export format starts with an 'i'; the older + // binary export format starts with a 'c', 'd', or 'v' + // (from "version"). Select appropriate importer. + if len(data) > 0 && data[0] == 'i' { + _, pkg, err = IImportData(fset, packages, data[1:], id) + } else { + _, pkg, err = BImportData(fset, packages, data, id) + } + + default: + err = fmt.Errorf("unknown export data header: %q", hdr) + } + + return +} + +// ---------------------------------------------------------------------------- +// Parser + +// TODO(gri) Imported objects don't have position information. 
+// Ideally use the debug table line info; alternatively +// create some fake position (or the position of the +// import). That way error messages referring to imported +// objects can print meaningful information. + +// parser parses the exports inside a gc compiler-produced +// object/archive file and populates its scope with the results. +type parser struct { + scanner scanner.Scanner + tok rune // current token + lit string // literal string; only valid for Ident, Int, String tokens + id string // package id of imported package + sharedPkgs map[string]*types.Package // package id -> package object (across importer) + localPkgs map[string]*types.Package // package id -> package object (just this package) +} + +func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) { + p.scanner.Init(src) + p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) } + p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments + p.scanner.Whitespace = 1<<'\t' | 1<<' ' + p.scanner.Filename = filename // for good error messages + p.next() + p.id = id + p.sharedPkgs = packages + if debug { + // check consistency of packages map + for _, pkg := range packages { + if pkg.Name() == "" { + fmt.Printf("no package name for %s\n", pkg.Path()) + } + } + } +} + +func (p *parser) next() { + p.tok = p.scanner.Scan() + switch p.tok { + case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·': + p.lit = p.scanner.TokenText() + default: + p.lit = "" + } + if debug { + fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit) + } +} + +func declTypeName(pkg *types.Package, name string) *types.TypeName { + scope := pkg.Scope() + if obj := scope.Lookup(name); obj != nil { + return obj.(*types.TypeName) + } + obj := types.NewTypeName(token.NoPos, pkg, name, nil) + // a named type may be referred to before the underlying type + // 
is known - set it up + types.NewNamed(obj, nil, nil) + scope.Insert(obj) + return obj +} + +// ---------------------------------------------------------------------------- +// Error handling + +// Internal errors are boxed as importErrors. +type importError struct { + pos scanner.Position + err error +} + +func (e importError) Error() string { + return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err) +} + +func (p *parser) error(err interface{}) { + if s, ok := err.(string); ok { + err = errors.New(s) + } + // panic with a runtime.Error if err is not an error + panic(importError{p.scanner.Pos(), err.(error)}) +} + +func (p *parser) errorf(format string, args ...interface{}) { + p.error(fmt.Sprintf(format, args...)) +} + +func (p *parser) expect(tok rune) string { + lit := p.lit + if p.tok != tok { + p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit) + } + p.next() + return lit +} + +func (p *parser) expectSpecial(tok string) { + sep := 'x' // not white space + i := 0 + for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' { + sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token + p.next() + i++ + } + if i < len(tok) { + p.errorf("expected %q, got %q", tok, tok[0:i]) + } +} + +func (p *parser) expectKeyword(keyword string) { + lit := p.expect(scanner.Ident) + if lit != keyword { + p.errorf("expected keyword %s, got %q", keyword, lit) + } +} + +// ---------------------------------------------------------------------------- +// Qualified and unqualified names + +// PackageId = string_lit . +// +func (p *parser) parsePackageId() string { + id, err := strconv.Unquote(p.expect(scanner.String)) + if err != nil { + p.error(err) + } + // id == "" stands for the imported package id + // (only known at time of package installation) + if id == "" { + id = p.id + } + return id +} + +// PackageName = ident . 
+// +func (p *parser) parsePackageName() string { + return p.expect(scanner.Ident) +} + +// dotIdentifier = ( ident | '·' ) { ident | int | '·' } . +func (p *parser) parseDotIdent() string { + ident := "" + if p.tok != scanner.Int { + sep := 'x' // not white space + for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' { + ident += p.lit + sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token + p.next() + } + } + if ident == "" { + p.expect(scanner.Ident) // use expect() for error handling + } + return ident +} + +// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) . +// +func (p *parser) parseQualifiedName() (id, name string) { + p.expect('@') + id = p.parsePackageId() + p.expect('.') + // Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields. + if p.tok == '?' { + p.next() + } else { + name = p.parseDotIdent() + } + return +} + +// getPkg returns the package for a given id. If the package is +// not found, create the package and add it to the p.localPkgs +// and p.sharedPkgs maps. name is the (expected) name of the +// package. If name == "", the package name is expected to be +// set later via an import clause in the export data. +// +// id identifies a package, usually by a canonical package path like +// "encoding/json" but possibly by a non-canonical import path like +// "./json". 
+// +func (p *parser) getPkg(id, name string) *types.Package { + // package unsafe is not in the packages maps - handle explicitly + if id == "unsafe" { + return types.Unsafe + } + + pkg := p.localPkgs[id] + if pkg == nil { + // first import of id from this package + pkg = p.sharedPkgs[id] + if pkg == nil { + // first import of id by this importer; + // add (possibly unnamed) pkg to shared packages + pkg = types.NewPackage(id, name) + p.sharedPkgs[id] = pkg + } + // add (possibly unnamed) pkg to local packages + if p.localPkgs == nil { + p.localPkgs = make(map[string]*types.Package) + } + p.localPkgs[id] = pkg + } else if name != "" { + // package exists already and we have an expected package name; + // make sure names match or set package name if necessary + if pname := pkg.Name(); pname == "" { + pkg.SetName(name) + } else if pname != name { + p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name) + } + } + return pkg +} + +// parseExportedName is like parseQualifiedName, but +// the package id is resolved to an imported *types.Package. +// +func (p *parser) parseExportedName() (pkg *types.Package, name string) { + id, name := p.parseQualifiedName() + pkg = p.getPkg(id, "") + return +} + +// ---------------------------------------------------------------------------- +// Types + +// BasicType = identifier . +// +func (p *parser) parseBasicType() types.Type { + id := p.expect(scanner.Ident) + obj := types.Universe.Lookup(id) + if obj, ok := obj.(*types.TypeName); ok { + return obj.Type() + } + p.errorf("not a basic type: %s", id) + return nil +} + +// ArrayType = "[" int_lit "]" Type . 
+//
+func (p *parser) parseArrayType(parent *types.Package) types.Type {
+	// "[" already consumed and lookahead known not to be "]"
+	lit := p.expect(scanner.Int)
+	p.expect(']')
+	elem := p.parseType(parent)
+	n, err := strconv.ParseInt(lit, 10, 64)
+	if err != nil {
+		p.error(err)
+	}
+	return types.NewArray(elem, n)
+}
+
+// MapType = "map" "[" Type "]" Type .
+//
+func (p *parser) parseMapType(parent *types.Package) types.Type {
+	p.expectKeyword("map")
+	p.expect('[')
+	key := p.parseType(parent)
+	p.expect(']')
+	elem := p.parseType(parent)
+	return types.NewMap(key, elem)
+}
+
+// Name = identifier | "?" | QualifiedName .
+//
+// For unqualified and anonymous names, the returned package is the parent
+// package unless parent == nil, in which case the returned package is the
+// package being imported. (The parent package is not nil if the name
+// is an unqualified struct field or interface method name belonging to a
+// type declared in another package.)
+//
+// For qualified names, the returned package is nil (and not created if
+// it doesn't exist yet) unless materializePkg is set (which creates an
+// unnamed package with valid package path). In the latter case, a
+// subsequent import clause is expected to provide a name for the package.
+//
+func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) {
+	pkg = parent
+	if pkg == nil {
+		pkg = p.sharedPkgs[p.id]
+	}
+	switch p.tok {
+	case scanner.Ident:
+		name = p.lit
+		p.next()
+	case '?':
+		// anonymous
+		p.next()
+	case '@':
+		// exported name prefixed with package path
+		pkg = nil
+		var id string
+		id, name = p.parseQualifiedName()
+		if materializePkg {
+			pkg = p.getPkg(id, "")
+		}
+	default:
+		p.error("name expected")
+	}
+	return
+}
+
+func deref(typ types.Type) types.Type {
+	if p, _ := typ.(*types.Pointer); p != nil {
+		return p.Elem()
+	}
+	return typ
+}
+
+// Field = Name Type [ string_lit ] .
+// +func (p *parser) parseField(parent *types.Package) (*types.Var, string) { + pkg, name := p.parseName(parent, true) + + if name == "_" { + // Blank fields should be package-qualified because they + // are unexported identifiers, but gc does not qualify them. + // Assuming that the ident belongs to the current package + // causes types to change during re-exporting, leading + // to spurious "can't assign A to B" errors from go/types. + // As a workaround, pretend all blank fields belong + // to the same unique dummy package. + const blankpkg = "<_>" + pkg = p.getPkg(blankpkg, blankpkg) + } + + typ := p.parseType(parent) + anonymous := false + if name == "" { + // anonymous field - typ must be T or *T and T must be a type name + switch typ := deref(typ).(type) { + case *types.Basic: // basic types are named types + pkg = nil // objects defined in Universe scope have no package + name = typ.Name() + case *types.Named: + name = typ.Obj().Name() + default: + p.errorf("anonymous field expected") + } + anonymous = true + } + tag := "" + if p.tok == scanner.String { + s := p.expect(scanner.String) + var err error + tag, err = strconv.Unquote(s) + if err != nil { + p.errorf("invalid struct tag %s: %s", s, err) + } + } + return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag +} + +// StructType = "struct" "{" [ FieldList ] "}" . +// FieldList = Field { ";" Field } . +// +func (p *parser) parseStructType(parent *types.Package) types.Type { + var fields []*types.Var + var tags []string + + p.expectKeyword("struct") + p.expect('{') + for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { + if i > 0 { + p.expect(';') + } + fld, tag := p.parseField(parent) + if tag != "" && tags == nil { + tags = make([]string, i) + } + if tags != nil { + tags = append(tags, tag) + } + fields = append(fields, fld) + } + p.expect('}') + + return types.NewStruct(fields, tags) +} + +// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] . 
+// +func (p *parser) parseParameter() (par *types.Var, isVariadic bool) { + _, name := p.parseName(nil, false) + // remove gc-specific parameter numbering + if i := strings.Index(name, "·"); i >= 0 { + name = name[:i] + } + if p.tok == '.' { + p.expectSpecial("...") + isVariadic = true + } + typ := p.parseType(nil) + if isVariadic { + typ = types.NewSlice(typ) + } + // ignore argument tag (e.g. "noescape") + if p.tok == scanner.String { + p.next() + } + // TODO(gri) should we provide a package? + par = types.NewVar(token.NoPos, nil, name, typ) + return +} + +// Parameters = "(" [ ParameterList ] ")" . +// ParameterList = { Parameter "," } Parameter . +// +func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) { + p.expect('(') + for p.tok != ')' && p.tok != scanner.EOF { + if len(list) > 0 { + p.expect(',') + } + par, variadic := p.parseParameter() + list = append(list, par) + if variadic { + if isVariadic { + p.error("... not on final argument") + } + isVariadic = true + } + } + p.expect(')') + + return +} + +// Signature = Parameters [ Result ] . +// Result = Type | Parameters . +// +func (p *parser) parseSignature(recv *types.Var) *types.Signature { + params, isVariadic := p.parseParameters() + + // optional result type + var results []*types.Var + if p.tok == '(' { + var variadic bool + results, variadic = p.parseParameters() + if variadic { + p.error("... not permitted on result type") + } + } + + return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic) +} + +// InterfaceType = "interface" "{" [ MethodList ] "}" . +// MethodList = Method { ";" Method } . +// Method = Name Signature . +// +// The methods of embedded interfaces are always "inlined" +// by the compiler and thus embedded interfaces are never +// visible in the export data. 
+// +func (p *parser) parseInterfaceType(parent *types.Package) types.Type { + var methods []*types.Func + + p.expectKeyword("interface") + p.expect('{') + for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { + if i > 0 { + p.expect(';') + } + pkg, name := p.parseName(parent, true) + sig := p.parseSignature(nil) + methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig)) + } + p.expect('}') + + // Complete requires the type's embedded interfaces to be fully defined, + // but we do not define any + return types.NewInterface(methods, nil).Complete() +} + +// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type . +// +func (p *parser) parseChanType(parent *types.Package) types.Type { + dir := types.SendRecv + if p.tok == scanner.Ident { + p.expectKeyword("chan") + if p.tok == '<' { + p.expectSpecial("<-") + dir = types.SendOnly + } + } else { + p.expectSpecial("<-") + p.expectKeyword("chan") + dir = types.RecvOnly + } + elem := p.parseType(parent) + return types.NewChan(dir, elem) +} + +// Type = +// BasicType | TypeName | ArrayType | SliceType | StructType | +// PointerType | FuncType | InterfaceType | MapType | ChanType | +// "(" Type ")" . +// +// BasicType = ident . +// TypeName = ExportedName . +// SliceType = "[" "]" Type . +// PointerType = "*" Type . +// FuncType = "func" Signature . 
+// +func (p *parser) parseType(parent *types.Package) types.Type { + switch p.tok { + case scanner.Ident: + switch p.lit { + default: + return p.parseBasicType() + case "struct": + return p.parseStructType(parent) + case "func": + // FuncType + p.next() + return p.parseSignature(nil) + case "interface": + return p.parseInterfaceType(parent) + case "map": + return p.parseMapType(parent) + case "chan": + return p.parseChanType(parent) + } + case '@': + // TypeName + pkg, name := p.parseExportedName() + return declTypeName(pkg, name).Type() + case '[': + p.next() // look ahead + if p.tok == ']' { + // SliceType + p.next() + return types.NewSlice(p.parseType(parent)) + } + return p.parseArrayType(parent) + case '*': + // PointerType + p.next() + return types.NewPointer(p.parseType(parent)) + case '<': + return p.parseChanType(parent) + case '(': + // "(" Type ")" + p.next() + typ := p.parseType(parent) + p.expect(')') + return typ + } + p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit) + return nil +} + +// ---------------------------------------------------------------------------- +// Declarations + +// ImportDecl = "import" PackageName PackageId . +// +func (p *parser) parseImportDecl() { + p.expectKeyword("import") + name := p.parsePackageName() + p.getPkg(p.parsePackageId(), name) +} + +// int_lit = [ "+" | "-" ] { "0" ... "9" } . +// +func (p *parser) parseInt() string { + s := "" + switch p.tok { + case '-': + s = "-" + p.next() + case '+': + p.next() + } + return s + p.expect(scanner.Int) +} + +// number = int_lit [ "p" int_lit ] . 
+// +func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) { + // mantissa + mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0) + if mant == nil { + panic("invalid mantissa") + } + + if p.lit == "p" { + // exponent (base 2) + p.next() + exp, err := strconv.ParseInt(p.parseInt(), 10, 0) + if err != nil { + p.error(err) + } + if exp < 0 { + denom := constant.MakeInt64(1) + denom = constant.Shift(denom, token.SHL, uint(-exp)) + typ = types.Typ[types.UntypedFloat] + val = constant.BinaryOp(mant, token.QUO, denom) + return + } + if exp > 0 { + mant = constant.Shift(mant, token.SHL, uint(exp)) + } + typ = types.Typ[types.UntypedFloat] + val = mant + return + } + + typ = types.Typ[types.UntypedInt] + val = mant + return +} + +// ConstDecl = "const" ExportedName [ Type ] "=" Literal . +// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit . +// bool_lit = "true" | "false" . +// complex_lit = "(" float_lit "+" float_lit "i" ")" . +// rune_lit = "(" int_lit "+" int_lit ")" . +// string_lit = `"` { unicode_char } `"` . 
+// +func (p *parser) parseConstDecl() { + p.expectKeyword("const") + pkg, name := p.parseExportedName() + + var typ0 types.Type + if p.tok != '=' { + // constant types are never structured - no need for parent type + typ0 = p.parseType(nil) + } + + p.expect('=') + var typ types.Type + var val constant.Value + switch p.tok { + case scanner.Ident: + // bool_lit + if p.lit != "true" && p.lit != "false" { + p.error("expected true or false") + } + typ = types.Typ[types.UntypedBool] + val = constant.MakeBool(p.lit == "true") + p.next() + + case '-', scanner.Int: + // int_lit + typ, val = p.parseNumber() + + case '(': + // complex_lit or rune_lit + p.next() + if p.tok == scanner.Char { + p.next() + p.expect('+') + typ = types.Typ[types.UntypedRune] + _, val = p.parseNumber() + p.expect(')') + break + } + _, re := p.parseNumber() + p.expect('+') + _, im := p.parseNumber() + p.expectKeyword("i") + p.expect(')') + typ = types.Typ[types.UntypedComplex] + val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + + case scanner.Char: + // rune_lit + typ = types.Typ[types.UntypedRune] + val = constant.MakeFromLiteral(p.lit, token.CHAR, 0) + p.next() + + case scanner.String: + // string_lit + typ = types.Typ[types.UntypedString] + val = constant.MakeFromLiteral(p.lit, token.STRING, 0) + p.next() + + default: + p.errorf("expected literal got %s", scanner.TokenString(p.tok)) + } + + if typ0 == nil { + typ0 = typ + } + + pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val)) +} + +// TypeDecl = "type" ExportedName Type . +// +func (p *parser) parseTypeDecl() { + p.expectKeyword("type") + pkg, name := p.parseExportedName() + obj := declTypeName(pkg, name) + + // The type object may have been imported before and thus already + // have a type associated with it. We still need to parse the type + // structure, but throw it away if the object already has a type. + // This ensures that all imports refer to the same type object for + // a given type declaration. 
+ typ := p.parseType(pkg) + + if name := obj.Type().(*types.Named); name.Underlying() == nil { + name.SetUnderlying(typ) + } +} + +// VarDecl = "var" ExportedName Type . +// +func (p *parser) parseVarDecl() { + p.expectKeyword("var") + pkg, name := p.parseExportedName() + typ := p.parseType(pkg) + pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ)) +} + +// Func = Signature [ Body ] . +// Body = "{" ... "}" . +// +func (p *parser) parseFunc(recv *types.Var) *types.Signature { + sig := p.parseSignature(recv) + if p.tok == '{' { + p.next() + for i := 1; i > 0; p.next() { + switch p.tok { + case '{': + i++ + case '}': + i-- + } + } + } + return sig +} + +// MethodDecl = "func" Receiver Name Func . +// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" . +// +func (p *parser) parseMethodDecl() { + // "func" already consumed + p.expect('(') + recv, _ := p.parseParameter() // receiver + p.expect(')') + + // determine receiver base type object + base := deref(recv.Type()).(*types.Named) + + // parse method name, signature, and possibly inlined body + _, name := p.parseName(nil, false) + sig := p.parseFunc(recv) + + // methods always belong to the same package as the base type object + pkg := base.Obj().Pkg() + + // add method to type unless type was imported before + // and method exists already + // TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small. + base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig)) +} + +// FuncDecl = "func" ExportedName Func . +// +func (p *parser) parseFuncDecl() { + // "func" already consumed + pkg, name := p.parseExportedName() + typ := p.parseFunc(nil) + pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ)) +} + +// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" . 
+//
+func (p *parser) parseDecl() {
+	if p.tok == scanner.Ident {
+		switch p.lit {
+		case "import":
+			p.parseImportDecl()
+		case "const":
+			p.parseConstDecl()
+		case "type":
+			p.parseTypeDecl()
+		case "var":
+			p.parseVarDecl()
+		case "func":
+			p.next() // look ahead
+			if p.tok == '(' {
+				p.parseMethodDecl()
+			} else {
+				p.parseFuncDecl()
+			}
+		}
+	}
+	p.expect('\n')
+}
+
+// ----------------------------------------------------------------------------
+// Export
+
+// Export = PackageClause { Decl } "$$" .
+// PackageClause = "package" PackageName [ "safe" ] "\n" .
+//
+func (p *parser) parseExport() *types.Package {
+	p.expectKeyword("package")
+	name := p.parsePackageName()
+	if p.tok == scanner.Ident && p.lit == "safe" {
+		// package was compiled with -u option - ignore
+		p.next()
+	}
+	p.expect('\n')
+
+	pkg := p.getPkg(p.id, name)
+
+	for p.tok != '$' && p.tok != scanner.EOF {
+		p.parseDecl()
+	}
+
+	if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' {
+		// don't call next()/expect() since reading past the
+		// export data may cause scanner errors (e.g. NUL chars)
+		p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch)
+	}
+
+	if n := p.scanner.ErrorCount; n != 0 {
+		p.errorf("expected no scanner errors, got %d", n)
+	}
+
+	// Record all locally referenced packages as imports.
+ var imports []*types.Package + for id, pkg2 := range p.localPkgs { + if pkg2.Name() == "" { + p.errorf("%s package has no name", id) + } + if id == p.id { + continue // avoid self-edge + } + imports = append(imports, pkg2) + } + sort.Sort(byPath(imports)) + pkg.SetImports(imports) + + // package was imported completely and without errors + pkg.MarkComplete() + + return pkg +} + +type byPath []*types.Package + +func (a byPath) Len() int { return len(a) } +func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() } diff --git a/internal/gcimporter/gcimporter11_test.go b/internal/gcimporter/gcimporter11_test.go new file mode 100644 index 00000000..18186817 --- /dev/null +++ b/internal/gcimporter/gcimporter11_test.go @@ -0,0 +1,129 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.11 + +package gcimporter + +import ( + "go/types" + "runtime" + "strings" + "testing" +) + +var importedObjectTests = []struct { + name string + want string +}{ + // non-interfaces + {"crypto.Hash", "type Hash uint"}, + {"go/ast.ObjKind", "type ObjKind int"}, + {"go/types.Qualifier", "type Qualifier func(*Package) string"}, + {"go/types.Comparable", "func Comparable(T Type) bool"}, + {"math.Pi", "const Pi untyped float"}, + {"math.Sin", "func Sin(x float64) float64"}, + {"go/ast.NotNilFilter", "func NotNilFilter(_ string, v reflect.Value) bool"}, + {"go/internal/gcimporter.BImportData", "func BImportData(fset *go/token.FileSet, imports map[string]*go/types.Package, data []byte, path string) (_ int, pkg *go/types.Package, err error)"}, + + // interfaces + {"context.Context", "type Context interface{Deadline() (deadline time.Time, ok bool); Done() <-chan struct{}; Err() error; Value(key interface{}) interface{}}"}, + {"crypto.Decrypter", "type Decrypter interface{Decrypt(rand io.Reader, msg 
[]byte, opts DecrypterOpts) (plaintext []byte, err error); Public() PublicKey}"}, + {"encoding.BinaryMarshaler", "type BinaryMarshaler interface{MarshalBinary() (data []byte, err error)}"}, + {"io.Reader", "type Reader interface{Read(p []byte) (n int, err error)}"}, + {"io.ReadWriter", "type ReadWriter interface{Reader; Writer}"}, + {"go/ast.Node", "type Node interface{End() go/token.Pos; Pos() go/token.Pos}"}, + {"go/types.Type", "type Type interface{String() string; Underlying() Type}"}, +} + +func TestImportedTypes(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + for _, test := range importedObjectTests { + s := strings.Split(test.name, ".") + if len(s) != 2 { + t.Fatal("inconsistent test data") + } + importPath := s[0] + objName := s[1] + + pkg, err := Import(make(map[string]*types.Package), importPath, ".", nil) + if err != nil { + t.Error(err) + continue + } + + obj := pkg.Scope().Lookup(objName) + if obj == nil { + t.Errorf("%s: object not found", test.name) + continue + } + + got := types.ObjectString(obj, types.RelativeTo(pkg)) + if got != test.want { + t.Errorf("%s: got %q; want %q", test.name, got, test.want) + } + + if named, _ := obj.Type().(*types.Named); named != nil { + verifyInterfaceMethodRecvs(t, named, 0) + } + } +} + +// verifyInterfaceMethodRecvs verifies that method receiver types +// are named if the methods belong to a named interface type. 
+func verifyInterfaceMethodRecvs(t *testing.T, named *types.Named, level int) {
+	// avoid endless recursion in case of an embedding bug that led to a cycle
+	if level > 10 {
+		t.Errorf("%s: embeds itself", named)
+		return
+	}
+
+	iface, _ := named.Underlying().(*types.Interface)
+	if iface == nil {
+		return // not an interface
+	}
+
+	// check explicitly declared methods
+	for i := 0; i < iface.NumExplicitMethods(); i++ {
+		m := iface.ExplicitMethod(i)
+		recv := m.Type().(*types.Signature).Recv()
+		if recv == nil {
+			t.Errorf("%s: missing receiver type", m)
+			continue
+		}
+		if recv.Type() != named {
+			t.Errorf("%s: got recv type %s; want %s", m, recv.Type(), named)
+		}
+	}
+
+	// check embedded interfaces (if they are named, too)
+	for i := 0; i < iface.NumEmbeddeds(); i++ {
+		// embedding of interfaces cannot have cycles; recursion will terminate
+		if etype, _ := iface.EmbeddedType(i).(*types.Named); etype != nil {
+			verifyInterfaceMethodRecvs(t, etype, level+1)
+		}
+	}
+}
+func TestIssue25301(t *testing.T) {
+	skipSpecialPlatforms(t)
+
+	// This package only handles gc export data.
+	if runtime.Compiler != "gc" {
+		t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler)
+	}
+
+	// On windows, we have to set the -D option for the compiler to avoid having a drive
+	// letter and an illegal ':' in the import path - just skip it (see also issue #3483).
+	if runtime.GOOS == "windows" {
+		t.Skip("avoid dealing with relative paths/drive letters on windows")
+	}
+
+	compileAndImportPkg(t, "issue25301")
+}
diff --git a/internal/gcimporter/gcimporter_test.go b/internal/gcimporter/gcimporter_test.go
new file mode 100644
index 00000000..14622d34
--- /dev/null
+++ b/internal/gcimporter/gcimporter_test.go
@@ -0,0 +1,517 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+ +// This file is a copy of $GOROOT/src/go/internal/gcimporter/gcimporter_test.go, +// adjusted to make it build with code from (std lib) internal/testenv copied. + +package gcimporter + +import ( + "bytes" + "fmt" + "go/types" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "runtime" + "strings" + "testing" + "time" +) + +// ---------------------------------------------------------------------------- +// The following three functions (Builder, HasGoBuild, MustHaveGoBuild) were +// copied from $GOROOT/src/internal/testenv since that package is not available +// in x/tools. + +// Builder reports the name of the builder running this test +// (for example, "linux-amd64" or "windows-386-gce"). +// If the test is not running on the build infrastructure, +// Builder returns the empty string. +func Builder() string { + return os.Getenv("GO_BUILDER_NAME") +} + +// HasGoBuild reports whether the current system can build programs with ``go build'' +// and then run them with os.StartProcess or exec.Command. +func HasGoBuild() bool { + switch runtime.GOOS { + case "android", "nacl": + return false + case "darwin": + if strings.HasPrefix(runtime.GOARCH, "arm") { + return false + } + } + return true +} + +// MustHaveGoBuild checks that the current system can build programs with ``go build'' +// and then run them with os.StartProcess or exec.Command. +// If not, MustHaveGoBuild calls t.Skip with an explanation. +func MustHaveGoBuild(t *testing.T) { + if !HasGoBuild() { + t.Skipf("skipping test: 'go build' not available on %s/%s", runtime.GOOS, runtime.GOARCH) + } +} + +// ---------------------------------------------------------------------------- + +// skipSpecialPlatforms causes the test to be skipped for platforms where +// builders (build.golang.org) don't have access to compiled packages for +// import. 
+func skipSpecialPlatforms(t *testing.T) { + switch platform := runtime.GOOS + "-" + runtime.GOARCH; platform { + case "nacl-amd64p32", + "nacl-386", + "nacl-arm", + "darwin-arm", + "darwin-arm64": + t.Skipf("no compiled packages available for import on %s", platform) + } +} + +// compile runs the compiler on filename, with dirname as the working directory, +// and writes the output file to outdirname. +func compile(t *testing.T, dirname, filename, outdirname string) string { + /* testenv. */ MustHaveGoBuild(t) + // filename must end with ".go" + if !strings.HasSuffix(filename, ".go") { + t.Fatalf("filename doesn't end in .go: %s", filename) + } + basename := filepath.Base(filename) + outname := filepath.Join(outdirname, basename[:len(basename)-2]+"o") + cmd := exec.Command("go", "tool", "compile", "-o", outname, filename) + cmd.Dir = dirname + out, err := cmd.CombinedOutput() + if err != nil { + t.Logf("%s", out) + t.Fatalf("go tool compile %s failed: %s", filename, err) + } + return outname +} + +func testPath(t *testing.T, path, srcDir string) *types.Package { + t0 := time.Now() + pkg, err := Import(make(map[string]*types.Package), path, srcDir, nil) + if err != nil { + t.Errorf("testPath(%s): %s", path, err) + return nil + } + t.Logf("testPath(%s): %v", path, time.Since(t0)) + return pkg +} + +const maxTime = 30 * time.Second + +func testDir(t *testing.T, dir string, endTime time.Time) (nimports int) { + dirname := filepath.Join(runtime.GOROOT(), "pkg", runtime.GOOS+"_"+runtime.GOARCH, dir) + list, err := ioutil.ReadDir(dirname) + if err != nil { + t.Fatalf("testDir(%s): %s", dirname, err) + } + for _, f := range list { + if time.Now().After(endTime) { + t.Log("testing time used up") + return + } + switch { + case !f.IsDir(): + // try extensions + for _, ext := range pkgExts { + if strings.HasSuffix(f.Name(), ext) { + name := f.Name()[0 : len(f.Name())-len(ext)] // remove extension + if testPath(t, filepath.Join(dir, name), dir) != nil { + nimports++ + } + } + 
} + case f.IsDir(): + nimports += testDir(t, filepath.Join(dir, f.Name()), endTime) + } + } + return +} + +func mktmpdir(t *testing.T) string { + tmpdir, err := ioutil.TempDir("", "gcimporter_test") + if err != nil { + t.Fatal("mktmpdir:", err) + } + if err := os.Mkdir(filepath.Join(tmpdir, "testdata"), 0700); err != nil { + os.RemoveAll(tmpdir) + t.Fatal("mktmpdir:", err) + } + return tmpdir +} + +const testfile = "exports.go" + +func TestImportTestdata(t *testing.T) { + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + tmpdir := mktmpdir(t) + defer os.RemoveAll(tmpdir) + + compile(t, "testdata", testfile, filepath.Join(tmpdir, "testdata")) + + // filename should end with ".go" + filename := testfile[:len(testfile)-3] + if pkg := testPath(t, "./testdata/"+filename, tmpdir); pkg != nil { + // The package's Imports list must include all packages + // explicitly imported by testfile, plus all packages + // referenced indirectly via exported objects in testfile. + // With the textual export format (when run against Go1.6), + // the list may also include additional packages that are + // not strictly required for import processing alone (they + // are exported to err "on the safe side"). + // For now, we just test the presence of a few packages + // that we know are there for sure. + got := fmt.Sprint(pkg.Imports()) + for _, want := range []string{"go/ast", "go/token"} { + if !strings.Contains(got, want) { + t.Errorf(`Package("exports").Imports() = %s, does not contain %s`, got, want) + } + } + } +} + +func TestVersionHandling(t *testing.T) { + skipSpecialPlatforms(t) // we really only need to exclude nacl platforms, but this is fine + + // This package only handles gc export data. 
+ if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + const dir = "./testdata/versions" + list, err := ioutil.ReadDir(dir) + if err != nil { + t.Fatal(err) + } + + tmpdir := mktmpdir(t) + defer os.RemoveAll(tmpdir) + corruptdir := filepath.Join(tmpdir, "testdata", "versions") + if err := os.Mkdir(corruptdir, 0700); err != nil { + t.Fatal(err) + } + + for _, f := range list { + name := f.Name() + if !strings.HasSuffix(name, ".a") { + continue // not a package file + } + if strings.Contains(name, "corrupted") { + continue // don't process a leftover corrupted file + } + pkgpath := "./" + name[:len(name)-2] + + if testing.Verbose() { + t.Logf("importing %s", name) + } + + // test that export data can be imported + _, err := Import(make(map[string]*types.Package), pkgpath, dir, nil) + if err != nil { + // ok to fail if it fails with a newer version error for select files + if strings.Contains(err.Error(), "newer version") { + switch name { + case "test_go1.11_999b.a", "test_go1.11_999i.a": + continue + } + // fall through + } + t.Errorf("import %q failed: %v", pkgpath, err) + continue + } + + // create file with corrupted export data + // 1) read file + data, err := ioutil.ReadFile(filepath.Join(dir, name)) + if err != nil { + t.Fatal(err) + } + // 2) find export data + i := bytes.Index(data, []byte("\n$$B\n")) + 5 + j := bytes.Index(data[i:], []byte("\n$$\n")) + i + if i < 0 || j < 0 || i > j { + t.Fatalf("export data section not found (i = %d, j = %d)", i, j) + } + // 3) corrupt the data (increment every 7th byte) + for k := j - 13; k >= i; k -= 7 { + data[k]++ + } + // 4) write the file + pkgpath += "_corrupted" + filename := filepath.Join(corruptdir, pkgpath) + ".a" + ioutil.WriteFile(filename, data, 0666) + + // test that importing the corrupted file results in an error + _, err = Import(make(map[string]*types.Package), pkgpath, corruptdir, nil) + if err == nil { + t.Errorf("import corrupted %q 
succeeded", pkgpath) + } else if msg := err.Error(); !strings.Contains(msg, "version skew") { + t.Errorf("import %q error incorrect (%s)", pkgpath, msg) + } + } +} + +func TestImportStdLib(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + dt := maxTime + if testing.Short() && /* testenv. */ Builder() == "" { + dt = 10 * time.Millisecond + } + nimports := testDir(t, "", time.Now().Add(dt)) // installed packages + t.Logf("tested %d imports", nimports) +} + +func TestIssue5815(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + pkg := importPkg(t, "strings", ".") + + scope := pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + if obj.Pkg() == nil { + t.Errorf("no pkg for %s", obj) + } + if tname, _ := obj.(*types.TypeName); tname != nil { + named := tname.Type().(*types.Named) + for i := 0; i < named.NumMethods(); i++ { + m := named.Method(i) + if m.Pkg() == nil { + t.Errorf("no pkg for %s", m) + } + } + } + } +} + +// Smoke test to ensure that imported methods get the correct package. +func TestCorrectMethodPackage(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. 
+ if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + imports := make(map[string]*types.Package) + _, err := Import(imports, "net/http", ".", nil) + if err != nil { + t.Fatal(err) + } + + mutex := imports["sync"].Scope().Lookup("Mutex").(*types.TypeName).Type() + mset := types.NewMethodSet(types.NewPointer(mutex)) // methods of *sync.Mutex + sel := mset.Lookup(nil, "Lock") + lock := sel.Obj().(*types.Func) + if got, want := lock.Pkg().Path(), "sync"; got != want { + t.Errorf("got package path %q; want %q", got, want) + } +} + +func TestIssue13566(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // On windows, we have to set the -D option for the compiler to avoid having a drive + // letter and an illegal ':' in the import path - just skip it (see also issue #3483). + if runtime.GOOS == "windows" { + t.Skip("avoid dealing with relative paths/drive letters on windows") + } + + tmpdir := mktmpdir(t) + defer os.RemoveAll(tmpdir) + testoutdir := filepath.Join(tmpdir, "testdata") + + // b.go needs to be compiled from the output directory so that the compiler can + // find the compiled package a. We pass the full path to compile() so that we + // don't have to copy the file to that directory. 
+ bpath, err := filepath.Abs(filepath.Join("testdata", "b.go")) + if err != nil { + t.Fatal(err) + } + compile(t, "testdata", "a.go", testoutdir) + compile(t, testoutdir, bpath, testoutdir) + + // import must succeed (test for issue at hand) + pkg := importPkg(t, "./testdata/b", tmpdir) + + // make sure all indirectly imported packages have names + for _, imp := range pkg.Imports() { + if imp.Name() == "" { + t.Errorf("no name for %s package", imp.Path()) + } + } +} + +func TestIssue13898(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // import go/internal/gcimporter which imports go/types partially + imports := make(map[string]*types.Package) + _, err := Import(imports, "go/internal/gcimporter", ".", nil) + if err != nil { + t.Fatal(err) + } + + // look for go/types package + var goTypesPkg *types.Package + for path, pkg := range imports { + if path == "go/types" { + goTypesPkg = pkg + break + } + } + if goTypesPkg == nil { + t.Fatal("go/types not found") + } + + // look for go/types.Object type + obj := lookupObj(t, goTypesPkg.Scope(), "Object") + typ, ok := obj.Type().(*types.Named) + if !ok { + t.Fatalf("go/types.Object type is %v; wanted named type", typ) + } + + // lookup go/types.Object.Pkg method + m, index, indirect := types.LookupFieldOrMethod(typ, false, nil, "Pkg") + if m == nil { + t.Fatalf("go/types.Object.Pkg not found (index = %v, indirect = %v)", index, indirect) + } + + // the method must belong to go/types + if m.Pkg().Path() != "go/types" { + t.Fatalf("found %v; want go/types", m.Pkg()) + } +} + +func TestIssue15517(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. 
+ if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // On windows, we have to set the -D option for the compiler to avoid having a drive + // letter and an illegal ':' in the import path - just skip it (see also issue #3483). + if runtime.GOOS == "windows" { + t.Skip("avoid dealing with relative paths/drive letters on windows") + } + + tmpdir := mktmpdir(t) + defer os.RemoveAll(tmpdir) + + compile(t, "testdata", "p.go", filepath.Join(tmpdir, "testdata")) + + // Multiple imports of p must succeed without redeclaration errors. + // We use an import path that's not cleaned up so that the eventual + // file path for the package is different from the package path; this + // will expose the error if it is present. + // + // (Issue: Both the textual and the binary importer used the file path + // of the package to be imported as key into the shared packages map. + // However, the binary importer then used the package path to identify + // the imported package to mark it as complete; effectively marking the + // wrong package as complete. By using an "unclean" package path, the + // file and package path are different, exposing the problem if present. + // The same issue occurs with vendoring.) + imports := make(map[string]*types.Package) + for i := 0; i < 3; i++ { + if _, err := Import(imports, "./././testdata/p", tmpdir, nil); err != nil { + t.Fatal(err) + } + } +} + +func TestIssue15920(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // On windows, we have to set the -D option for the compiler to avoid having a drive + // letter and an illegal ':' in the import path - just skip it (see also issue #3483). 
+ if runtime.GOOS == "windows" { + t.Skip("avoid dealing with relative paths/drive letters on windows") + } + + compileAndImportPkg(t, "issue15920") +} + +func TestIssue20046(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // On windows, we have to set the -D option for the compiler to avoid having a drive + // letter and an illegal ':' in the import path - just skip it (see also issue #3483). + if runtime.GOOS == "windows" { + t.Skip("avoid dealing with relative paths/drive letters on windows") + } + + // "./issue20046".V.M must exist + pkg := compileAndImportPkg(t, "issue20046") + obj := lookupObj(t, pkg.Scope(), "V") + if m, index, indirect := types.LookupFieldOrMethod(obj.Type(), false, nil, "M"); m == nil { + t.Fatalf("V.M not found (index = %v, indirect = %v)", index, indirect) + } +} + +func importPkg(t *testing.T, path, srcDir string) *types.Package { + pkg, err := Import(make(map[string]*types.Package), path, srcDir, nil) + if err != nil { + t.Fatal(err) + } + return pkg +} + +func compileAndImportPkg(t *testing.T, name string) *types.Package { + tmpdir := mktmpdir(t) + defer os.RemoveAll(tmpdir) + compile(t, "testdata", name+".go", filepath.Join(tmpdir, "testdata")) + return importPkg(t, "./testdata/"+name, tmpdir) +} + +func lookupObj(t *testing.T, scope *types.Scope, name string) types.Object { + if obj := scope.Lookup(name); obj != nil { + return obj + } + t.Fatalf("%s not found", name) + return nil +} diff --git a/internal/gcimporter/iexport.go b/internal/gcimporter/iexport.go new file mode 100644 index 00000000..be671c79 --- /dev/null +++ b/internal/gcimporter/iexport.go @@ -0,0 +1,723 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Indexed binary package export. 
+// This file was derived from $GOROOT/src/cmd/compile/internal/gc/iexport.go; +// see that file for specification of the format. + +// +build go1.11 + +package gcimporter + +import ( + "bytes" + "encoding/binary" + "go/ast" + "go/constant" + "go/token" + "go/types" + "io" + "math/big" + "reflect" + "sort" +) + +// Current indexed export format version. Increase with each format change. +// 0: Go1.11 encoding +const iexportVersion = 0 + +// IExportData returns the binary export data for pkg. +// If no file set is provided, position info will be missing. +func IExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) { + defer func() { + if e := recover(); e != nil { + if ierr, ok := e.(internalError); ok { + err = ierr + return + } + // Not an internal error; panic again. + panic(e) + } + }() + + p := iexporter{ + out: bytes.NewBuffer(nil), + fset: fset, + allPkgs: map[*types.Package]bool{}, + stringIndex: map[string]uint64{}, + declIndex: map[types.Object]uint64{}, + typIndex: map[types.Type]uint64{}, + } + + for i, pt := range predeclared() { + p.typIndex[pt] = uint64(i) + } + if len(p.typIndex) > predeclReserved { + panic(internalErrorf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved)) + } + + // Initialize work queue with exported declarations. + scope := pkg.Scope() + for _, name := range scope.Names() { + if ast.IsExported(name) { + p.pushDecl(scope.Lookup(name)) + } + } + + // Loop until no more work. + for !p.declTodo.empty() { + p.doDecl(p.declTodo.popHead()) + } + + // Append indices to data0 section. + dataLen := uint64(p.data0.Len()) + w := p.newWriter() + w.writeIndex(p.declIndex, pkg) + w.flush() + + // Assemble header. + var hdr intWriter + hdr.WriteByte('i') + hdr.uint64(iexportVersion) + hdr.uint64(uint64(p.strings.Len())) + hdr.uint64(dataLen) + + // Flush output. 
+ io.Copy(p.out, &hdr) + io.Copy(p.out, &p.strings) + io.Copy(p.out, &p.data0) + + return p.out.Bytes(), nil +} + +// writeIndex writes out an object index. mainIndex indicates whether +// we're writing out the main index, which is also read by +// non-compiler tools and includes a complete package description +// (i.e., name and height). +func (w *exportWriter) writeIndex(index map[types.Object]uint64, localpkg *types.Package) { + // Build a map from packages to objects from that package. + pkgObjs := map[*types.Package][]types.Object{} + + // For the main index, make sure to include every package that + // we reference, even if we're not exporting (or reexporting) + // any symbols from it. + pkgObjs[localpkg] = nil + for pkg := range w.p.allPkgs { + pkgObjs[pkg] = nil + } + + for obj := range index { + pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], obj) + } + + var pkgs []*types.Package + for pkg, objs := range pkgObjs { + pkgs = append(pkgs, pkg) + + sort.Slice(objs, func(i, j int) bool { + return objs[i].Name() < objs[j].Name() + }) + } + + sort.Slice(pkgs, func(i, j int) bool { + return pkgs[i].Path() < pkgs[j].Path() + }) + + w.uint64(uint64(len(pkgs))) + for _, pkg := range pkgs { + w.string(pkg.Path()) + w.string(pkg.Name()) + w.uint64(uint64(0)) // package height is not needed for go/types + + objs := pkgObjs[pkg] + w.uint64(uint64(len(objs))) + for _, obj := range objs { + w.string(obj.Name()) + w.uint64(index[obj]) + } + } +} + +type iexporter struct { + fset *token.FileSet + out *bytes.Buffer + + // allPkgs tracks all packages that have been referenced by + // the export data, so we can ensure to include them in the + // main index. + allPkgs map[*types.Package]bool + + declTodo objQueue + + strings intWriter + stringIndex map[string]uint64 + + data0 intWriter + declIndex map[types.Object]uint64 + typIndex map[types.Type]uint64 +} + +// stringOff returns the offset of s within the string section. +// If not already present, it's added to the end. 
+func (p *iexporter) stringOff(s string) uint64 { + off, ok := p.stringIndex[s] + if !ok { + off = uint64(p.strings.Len()) + p.stringIndex[s] = off + + p.strings.uint64(uint64(len(s))) + p.strings.WriteString(s) + } + return off +} + +// pushDecl adds n to the declaration work queue, if not already present. +func (p *iexporter) pushDecl(obj types.Object) { + // Package unsafe is known to the compiler and predeclared. + assert(obj.Pkg() != types.Unsafe) + + if _, ok := p.declIndex[obj]; ok { + return + } + + p.declIndex[obj] = ^uint64(0) // mark n present in work queue + p.declTodo.pushTail(obj) +} + +// exportWriter handles writing out individual data section chunks. +type exportWriter struct { + p *iexporter + + data intWriter + currPkg *types.Package + prevFile string + prevLine int64 +} + +func (p *iexporter) doDecl(obj types.Object) { + w := p.newWriter() + w.setPkg(obj.Pkg(), false) + + switch obj := obj.(type) { + case *types.Var: + w.tag('V') + w.pos(obj.Pos()) + w.typ(obj.Type(), obj.Pkg()) + + case *types.Func: + sig, _ := obj.Type().(*types.Signature) + if sig.Recv() != nil { + panic(internalErrorf("unexpected method: %v", sig)) + } + w.tag('F') + w.pos(obj.Pos()) + w.signature(sig) + + case *types.Const: + w.tag('C') + w.pos(obj.Pos()) + w.value(obj.Type(), obj.Val()) + + case *types.TypeName: + if obj.IsAlias() { + w.tag('A') + w.pos(obj.Pos()) + w.typ(obj.Type(), obj.Pkg()) + break + } + + // Defined type. 
+ w.tag('T') + w.pos(obj.Pos()) + + underlying := obj.Type().Underlying() + w.typ(underlying, obj.Pkg()) + + t := obj.Type() + if types.IsInterface(t) { + break + } + + named, ok := t.(*types.Named) + if !ok { + panic(internalErrorf("%s is not a defined type", t)) + } + + n := named.NumMethods() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + m := named.Method(i) + w.pos(m.Pos()) + w.string(m.Name()) + sig, _ := m.Type().(*types.Signature) + w.param(sig.Recv()) + w.signature(sig) + } + + default: + panic(internalErrorf("unexpected object: %v", obj)) + } + + p.declIndex[obj] = w.flush() +} + +func (w *exportWriter) tag(tag byte) { + w.data.WriteByte(tag) +} + +func (w *exportWriter) pos(pos token.Pos) { + p := w.p.fset.Position(pos) + file := p.Filename + line := int64(p.Line) + + // When file is the same as the last position (common case), + // we can save a few bytes by delta encoding just the line + // number. + // + // Note: Because data objects may be read out of order (or not + // at all), we can only apply delta encoding within a single + // object. This is handled implicitly by tracking prevFile and + // prevLine as fields of exportWriter. + + if file == w.prevFile { + delta := line - w.prevLine + w.int64(delta) + if delta == deltaNewFile { + w.int64(-1) + } + } else { + w.int64(deltaNewFile) + w.int64(line) // line >= 0 + w.string(file) + w.prevFile = file + } + w.prevLine = line +} + +func (w *exportWriter) pkg(pkg *types.Package) { + // Ensure any referenced packages are declared in the main index. + w.p.allPkgs[pkg] = true + + w.string(pkg.Path()) +} + +func (w *exportWriter) qualifiedIdent(obj types.Object) { + // Ensure any referenced declarations are written out too. 
+ w.p.pushDecl(obj) + + w.string(obj.Name()) + w.pkg(obj.Pkg()) +} + +func (w *exportWriter) typ(t types.Type, pkg *types.Package) { + w.data.uint64(w.p.typOff(t, pkg)) +} + +func (p *iexporter) newWriter() *exportWriter { + return &exportWriter{p: p} +} + +func (w *exportWriter) flush() uint64 { + off := uint64(w.p.data0.Len()) + io.Copy(&w.p.data0, &w.data) + return off +} + +func (p *iexporter) typOff(t types.Type, pkg *types.Package) uint64 { + off, ok := p.typIndex[t] + if !ok { + w := p.newWriter() + w.doTyp(t, pkg) + off = predeclReserved + w.flush() + p.typIndex[t] = off + } + return off +} + +func (w *exportWriter) startType(k itag) { + w.data.uint64(uint64(k)) +} + +func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { + switch t := t.(type) { + case *types.Named: + w.startType(definedType) + w.qualifiedIdent(t.Obj()) + + case *types.Pointer: + w.startType(pointerType) + w.typ(t.Elem(), pkg) + + case *types.Slice: + w.startType(sliceType) + w.typ(t.Elem(), pkg) + + case *types.Array: + w.startType(arrayType) + w.uint64(uint64(t.Len())) + w.typ(t.Elem(), pkg) + + case *types.Chan: + w.startType(chanType) + // 1 RecvOnly; 2 SendOnly; 3 SendRecv + var dir uint64 + switch t.Dir() { + case types.RecvOnly: + dir = 1 + case types.SendOnly: + dir = 2 + case types.SendRecv: + dir = 3 + } + w.uint64(dir) + w.typ(t.Elem(), pkg) + + case *types.Map: + w.startType(mapType) + w.typ(t.Key(), pkg) + w.typ(t.Elem(), pkg) + + case *types.Signature: + w.startType(signatureType) + w.setPkg(pkg, true) + w.signature(t) + + case *types.Struct: + w.startType(structType) + w.setPkg(pkg, true) + + n := t.NumFields() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + f := t.Field(i) + w.pos(f.Pos()) + w.string(f.Name()) + w.typ(f.Type(), pkg) + w.bool(f.Embedded()) + w.string(t.Tag(i)) // note (or tag) + } + + case *types.Interface: + w.startType(interfaceType) + w.setPkg(pkg, true) + + n := t.NumEmbeddeds() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + f := 
t.Embedded(i) + w.pos(f.Obj().Pos()) + w.typ(f.Obj().Type(), f.Obj().Pkg()) + } + + n = t.NumExplicitMethods() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + m := t.ExplicitMethod(i) + w.pos(m.Pos()) + w.string(m.Name()) + sig, _ := m.Type().(*types.Signature) + w.signature(sig) + } + + default: + panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t))) + } +} + +func (w *exportWriter) setPkg(pkg *types.Package, write bool) { + if write { + w.pkg(pkg) + } + + w.currPkg = pkg +} + +func (w *exportWriter) signature(sig *types.Signature) { + w.paramList(sig.Params()) + w.paramList(sig.Results()) + if sig.Params().Len() > 0 { + w.bool(sig.Variadic()) + } +} + +func (w *exportWriter) paramList(tup *types.Tuple) { + n := tup.Len() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + w.param(tup.At(i)) + } +} + +func (w *exportWriter) param(obj types.Object) { + w.pos(obj.Pos()) + w.localIdent(obj) + w.typ(obj.Type(), obj.Pkg()) +} + +func (w *exportWriter) value(typ types.Type, v constant.Value) { + w.typ(typ, nil) + + switch v.Kind() { + case constant.Bool: + w.bool(constant.BoolVal(v)) + case constant.Int: + var i big.Int + if i64, exact := constant.Int64Val(v); exact { + i.SetInt64(i64) + } else if ui64, exact := constant.Uint64Val(v); exact { + i.SetUint64(ui64) + } else { + i.SetString(v.ExactString(), 10) + } + w.mpint(&i, typ) + case constant.Float: + f := constantToFloat(v) + w.mpfloat(f, typ) + case constant.Complex: + w.mpfloat(constantToFloat(constant.Real(v)), typ) + w.mpfloat(constantToFloat(constant.Imag(v)), typ) + case constant.String: + w.string(constant.StringVal(v)) + case constant.Unknown: + // package contains type errors + default: + panic(internalErrorf("unexpected value %v (%T)", v, v)) + } +} + +// constantToFloat converts a constant.Value with kind constant.Float to a +// big.Float. 
+func constantToFloat(x constant.Value) *big.Float { + assert(x.Kind() == constant.Float) + // Use the same floating-point precision (512) as cmd/compile + // (see Mpprec in cmd/compile/internal/gc/mpfloat.go). + const mpprec = 512 + var f big.Float + f.SetPrec(mpprec) + if v, exact := constant.Float64Val(x); exact { + // float64 + f.SetFloat64(v) + } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { + // TODO(gri): add big.Rat accessor to constant.Value. + n := valueToRat(num) + d := valueToRat(denom) + f.SetRat(n.Quo(n, d)) + } else { + // Value too large to represent as a fraction => inaccessible. + // TODO(gri): add big.Float accessor to constant.Value. + _, ok := f.SetString(x.ExactString()) + assert(ok) + } + return &f +} + +// mpint exports a multi-precision integer. +// +// For unsigned types, small values are written out as a single +// byte. Larger values are written out as a length-prefixed big-endian +// byte string, where the length prefix is encoded as its complement. +// For example, bytes 0, 1, and 2 directly represent the integer +// values 0, 1, and 2; while bytes 255, 254, and 253 indicate a 1-, +// 2-, and 3-byte big-endian string follow. +// +// Encoding for signed types use the same general approach as for +// unsigned types, except small values use zig-zag encoding and the +// bottom bit of length prefix byte for large values is reserved as a +// sign bit. +// +// The exact boundary between small and large encodings varies +// according to the maximum number of bytes needed to encode a value +// of type typ. As a special case, 8-bit types are always encoded as a +// single byte. +// +// TODO(mdempsky): Is this level of complexity really worthwhile? 
+func (w *exportWriter) mpint(x *big.Int, typ types.Type) { + basic, ok := typ.Underlying().(*types.Basic) + if !ok { + panic(internalErrorf("unexpected type %v (%T)", typ.Underlying(), typ.Underlying())) + } + + signed, maxBytes := intSize(basic) + + negative := x.Sign() < 0 + if !signed && negative { + panic(internalErrorf("negative unsigned integer; type %v, value %v", typ, x)) + } + + b := x.Bytes() + if len(b) > 0 && b[0] == 0 { + panic(internalErrorf("leading zeros")) + } + if uint(len(b)) > maxBytes { + panic(internalErrorf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x)) + } + + maxSmall := 256 - maxBytes + if signed { + maxSmall = 256 - 2*maxBytes + } + if maxBytes == 1 { + maxSmall = 256 + } + + // Check if x can use small value encoding. + if len(b) <= 1 { + var ux uint + if len(b) == 1 { + ux = uint(b[0]) + } + if signed { + ux <<= 1 + if negative { + ux-- + } + } + if ux < maxSmall { + w.data.WriteByte(byte(ux)) + return + } + } + + n := 256 - uint(len(b)) + if signed { + n = 256 - 2*uint(len(b)) + if negative { + n |= 1 + } + } + if n < maxSmall || n >= 256 { + panic(internalErrorf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n)) + } + + w.data.WriteByte(byte(n)) + w.data.Write(b) +} + +// mpfloat exports a multi-precision floating point number. +// +// The number's value is decomposed into mantissa × 2**exponent, where +// mantissa is an integer. The value is written out as mantissa (as a +// multi-precision integer) and then the exponent, except exponent is +// omitted if mantissa is zero. +func (w *exportWriter) mpfloat(f *big.Float, typ types.Type) { + if f.IsInf() { + panic("infinite constant") + } + + // Break into f = mant × 2**exp, with 0.5 <= mant < 1. + var mant big.Float + exp := int64(f.MantExp(&mant)) + + // Scale so that mant is an integer. 
+ prec := mant.MinPrec() + mant.SetMantExp(&mant, int(prec)) + exp -= int64(prec) + + manti, acc := mant.Int(nil) + if acc != big.Exact { + panic(internalErrorf("mantissa scaling failed for %f (%s)", f, acc)) + } + w.mpint(manti, typ) + if manti.Sign() != 0 { + w.int64(exp) + } +} + +func (w *exportWriter) bool(b bool) bool { + var x uint64 + if b { + x = 1 + } + w.uint64(x) + return b +} + +func (w *exportWriter) int64(x int64) { w.data.int64(x) } +func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) } +func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) } + +func (w *exportWriter) localIdent(obj types.Object) { + // Anonymous parameters. + if obj == nil { + w.string("") + return + } + + name := obj.Name() + if name == "_" { + w.string("_") + return + } + + w.string(name) +} + +type intWriter struct { + bytes.Buffer +} + +func (w *intWriter) int64(x int64) { + var buf [binary.MaxVarintLen64]byte + n := binary.PutVarint(buf[:], x) + w.Write(buf[:n]) +} + +func (w *intWriter) uint64(x uint64) { + var buf [binary.MaxVarintLen64]byte + n := binary.PutUvarint(buf[:], x) + w.Write(buf[:n]) +} + +func assert(cond bool) { + if !cond { + panic("internal error: assertion failed") + } +} + +// The below is copied from go/src/cmd/compile/internal/gc/syntax.go. + +// objQueue is a FIFO queue of types.Object. The zero value of objQueue is +// a ready-to-use empty queue. +type objQueue struct { + ring []types.Object + head, tail int +} + +// empty returns true if q contains no Nodes. +func (q *objQueue) empty() bool { + return q.head == q.tail +} + +// pushTail appends n to the tail of the queue. +func (q *objQueue) pushTail(obj types.Object) { + if len(q.ring) == 0 { + q.ring = make([]types.Object, 16) + } else if q.head+len(q.ring) == q.tail { + // Grow the ring. + nring := make([]types.Object, len(q.ring)*2) + // Copy the old elements. 
+ part := q.ring[q.head%len(q.ring):] + if q.tail-q.head <= len(part) { + part = part[:q.tail-q.head] + copy(nring, part) + } else { + pos := copy(nring, part) + copy(nring[pos:], q.ring[:q.tail%len(q.ring)]) + } + q.ring, q.head, q.tail = nring, 0, q.tail-q.head + } + + q.ring[q.tail%len(q.ring)] = obj + q.tail++ +} + +// popHead pops a node from the head of the queue. It panics if q is empty. +func (q *objQueue) popHead() types.Object { + if q.empty() { + panic("dequeue empty") + } + obj := q.ring[q.head%len(q.ring)] + q.head++ + return obj +} diff --git a/internal/gcimporter/iexport_test.go b/internal/gcimporter/iexport_test.go new file mode 100644 index 00000000..c3cd3efc --- /dev/null +++ b/internal/gcimporter/iexport_test.go @@ -0,0 +1,309 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This is a copy of bexport_test.go for iexport.go. + +//go:build go1.11 +// +build go1.11 + +package gcimporter_test + +import ( + "fmt" + "go/ast" + "go/build" + "go/constant" + "go/parser" + "go/token" + "go/types" + "math/big" + "reflect" + "runtime" + "sort" + "strings" + "testing" + + "github.com/visualfc/gocode/internal/gcimporter" + "golang.org/x/tools/go/buildutil" + "golang.org/x/tools/go/loader" +) + +func TestIExportData_stdlib(t *testing.T) { + if runtime.Compiler == "gccgo" { + t.Skip("gccgo standard library is inaccessible") + } + if runtime.GOOS == "android" { + t.Skipf("incomplete std lib on %s", runtime.GOOS) + } + if isRace { + t.Skipf("stdlib tests take too long in race mode and flake on builders") + } + + // Load, parse and type-check the program. 
+ ctxt := build.Default // copy + ctxt.GOPATH = "" // disable GOPATH + conf := loader.Config{ + Build: &ctxt, + AllowErrors: true, + } + for _, path := range buildutil.AllPackages(conf.Build) { + conf.Import(path) + } + + // Create a package containing type and value errors to ensure + // they are properly encoded/decoded. + f, err := conf.ParseFile("haserrors/haserrors.go", `package haserrors +const UnknownValue = "" + 0 +type UnknownType undefined +`) + if err != nil { + t.Fatal(err) + } + conf.CreateFromFiles("haserrors", f) + + prog, err := conf.Load() + if err != nil { + t.Fatalf("Load failed: %v", err) + } + + numPkgs := len(prog.AllPackages) + if want := 248; numPkgs < want { + t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want) + } + + var sorted []*types.Package + for pkg := range prog.AllPackages { + sorted = append(sorted, pkg) + } + sort.Slice(sorted, func(i, j int) bool { + return sorted[i].Path() < sorted[j].Path() + }) + + for _, pkg := range sorted { + info := prog.AllPackages[pkg] + if info.Files == nil { + continue // empty directory + } + exportdata, err := gcimporter.IExportData(conf.Fset, pkg) + if err != nil { + t.Fatal(err) + } + if exportdata[0] == 'i' { + exportdata = exportdata[1:] // trim the 'i' in the header + } else { + t.Fatalf("unexpected first character of export data: %v", exportdata[0]) + } + + imports := make(map[string]*types.Package) + fset2 := token.NewFileSet() + n, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path()) + if err != nil { + t.Errorf("IImportData(%s): %v", pkg.Path(), err) + continue + } + if n != len(exportdata) { + t.Errorf("IImportData(%s) decoded %d bytes, want %d", + pkg.Path(), n, len(exportdata)) + } + + // Compare the packages' corresponding members. 
+ for _, name := range pkg.Scope().Names() { + if !ast.IsExported(name) { + continue + } + obj1 := pkg.Scope().Lookup(name) + obj2 := pkg2.Scope().Lookup(name) + if obj2 == nil { + t.Fatalf("%s.%s not found, want %s", pkg.Path(), name, obj1) + continue + } + + fl1 := fileLine(conf.Fset, obj1) + fl2 := fileLine(fset2, obj2) + if fl1 != fl2 { + t.Errorf("%s.%s: got posn %s, want %s", + pkg.Path(), name, fl2, fl1) + } + + if err := cmpObj(obj1, obj2); err != nil { + t.Errorf("%s.%s: %s\ngot: %s\nwant: %s", + pkg.Path(), name, err, obj2, obj1) + } + } + } +} + +// TestVeryLongFile tests the position of an import object declared in +// a very long input file. Line numbers greater than maxlines are +// reported as line 1, not garbage or token.NoPos. +func TestIExportData_long(t *testing.T) { + // parse and typecheck + longFile := "package foo" + strings.Repeat("\n", 123456) + "var X int" + fset1 := token.NewFileSet() + f, err := parser.ParseFile(fset1, "foo.go", longFile, 0) + if err != nil { + t.Fatal(err) + } + var conf types.Config + pkg, err := conf.Check("foo", fset1, []*ast.File{f}, nil) + if err != nil { + t.Fatal(err) + } + + // export + exportdata, err := gcimporter.IExportData(fset1, pkg) + if err != nil { + t.Fatal(err) + } + if exportdata[0] == 'i' { + exportdata = exportdata[1:] // trim the 'i' in the header + } else { + t.Fatalf("unexpected first character of export data: %v", exportdata[0]) + } + + // import + imports := make(map[string]*types.Package) + fset2 := token.NewFileSet() + _, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path()) + if err != nil { + t.Fatalf("IImportData(%s): %v", pkg.Path(), err) + } + + // compare + posn1 := fset1.Position(pkg.Scope().Lookup("X").Pos()) + posn2 := fset2.Position(pkg2.Scope().Lookup("X").Pos()) + if want := "foo.go:1:1"; posn2.String() != want { + t.Errorf("X position = %s, want %s (orig was %s)", + posn2, want, posn1) + } +} + +func TestIExportData_typealiases(t *testing.T) { + // parse 
and typecheck + fset1 := token.NewFileSet() + f, err := parser.ParseFile(fset1, "p.go", src, 0) + if err != nil { + t.Fatal(err) + } + var conf types.Config + pkg1, err := conf.Check("p", fset1, []*ast.File{f}, nil) + if err == nil { + // foo in undeclared in src; we should see an error + t.Fatal("invalid source type-checked without error") + } + if pkg1 == nil { + // despite incorrect src we should see a (partially) type-checked package + t.Fatal("nil package returned") + } + checkPkg(t, pkg1, "export") + + // export + exportdata, err := gcimporter.IExportData(fset1, pkg1) + if err != nil { + t.Fatal(err) + } + if exportdata[0] == 'i' { + exportdata = exportdata[1:] // trim the 'i' in the header + } else { + t.Fatalf("unexpected first character of export data: %v", exportdata[0]) + } + + // import + imports := make(map[string]*types.Package) + fset2 := token.NewFileSet() + _, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg1.Path()) + if err != nil { + t.Fatalf("IImportData(%s): %v", pkg1.Path(), err) + } + checkPkg(t, pkg2, "import") +} + +// cmpObj reports how x and y differ. They are assumed to belong to different +// universes so cannot be compared directly. It is an adapted version of +// equalObj in bexport_test.go. +func cmpObj(x, y types.Object) error { + if reflect.TypeOf(x) != reflect.TypeOf(y) { + return fmt.Errorf("%T vs %T", x, y) + } + xt := x.Type() + yt := y.Type() + switch x.(type) { + case *types.Var, *types.Func: + // ok + case *types.Const: + xval := x.(*types.Const).Val() + yval := y.(*types.Const).Val() + equal := constant.Compare(xval, token.EQL, yval) + if !equal { + // try approx. 
comparison + xkind := xval.Kind() + ykind := yval.Kind() + if xkind == constant.Complex || ykind == constant.Complex { + equal = same(constant.Real(xval), constant.Real(yval)) && + same(constant.Imag(xval), constant.Imag(yval)) + } else if xkind == constant.Float || ykind == constant.Float { + equal = same(xval, yval) + } else if xkind == constant.Unknown && ykind == constant.Unknown { + equal = true + } + } + if !equal { + return fmt.Errorf("unequal constants %s vs %s", xval, yval) + } + case *types.TypeName: + xt = xt.Underlying() + yt = yt.Underlying() + default: + return fmt.Errorf("unexpected %T", x) + } + return equalType(xt, yt) +} + +// Use the same floating-point precision (512) as cmd/compile +// (see Mpprec in cmd/compile/internal/gc/mpfloat.go). +const mpprec = 512 + +// same compares non-complex numeric values and reports if they are approximately equal. +func same(x, y constant.Value) bool { + xf := constantToFloat(x) + yf := constantToFloat(y) + d := new(big.Float).Sub(xf, yf) + d.Abs(d) + eps := big.NewFloat(1.0 / (1 << (mpprec - 1))) // allow for 1 bit of error + return d.Cmp(eps) < 0 +} + +// copy of the function with the same name in iexport.go. +func constantToFloat(x constant.Value) *big.Float { + var f big.Float + f.SetPrec(mpprec) + if v, exact := constant.Float64Val(x); exact { + // float64 + f.SetFloat64(v) + } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { + // TODO(gri): add big.Rat accessor to constant.Value. + n := valueToRat(num) + d := valueToRat(denom) + f.SetRat(n.Quo(n, d)) + } else { + // Value too large to represent as a fraction => inaccessible. + // TODO(gri): add big.Float accessor to constant.Value. + _, ok := f.SetString(x.ExactString()) + if !ok { + panic("should not reach here") + } + } + return &f +} + +// copy of the function with the same name in iexport.go. +func valueToRat(x constant.Value) *big.Rat { + // Convert little-endian to big-endian. 
+ // I can't believe this is necessary. + bytes := constant.Bytes(x) + for i := 0; i < len(bytes)/2; i++ { + bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] + } + return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) +} diff --git a/internal/gcimporter/iimport.go b/internal/gcimporter/iimport.go new file mode 100644 index 00000000..3cb7ae5b --- /dev/null +++ b/internal/gcimporter/iimport.go @@ -0,0 +1,606 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Indexed package import. +// See cmd/compile/internal/gc/iexport.go for the export data format. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go. + +package gcimporter + +import ( + "bytes" + "encoding/binary" + "fmt" + "go/constant" + "go/token" + "go/types" + "io" + "sort" +) + +type intReader struct { + *bytes.Reader + path string +} + +func (r *intReader) int64() int64 { + i, err := binary.ReadVarint(r.Reader) + if err != nil { + errorf("import %q: read varint error: %v", r.path, err) + } + return i +} + +func (r *intReader) uint64() uint64 { + i, err := binary.ReadUvarint(r.Reader) + if err != nil { + errorf("import %q: read varint error: %v", r.path, err) + } + return i +} + +const predeclReserved = 32 + +type itag uint64 + +const ( + // Types + definedType itag = iota + pointerType + sliceType + arrayType + chanType + mapType + signatureType + structType + interfaceType +) + +// IImportData imports a package from the serialized package data +// and returns the number of bytes consumed and a reference to the package. +// If the export data version is not recognized or the format is otherwise +// compromised, an error is returned. 
+func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { + const currentVersion = 0 + version := -1 + defer func() { + if e := recover(); e != nil { + if version > currentVersion { + err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e) + } else { + err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e) + } + } + }() + + r := &intReader{bytes.NewReader(data), path} + + version = int(r.uint64()) + switch version { + case currentVersion: + default: + errorf("unknown iexport format version %d", version) + } + + sLen := int64(r.uint64()) + dLen := int64(r.uint64()) + + whence, _ := r.Seek(0, io.SeekCurrent) + stringData := data[whence : whence+sLen] + declData := data[whence+sLen : whence+sLen+dLen] + r.Seek(sLen+dLen, io.SeekCurrent) + + p := iimporter{ + ipath: path, + + stringData: stringData, + stringCache: make(map[uint64]string), + pkgCache: make(map[uint64]*types.Package), + + declData: declData, + pkgIndex: make(map[*types.Package]map[string]uint64), + typCache: make(map[uint64]types.Type), + + fake: fakeFileSet{ + fset: fset, + files: make(map[string]*token.File), + }, + } + + for i, pt := range predeclared() { + p.typCache[uint64(i)] = pt + } + + pkgList := make([]*types.Package, r.uint64()) + for i := range pkgList { + pkgPathOff := r.uint64() + pkgPath := p.stringAt(pkgPathOff) + pkgName := p.stringAt(r.uint64()) + _ = r.uint64() // package height; unused by go/types + + if pkgPath == "" { + pkgPath = path + } + pkg := imports[pkgPath] + if pkg == nil { + pkg = types.NewPackage(pkgPath, pkgName) + imports[pkgPath] = pkg + } else if pkg.Name() != pkgName { + errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path) + } + + p.pkgCache[pkgPathOff] = pkg + + nameIndex := make(map[string]uint64) + for nSyms := r.uint64(); nSyms > 0; nSyms-- { + name := p.stringAt(r.uint64()) + 
nameIndex[name] = r.uint64() + } + + p.pkgIndex[pkg] = nameIndex + pkgList[i] = pkg + } + var localpkg *types.Package + for _, pkg := range pkgList { + if pkg.Path() == path { + localpkg = pkg + } + } + + names := make([]string, 0, len(p.pkgIndex[localpkg])) + for name := range p.pkgIndex[localpkg] { + names = append(names, name) + } + sort.Strings(names) + for _, name := range names { + p.doDecl(localpkg, name) + } + + for _, typ := range p.interfaceList { + typ.Complete() + } + + // record all referenced packages as imports + list := append(([]*types.Package)(nil), pkgList[1:]...) + sort.Sort(byPath(list)) + localpkg.SetImports(list) + + // package was imported completely and without errors + localpkg.MarkComplete() + + consumed, _ := r.Seek(0, io.SeekCurrent) + return int(consumed), localpkg, nil +} + +type iimporter struct { + ipath string + + stringData []byte + stringCache map[uint64]string + pkgCache map[uint64]*types.Package + + declData []byte + pkgIndex map[*types.Package]map[string]uint64 + typCache map[uint64]types.Type + + fake fakeFileSet + interfaceList []*types.Interface +} + +func (p *iimporter) doDecl(pkg *types.Package, name string) { + // See if we've already imported this declaration. 
+ if obj := pkg.Scope().Lookup(name); obj != nil { + return + } + + off, ok := p.pkgIndex[pkg][name] + if !ok { + errorf("%v.%v not in index", pkg, name) + } + + r := &importReader{p: p, currPkg: pkg} + r.declReader.Reset(p.declData[off:]) + + r.obj(name) +} + +func (p *iimporter) stringAt(off uint64) string { + if s, ok := p.stringCache[off]; ok { + return s + } + + slen, n := binary.Uvarint(p.stringData[off:]) + if n <= 0 { + errorf("varint failed") + } + spos := off + uint64(n) + s := string(p.stringData[spos : spos+slen]) + p.stringCache[off] = s + return s +} + +func (p *iimporter) pkgAt(off uint64) *types.Package { + if pkg, ok := p.pkgCache[off]; ok { + return pkg + } + path := p.stringAt(off) + errorf("missing package %q in %q", path, p.ipath) + return nil +} + +func (p *iimporter) typAt(off uint64, base *types.Named) types.Type { + if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) { + return t + } + + if off < predeclReserved { + errorf("predeclared type missing from cache: %v", off) + } + + r := &importReader{p: p} + r.declReader.Reset(p.declData[off-predeclReserved:]) + t := r.doType(base) + + if base == nil || !isInterface(t) { + p.typCache[off] = t + } + return t +} + +type importReader struct { + p *iimporter + declReader bytes.Reader + currPkg *types.Package + prevFile string + prevLine int64 +} + +func (r *importReader) obj(name string) { + tag := r.byte() + pos := r.pos() + + switch tag { + case 'A': + typ := r.typ() + + r.declare(types.NewTypeName(pos, r.currPkg, name, typ)) + + case 'C': + typ, val := r.value() + + r.declare(types.NewConst(pos, r.currPkg, name, typ, val)) + + case 'F': + sig := r.signature(nil) + + r.declare(types.NewFunc(pos, r.currPkg, name, sig)) + + case 'T': + // Types can be recursive. We need to setup a stub + // declaration before recursing. 
+ obj := types.NewTypeName(pos, r.currPkg, name, nil) + named := types.NewNamed(obj, nil, nil) + r.declare(obj) + + underlying := r.p.typAt(r.uint64(), named).Underlying() + named.SetUnderlying(underlying) + + if !isInterface(underlying) { + for n := r.uint64(); n > 0; n-- { + mpos := r.pos() + mname := r.ident() + recv := r.param() + msig := r.signature(recv) + + named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig)) + } + } + + case 'V': + typ := r.typ() + + r.declare(types.NewVar(pos, r.currPkg, name, typ)) + + default: + errorf("unexpected tag: %v", tag) + } +} + +func (r *importReader) declare(obj types.Object) { + obj.Pkg().Scope().Insert(obj) +} + +func (r *importReader) value() (typ types.Type, val constant.Value) { + typ = r.typ() + + switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType { + case types.IsBoolean: + val = constant.MakeBool(r.bool()) + + case types.IsString: + val = constant.MakeString(r.string()) + + case types.IsInteger: + val = r.mpint(b) + + case types.IsFloat: + val = r.mpfloat(b) + + case types.IsComplex: + re := r.mpfloat(b) + im := r.mpfloat(b) + val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + + default: + if b.Kind() == types.Invalid { + val = constant.MakeUnknown() + return + } + errorf("unexpected type %v", typ) // panics + panic("unreachable") + } + + return +} + +func intSize(b *types.Basic) (signed bool, maxBytes uint) { + if (b.Info() & types.IsUntyped) != 0 { + return true, 64 + } + + switch b.Kind() { + case types.Float32, types.Complex64: + return true, 3 + case types.Float64, types.Complex128: + return true, 7 + } + + signed = (b.Info() & types.IsUnsigned) == 0 + switch b.Kind() { + case types.Int8, types.Uint8: + maxBytes = 1 + case types.Int16, types.Uint16: + maxBytes = 2 + case types.Int32, types.Uint32: + maxBytes = 4 + default: + maxBytes = 8 + } + + return +} + +func (r *importReader) mpint(b *types.Basic) constant.Value { + signed, maxBytes := intSize(b) + + maxSmall := 256 
- maxBytes + if signed { + maxSmall = 256 - 2*maxBytes + } + if maxBytes == 1 { + maxSmall = 256 + } + + n, _ := r.declReader.ReadByte() + if uint(n) < maxSmall { + v := int64(n) + if signed { + v >>= 1 + if n&1 != 0 { + v = ^v + } + } + return constant.MakeInt64(v) + } + + v := -n + if signed { + v = -(n &^ 1) >> 1 + } + if v < 1 || uint(v) > maxBytes { + errorf("weird decoding: %v, %v => %v", n, signed, v) + } + + buf := make([]byte, v) + io.ReadFull(&r.declReader, buf) + + // convert to little endian + // TODO(gri) go/constant should have a more direct conversion function + // (e.g., once it supports a big.Float based implementation) + for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 { + buf[i], buf[j] = buf[j], buf[i] + } + + x := constant.MakeFromBytes(buf) + if signed && n&1 != 0 { + x = constant.UnaryOp(token.SUB, x, 0) + } + return x +} + +func (r *importReader) mpfloat(b *types.Basic) constant.Value { + x := r.mpint(b) + if constant.Sign(x) == 0 { + return x + } + + exp := r.int64() + switch { + case exp > 0: + x = constant.Shift(x, token.SHL, uint(exp)) + case exp < 0: + d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp)) + x = constant.BinaryOp(x, token.QUO, d) + } + return x +} + +func (r *importReader) ident() string { + return r.string() +} + +func (r *importReader) qualifiedIdent() (*types.Package, string) { + name := r.string() + pkg := r.pkg() + return pkg, name +} + +func (r *importReader) pos() token.Pos { + delta := r.int64() + if delta != deltaNewFile { + r.prevLine += delta + } else if l := r.int64(); l == -1 { + r.prevLine += deltaNewFile + } else { + r.prevFile = r.string() + r.prevLine = l + } + + if r.prevFile == "" && r.prevLine == 0 { + return token.NoPos + } + + return r.p.fake.pos(r.prevFile, int(r.prevLine)) +} + +func (r *importReader) typ() types.Type { + return r.p.typAt(r.uint64(), nil) +} + +func isInterface(t types.Type) bool { + _, ok := t.(*types.Interface) + return ok +} + +func (r *importReader) pkg() 
*types.Package { return r.p.pkgAt(r.uint64()) } +func (r *importReader) string() string { return r.p.stringAt(r.uint64()) } + +func (r *importReader) doType(base *types.Named) types.Type { + switch k := r.kind(); k { + default: + errorf("unexpected kind tag in %q: %v", r.p.ipath, k) + return nil + + case definedType: + pkg, name := r.qualifiedIdent() + r.p.doDecl(pkg, name) + return pkg.Scope().Lookup(name).(*types.TypeName).Type() + case pointerType: + return types.NewPointer(r.typ()) + case sliceType: + return types.NewSlice(r.typ()) + case arrayType: + n := r.uint64() + return types.NewArray(r.typ(), int64(n)) + case chanType: + dir := chanDir(int(r.uint64())) + return types.NewChan(dir, r.typ()) + case mapType: + return types.NewMap(r.typ(), r.typ()) + case signatureType: + r.currPkg = r.pkg() + return r.signature(nil) + + case structType: + r.currPkg = r.pkg() + + fields := make([]*types.Var, r.uint64()) + tags := make([]string, len(fields)) + for i := range fields { + fpos := r.pos() + fname := r.ident() + ftyp := r.typ() + emb := r.bool() + tag := r.string() + + fields[i] = types.NewField(fpos, r.currPkg, fname, ftyp, emb) + tags[i] = tag + } + return types.NewStruct(fields, tags) + + case interfaceType: + r.currPkg = r.pkg() + + embeddeds := make([]types.Type, r.uint64()) + for i := range embeddeds { + _ = r.pos() + embeddeds[i] = r.typ() + } + + methods := make([]*types.Func, r.uint64()) + for i := range methods { + mpos := r.pos() + mname := r.ident() + + // TODO(mdempsky): Matches bimport.go, but I + // don't agree with this. 
+ var recv *types.Var + if base != nil { + recv = types.NewVar(token.NoPos, r.currPkg, "", base) + } + + msig := r.signature(recv) + methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig) + } + + typ := newInterface(methods, embeddeds) + r.p.interfaceList = append(r.p.interfaceList, typ) + return typ + } +} + +func (r *importReader) kind() itag { + return itag(r.uint64()) +} + +func (r *importReader) signature(recv *types.Var) *types.Signature { + params := r.paramList() + results := r.paramList() + variadic := params.Len() > 0 && r.bool() + return types.NewSignature(recv, params, results, variadic) +} + +func (r *importReader) paramList() *types.Tuple { + xs := make([]*types.Var, r.uint64()) + for i := range xs { + xs[i] = r.param() + } + return types.NewTuple(xs...) +} + +func (r *importReader) param() *types.Var { + pos := r.pos() + name := r.ident() + typ := r.typ() + return types.NewParam(pos, r.currPkg, name, typ) +} + +func (r *importReader) bool() bool { + return r.uint64() != 0 +} + +func (r *importReader) int64() int64 { + n, err := binary.ReadVarint(&r.declReader) + if err != nil { + errorf("readVarint: %v", err) + } + return n +} + +func (r *importReader) uint64() uint64 { + n, err := binary.ReadUvarint(&r.declReader) + if err != nil { + errorf("readUvarint: %v", err) + } + return n +} + +func (r *importReader) byte() byte { + x, err := r.declReader.ReadByte() + if err != nil { + errorf("declReader.ReadByte: %v", err) + } + return x +} diff --git a/internal/gcimporter/israce_test.go b/internal/gcimporter/israce_test.go new file mode 100644 index 00000000..af8e52b2 --- /dev/null +++ b/internal/gcimporter/israce_test.go @@ -0,0 +1,11 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build race + +package gcimporter_test + +func init() { + isRace = true +} diff --git a/internal/gcimporter/newInterface10.go b/internal/gcimporter/newInterface10.go new file mode 100644 index 00000000..463f2522 --- /dev/null +++ b/internal/gcimporter/newInterface10.go @@ -0,0 +1,21 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !go1.11 + +package gcimporter + +import "go/types" + +func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface { + named := make([]*types.Named, len(embeddeds)) + for i, e := range embeddeds { + var ok bool + named[i], ok = e.(*types.Named) + if !ok { + panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11") + } + } + return types.NewInterface(methods, named) +} diff --git a/internal/gcimporter/newInterface11.go b/internal/gcimporter/newInterface11.go new file mode 100644 index 00000000..ab28b95c --- /dev/null +++ b/internal/gcimporter/newInterface11.go @@ -0,0 +1,13 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.11 + +package gcimporter + +import "go/types" + +func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface { + return types.NewInterfaceType(methods, embeddeds) +} diff --git a/internal/gcimporter/testdata/a.go b/internal/gcimporter/testdata/a.go new file mode 100644 index 00000000..56e4292c --- /dev/null +++ b/internal/gcimporter/testdata/a.go @@ -0,0 +1,14 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Input for TestIssue13566 + +package a + +import "encoding/json" + +type A struct { + a *A + json json.RawMessage +} diff --git a/internal/gcimporter/testdata/b.go b/internal/gcimporter/testdata/b.go new file mode 100644 index 00000000..41966782 --- /dev/null +++ b/internal/gcimporter/testdata/b.go @@ -0,0 +1,11 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Input for TestIssue13566 + +package b + +import "./a" + +type A a.A diff --git a/internal/gcimporter/testdata/exports.go b/internal/gcimporter/testdata/exports.go new file mode 100644 index 00000000..8ee28b09 --- /dev/null +++ b/internal/gcimporter/testdata/exports.go @@ -0,0 +1,89 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is used to generate an object file which +// serves as test file for gcimporter_test.go. + +package exports + +import ( + "go/ast" +) + +// Issue 3682: Correctly read dotted identifiers from export data. 
+const init1 = 0 + +func init() {} + +const ( + C0 int = 0 + C1 = 3.14159265 + C2 = 2.718281828i + C3 = -123.456e-789 + C4 = +123.456E+789 + C5 = 1234i + C6 = "foo\n" + C7 = `bar\n` +) + +type ( + T1 int + T2 [10]int + T3 []int + T4 *int + T5 chan int + T6a chan<- int + T6b chan (<-chan int) + T6c chan<- (chan int) + T7 <-chan *ast.File + T8 struct{} + T9 struct { + a int + b, c float32 + d []string `go:"tag"` + } + T10 struct { + T8 + T9 + _ *T10 + } + T11 map[int]string + T12 interface{} + T13 interface { + m1() + m2(int) float32 + } + T14 interface { + T12 + T13 + m3(x ...struct{}) []T9 + } + T15 func() + T16 func(int) + T17 func(x int) + T18 func() float32 + T19 func() (x float32) + T20 func(...interface{}) + T21 struct{ next *T21 } + T22 struct{ link *T23 } + T23 struct{ link *T22 } + T24 *T24 + T25 *T26 + T26 *T27 + T27 *T25 + T28 func(T28) T28 +) + +var ( + V0 int + V1 = -991.0 +) + +func F1() {} +func F2(x int) {} +func F3() int { return 0 } +func F4() float32 { return 0 } +func F5(a, b, c int, u, v, w struct{ x, y T1 }, more ...interface{}) (p, q, r chan<- T10) + +func (p *T1) M1() diff --git a/internal/gcimporter/testdata/issue15920.go b/internal/gcimporter/testdata/issue15920.go new file mode 100644 index 00000000..c70f7d82 --- /dev/null +++ b/internal/gcimporter/testdata/issue15920.go @@ -0,0 +1,11 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package p + +// The underlying type of Error is the underlying type of error. +// Make sure we can import this again without problems. +type Error error + +func F() Error { return nil } diff --git a/internal/gcimporter/testdata/issue20046.go b/internal/gcimporter/testdata/issue20046.go new file mode 100644 index 00000000..c63ee821 --- /dev/null +++ b/internal/gcimporter/testdata/issue20046.go @@ -0,0 +1,9 @@ +// Copyright 2017 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package p + +var V interface { + M() +} diff --git a/internal/gcimporter/testdata/issue25301.go b/internal/gcimporter/testdata/issue25301.go new file mode 100644 index 00000000..e3dc98b4 --- /dev/null +++ b/internal/gcimporter/testdata/issue25301.go @@ -0,0 +1,17 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package issue25301 + +type ( + A = interface { + M() + } + T interface { + A + } + S struct{} +) + +func (S) M() { println("m") } diff --git a/internal/gcimporter/testdata/p.go b/internal/gcimporter/testdata/p.go new file mode 100644 index 00000000..9e2e7057 --- /dev/null +++ b/internal/gcimporter/testdata/p.go @@ -0,0 +1,13 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Input for TestIssue15517 + +package p + +const C = 0 + +var V int + +func F() {} diff --git a/internal/gcimporter/testdata/versions/test.go b/internal/gcimporter/testdata/versions/test.go new file mode 100644 index 00000000..6362adc2 --- /dev/null +++ b/internal/gcimporter/testdata/versions/test.go @@ -0,0 +1,30 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/testdata/versions.test.go. + +// To create a test case for a new export format version, +// build this package with the latest compiler and store +// the resulting .a file appropriately named in the versions +// directory. The VersionHandling test will pick it up. 
+// +// In the testdata/versions: +// +// go build -o test_go1.$X_$Y.a test.go +// +// with $X = Go version and $Y = export format version +// (add 'b' or 'i' to distinguish between binary and +// indexed format starting with 1.11 as long as both +// formats are supported). +// +// Make sure this source is extended such that it exercises +// whatever export format change has taken place. + +package test + +// Any release before and including Go 1.7 didn't encode +// the package for a blank struct field. +type BlankField struct { + _ int +} diff --git a/internal/gcimporter/typeparams_go117.go b/internal/gcimporter/typeparams_go117.go new file mode 100644 index 00000000..9d491144 --- /dev/null +++ b/internal/gcimporter/typeparams_go117.go @@ -0,0 +1,82 @@ +//go:build !go1.18 +// +build !go1.18 + +package gcimporter + +import ( + "go/types" +) + +func unsupported() { + panic("type parameters are unsupported at this go version") +} + +// TypeParam is a placeholder type, as type parameters are not supported at +// this Go version. Its methods panic on use. +type TypeParam struct{ types.Type } + +func (*TypeParam) Index() int { unsupported(); return 0 } +func (*TypeParam) Constraint() types.Type { unsupported(); return nil } +func (*TypeParam) Obj() *types.TypeName { unsupported(); return nil } + +type TypeParamList struct{} + +func (*TypeParamList) Len() int { return 0 } +func (*TypeParamList) At(int) *TypeParam { unsupported(); return nil } + +func typeParamsForNamed(named *types.Named) *TypeParamList { + return nil +} + +func typeParamsForRecv(sig *types.Signature) *TypeParamList { + return nil +} + +func typeParamsForSig(sig *types.Signature) *TypeParamList { + return nil +} + +func typeParamsToTuple(tparams *TypeParamList) *types.Tuple { + return types.NewTuple() +} + +func originType(t types.Type) types.Type { + return t +} + +// Term holds information about a structural type restriction. 
+type Term struct { + tilde bool + typ types.Type +} + +func (m *Term) Tilde() bool { return m.tilde } +func (m *Term) Type() types.Type { return m.typ } +func (m *Term) String() string { + pre := "" + if m.tilde { + pre = "~" + } + return pre + m.typ.String() +} + +// NewTerm creates a new placeholder term type. +func NewTerm(tilde bool, typ types.Type) *Term { + return &Term{tilde, typ} +} + +// Union is a placeholder type, as type parameters are not supported at this Go +// version. Its methods panic on use. +type Union struct{ types.Type } + +func (*Union) String() string { unsupported(); return "" } +func (*Union) Underlying() types.Type { unsupported(); return nil } +func (*Union) Len() int { return 0 } +func (*Union) Term(i int) *Term { unsupported(); return nil } + +var comparableType = comparable{} + +type comparable struct{} + +func (t comparable) Underlying() types.Type { return t } +func (t comparable) String() string { return "comparable" } diff --git a/internal/gcimporter/typeparams_go118.go b/internal/gcimporter/typeparams_go118.go new file mode 100644 index 00000000..c79f84c1 --- /dev/null +++ b/internal/gcimporter/typeparams_go118.go @@ -0,0 +1,50 @@ +//go:build go1.18 +// +build go1.18 + +package gcimporter + +import ( + "go/types" +) + +type TypeParam = types.TypeParam +type TypeParamList = types.TypeParamList + +func typeParamsForNamed(named *types.Named) *types.TypeParamList { + return named.TypeParams() +} + +func typeParamsForRecv(sig *types.Signature) *types.TypeParamList { + return sig.RecvTypeParams() +} + +func typeParamsForSig(sig *types.Signature) *types.TypeParamList { + return sig.TypeParams() +} + +func typeParamsToTuple(tparams *types.TypeParamList) *types.Tuple { + if tparams == nil { + return types.NewTuple() + } + n := tparams.Len() + ar := make([]*types.Var, n) + for i := 0; i < n; i++ { + tp := tparams.At(i) + obj := tp.Obj() + ar[i] = types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), tp.Constraint()) + } + return 
types.NewTuple(ar...) +} + +func originType(t types.Type) types.Type { + if named, ok := t.(*types.Named); ok { + t = named.Origin() + } + return t +} + +type Union = types.Union +type Term = types.Term + +// comparable +var comparableType = types.Universe.Lookup("comparable").Type() diff --git a/package.go b/package.go index 687cb805..516cb9b4 100644 --- a/package.go +++ b/package.go @@ -4,8 +4,13 @@ import ( "bytes" "fmt" "go/ast" - "os" + "go/token" + "go/types" + "log" "strings" + + "github.com/visualfc/gocode/internal/gcexportdata" + "golang.org/x/tools/go/types/typeutil" ) type package_parser interface { @@ -22,6 +27,7 @@ type package_parser interface { type package_file_cache struct { name string // file name import_name string + vendor_name string mtime int64 defalias string @@ -30,10 +36,11 @@ type package_file_cache struct { others map[string]*decl } -func new_package_file_cache(absname, name string) *package_file_cache { +func new_package_file_cache(absname, name string, vname string) *package_file_cache { m := new(package_file_cache) m.name = absname m.import_name = name + m.vendor_name = vname m.mtime = 0 m.defalias = "" return m @@ -71,37 +78,190 @@ func (m *package_file_cache) find_file() string { return m.name } -func (m *package_file_cache) update_cache() { +func (m *package_file_cache) update_cache(c *auto_complete_context) { if m.mtime == -1 { return } - fname := m.find_file() - stat, err := os.Stat(fname) - if err != nil { + // defer func() { + // if err := recover(); err != nil { + // log.Println("update_cache recover error:", err) + // } + // }() + + import_path := m.import_name + if m.vendor_name != "" { + import_path = m.vendor_name + } + if pkg := c.typesWalker.Imported[import_path]; pkg != nil { + if pkg.Name() == "" { + log.Println("error parser", import_path) + return + } + if chk, ok := c.typesWalker.ImportedFilesCheck[import_path]; ok { + if m.mtime == chk.ModTime { + return + } + m.mtime = chk.ModTime + } + m.process_package_types(c, 
pkg) return } + m.process_package_data(c, nil, true) + + // fname := m.find_file() + // stat, err := os.Stat(fname) + + // if err != nil { + // m.process_package_data(c, nil, true) + // return + // } + // statmtime := stat.ModTime().UnixNano() + // if m.mtime != statmtime { + // m.mtime = statmtime + // data, err := file_reader.read_file(fname) + // if err != nil { + // return + // } + // m.process_package_data(c, data, false) + // } +} - statmtime := stat.ModTime().UnixNano() - if m.mtime != statmtime { - m.mtime = statmtime +type types_export struct { + pkg *types.Package + pfc *package_file_cache +} - data, err := file_reader.read_file(fname) - if err != nil { - return +func (p *types_export) init(pkg *types.Package, pfc *package_file_cache) { + p.pkg = pkg + p.pfc = pfc + pfc.defalias = pkg.Name() + for _, pkg := range p.pkg.Imports() { + pkgid := "!" + pkg.Path() + "!" + pkg.Name() + pfc.add_package_to_scope(pkgid, pkg.Path()) + } + pkgid := "!" + pkg.Path() + "!" + pkg.Name() + pfc.add_package_to_scope(pkgid, pkg.Path()) +} + +func (p *types_export) parse_export(callback func(pkg string, decl ast.Decl)) { + for _, pkg := range p.pkg.Imports() { + pkg_parse_export(pkg, p.pfc, callback) + } + pkg_parse_export(p.pkg, p.pfc, callback) +} + +func pkg_parse_export(pkg *types.Package, pfc *package_file_cache, callback func(pkg string, decl ast.Decl)) { + pkgid := "!" + pkg.Path() + "!" 
+ pkg.Name() + //pfc.add_package_to_scope(pkgid, pkg.Path()) + + for _, name := range pkg.Scope().Names() { + if obj := pkg.Scope().Lookup(name); obj != nil { + if !obj.Exported() { + continue + } + name := obj.Name() + var decl ast.Decl + switch t := obj.(type) { + case *types.Const: + decl = &ast.GenDecl{ + Tok: token.CONST, + Specs: []ast.Spec{ + &ast.ValueSpec{ + Names: []*ast.Ident{ast.NewIdent(name)}, + Type: toType(pkg, t.Type()), + }, + }, + } + case *types.Var: + decl = &ast.GenDecl{ + Tok: token.VAR, + Specs: []ast.Spec{ + &ast.ValueSpec{ + Names: []*ast.Ident{ast.NewIdent(name)}, + Type: toType(pkg, t.Type()), + }, + }, + } + case *types.TypeName: + decl = &ast.GenDecl{ + Tok: token.TYPE, + Specs: []ast.Spec{toTypeSpec(pkg, t)}, + } + if named, ok := t.Type().(*types.Named); ok { + for _, sel := range typeutil.IntuitiveMethodSet(named, nil) { + sig := sel.Type().(*types.Signature) + decl := &ast.FuncDecl{ + Recv: toRecv(pkg, sig.Recv()), + Name: ast.NewIdent(sel.Obj().Name()), + Type: toFuncType(pkg, sig), + } + callback(pkgid, decl) + } + } + case *types.Func: + sig := t.Type().(*types.Signature) + decl = &ast.FuncDecl{ + Recv: toRecv(pkg, sig.Recv()), + Name: ast.NewIdent(name), + Type: toFuncType(pkg, sig), + } + } + callback(pkgid, decl) } - m.process_package_data(data) } } -func (m *package_file_cache) process_package_data(data []byte) { +func (m *package_file_cache) process_package_types(c *auto_complete_context, pkg *types.Package) { m.scope = new_named_scope(g_universe_scope, m.name) - // find import section - i := bytes.Index(data, []byte{'\n', '$', '$'}) - if i == -1 { - panic(fmt.Sprintf("Can't find the import section in the package file %s", m.name)) + // main package + m.main = new_decl(m.name, decl_package, nil) + // create map for other packages + m.others = make(map[string]*decl) + + var pp package_parser + fset := token.NewFileSet() + var buf bytes.Buffer + gcexportdata.Write(&buf, fset, pkg) + var p gc_bin_parser + p.init(buf.Bytes(), 
m) + pp = &p + + prefix := "!" + m.name + "!" + //log.Println("load pkg", pkg.Path(), pkg.Imports()) + pp.parse_export(func(pkg string, decl ast.Decl) { + anonymify_ast(decl, decl_foreign, m.scope) + if pkg == "" || strings.HasPrefix(pkg, prefix) { + // main package + add_ast_decl_to_package(m.main, decl, m.scope) + } else { + // others + if _, ok := m.others[pkg]; !ok { + m.others[pkg] = new_decl(pkg, decl_package, nil) + } + add_ast_decl_to_package(m.others[pkg], decl, m.scope) + } + }) + + // hack, add ourselves to the package scope + mainName := "!" + m.name + "!" + m.defalias + m.add_package_to_scope(mainName, m.name) + + // replace dummy package decls in package scope to actual packages + for key := range m.scope.entities { + if !strings.HasPrefix(key, "!") { + continue + } + pkg, ok := m.others[key] + if !ok && key == mainName { + pkg = m.main + } + m.scope.replace_decl(key, pkg) } - data = data[i+len("\n$$"):] +} + +func (m *package_file_cache) process_package_data(c *auto_complete_context, data []byte, source bool) { + m.scope = new_named_scope(g_universe_scope, m.name) // main package m.main = new_decl(m.name, decl_package, nil) @@ -109,23 +269,65 @@ func (m *package_file_cache) process_package_data(data []byte) { m.others = make(map[string]*decl) var pp package_parser - if data[0] == 'B' { - // binary format, skip 'B\n' - data = data[2:] + if source { + var tp types_parser + var srcDir string + importPath := m.import_name + if m.vendor_name != "" { + importPath = m.vendor_name + } + tp.initSource(m.import_name, importPath, srcDir, m, c) + if tp.pkg != nil && tp.pkg.Name() == "" { + log.Println("error parser data source", importPath) + return + } + data = tp.exportData() + if *g_debug { + log.Printf("parser source %q %q\n", importPath, srcDir) + } + if data == nil { + log.Println("error parser data source", importPath) + return + } var p gc_bin_parser p.init(data, m) pp = &p } else { - // textual format, find the beginning of the package clause - i = 
bytes.Index(data, []byte{'p', 'a', 'c', 'k', 'a', 'g', 'e'}) + i := bytes.Index(data, []byte{'\n', '$', '$'}) if i == -1 { - panic("Can't find the package clause") + panic(fmt.Sprintf("Can't find the import section in the package file %s", m.name)) } - data = data[i:] + offset := i + len("\n$$") + if data[offset] == 'B' { + // binary format, skip 'B\n' + //data = data[2:] + if data[offset+2] == 'i' { + var tp types_parser + tp.initData(m.import_name, data, m, c) + data = tp.exportData() + if data == nil { + log.Println("error parser data binary", m.import_name) + return + } + } else { + data = data[offset+2:] + } + var p gc_bin_parser + p.init(data, m) + pp = &p + } else { + data = data[offset:] + // textual format, find the beginning of the package clause + i := bytes.Index(data, []byte{'p', 'a', 'c', 'k', 'a', 'g', 'e'}) + if i == -1 { + panic("Can't find the package clause") + } + data = data[i:] - var p gc_parser - p.init(data, m) - pp = &p + var p gc_parser + p.init(data, m) + pp = &p + } } prefix := "!" + m.name + "!" 
@@ -168,13 +370,15 @@ func (m *package_file_cache) add_package_to_scope(alias, realname string) { func add_ast_decl_to_package(pkg *decl, decl ast.Decl, scope *scope) { foreach_decl(decl, func(data *foreach_decl_struct) { class := ast_decl_class(data.decl) + typeparams := ast_decl_typeparams(data.decl) for i, name := range data.names { typ, v, vi := data.type_value_index(i) d := new_decl_full(name.Name, class, decl_foreign|ast_decl_flags(data.decl), typ, v, vi, scope) if d == nil { - return + continue } + d.typeparams = typeparams if !name.IsExported() && d.class != decl_type { return @@ -228,27 +432,15 @@ func (c package_cache) append_packages(ps map[string]*package_file_cache, pkgs [ if mod, ok := c[m.abspath]; ok { ps[m.abspath] = mod } else { - mod = new_package_file_cache(m.abspath, m.path) + mod = new_package_file_cache(m.abspath, m.path, m.vpath) ps[m.abspath] = mod c[m.abspath] = mod } } } -var g_builtin_unsafe_package = []byte(` -import -$$ -package unsafe - type @"".Pointer uintptr - func @"".Offsetof (? any) uintptr - func @"".Sizeof (? any) uintptr - func @"".Alignof (? any) uintptr - -$$ -`) - func (c package_cache) add_builtin_unsafe_package() { pkg := new_package_file_cache_forever("unsafe", "unsafe") - pkg.process_package_data(g_builtin_unsafe_package) + pkg.process_package_data(nil, g_builtin_unsafe_package, false) c["unsafe"] = pkg } diff --git a/package_bin.go b/package_bin.go index 4a51c740..722bc1af 100644 --- a/package_bin.go +++ b/package_bin.go @@ -113,10 +113,9 @@ func (p *gc_bin_parser) parse_export(callback func(string, ast.Decl)) { // read version specific flags - extend as necessary switch p.version { - // case 6: - // ... 
- // fallthrough - case 5, 4, 3, 2, 1: + // case 7: + // fallthrough + case 6, 5, 4, 3, 2, 1: p.debugFormat = p.rawStringln(p.rawByte()) == "debug" p.trackAllTypes = p.int() != 0 p.posInfoFormat = p.int() != 0 @@ -152,6 +151,9 @@ func (p *gc_bin_parser) parse_export(callback func(string, ast.Decl)) { } } +// MaxPkgHeight is a height greater than any likely package height. +const MaxPkgHeight = 1e9 + func (p *gc_bin_parser) pkg() string { // if the package was seen before, i is its index (>= 0) i := p.tagOrIndex() @@ -172,6 +174,10 @@ func (p *gc_bin_parser) pkg() string { } else { path = p.string() } + var height int + if p.version >= 6 { + height = p.int() + } // we should never see an empty package name if name == "" { @@ -184,6 +190,18 @@ func (p *gc_bin_parser) pkg() string { panic(fmt.Sprintf("package path %q for pkg index %d", path, len(p.pkgList))) } + if p.version >= 6 { + if height < 0 || height >= MaxPkgHeight { + panic(fmt.Sprintf("bad package height %v for package %s", height, name)) + } + + // reexported packages should always have a lower height than + // the main package + // if len(p.pkgList) != 0 && height >= p.imp.Height { + // p.formatErrorf("package %q (height %d) reexports package %q (height %d)", p.imp.Path, p.imp.Height, path, height) + // } + } + var fullName string if path != "" { fullName = "!" + path + "!" 
+ name @@ -193,6 +211,7 @@ func (p *gc_bin_parser) pkg() string { } // if the package was imported before, use that one; otherwise create a new one + // pkg.Height = height p.pkgList = append(p.pkgList, fullName) return p.pkgList[len(p.pkgList)-1] } @@ -242,14 +261,18 @@ func (p *gc_bin_parser) obj(tag int) { }, }) - case funcTag: + case funcTag, func2Tag: + var tparams *ast.FieldList + if tag == func2Tag { + tparams = p.paramList() + } p.pos() pkg, name := p.qualifiedName() params := p.paramList() results := p.paramList() p.callback(pkg, &ast.FuncDecl{ Name: ast.NewIdent(name), - Type: &ast.FuncType{Params: params, Results: results}, + Type: newFuncType(tparams, params, results), }) default: @@ -331,16 +354,18 @@ func (p *gc_bin_parser) typ(parent string) ast.Expr { // otherwise, i is the type tag (< 0) switch i { - case namedTag: + case namedTag, named2Tag: + var typeParams *ast.FieldList + if i == named2Tag { + typeParams = p.paramList() + } // read type object p.pos() parent, name := p.qualifiedName() tdecl := &ast.GenDecl{ Tok: token.TYPE, Specs: []ast.Spec{ - &ast.TypeSpec{ - Name: ast.NewIdent(name), - }, + newTypeSpec(name, typeParams), }, } @@ -367,17 +392,15 @@ func (p *gc_bin_parser) typ(parent string) ast.Expr { if !exported(name) { p.pkg() } - recv := p.paramList() params := p.paramList() results := p.paramList() p.int() // go:nointerface pragma - discarded - strip_method_receiver(recv) p.callback(parent, &ast.FuncDecl{ Recv: recv, Name: ast.NewIdent(name), - Type: &ast.FuncType{Params: params, Results: results}, + Type: newFuncType(nil, params, results), }) } return t @@ -417,18 +440,16 @@ func (p *gc_bin_parser) typ(parent string) ast.Expr { case interfaceTag: i := p.reserveMaybe() - var embeddeds []*ast.SelectorExpr - for n := p.int(); n > 0; n-- { - p.pos() - if named, ok := p.typ(parent).(*ast.SelectorExpr); ok { - embeddeds = append(embeddeds, named) + var embeddeds []*ast.Field + n := p.int() + if n > 0 { + embeddeds = make([]*ast.Field, n) + 
for i := 0; i < n; i++ { + embeddeds[i] = &ast.Field{Type: p.typ(parent)} } } methods := p.methodList(parent) - for _, field := range embeddeds { - methods = append(methods, &ast.Field{Type: field}) - } - return p.recordMaybe(i, &ast.InterfaceType{Methods: &ast.FieldList{List: methods}}) + return p.recordMaybe(i, &ast.InterfaceType{Methods: &ast.FieldList{List: append(embeddeds, methods...)}}) case mapTag: i := p.reserveMaybe() @@ -451,7 +472,31 @@ func (p *gc_bin_parser) typ(parent string) ast.Expr { } elt := p.typ(parent) return p.recordMaybe(i, &ast.ChanType{Dir: dir, Value: elt}) - + case typeParamTag: + i := p.reserveMaybe() + t0 := p.typ(parent) + return p.recordMaybe(i, t0) + case unionTag: + i := p.reserveMaybe() + n := p.int() + var expr ast.Expr + for i := 0; i < n; i++ { + title := p.int() != 0 + t0 := p.typ(parent) + if title { + t0 = &ast.UnaryExpr{Op: TILDE, X: t0} + } + if i == 0 { + expr = t0 + } else { + expr = &ast.BinaryExpr{ + X: expr, + Op: token.OR, + Y: t0, + } + } + } + return p.recordMaybe(i, expr) default: panic(fmt.Sprintf("unexpected type tag %d", i)) } @@ -595,6 +640,8 @@ func (p *gc_bin_parser) skipValue() { p.float() case stringTag: p.string() + case unknownTag: + break default: panic(fmt.Sprintf("unexpected value tag %d", tag)) } @@ -778,6 +825,12 @@ const ( // Type aliases aliasTag + + typeParamTag + unionTag // types.Union + named2Tag // has typeparams + func2Tag // has typeparams + signature2Tag // has typeparams ) var predeclared = []ast.Expr{ @@ -826,4 +879,7 @@ var predeclared = []ast.Expr{ // used internally by gc; never used by this package or in .a files ast.NewIdent("any"), + + // comparable + ast.NewIdent("comparable"), } diff --git a/package_types.go b/package_types.go new file mode 100644 index 00000000..cd131ab7 --- /dev/null +++ b/package_types.go @@ -0,0 +1,89 @@ +package main + +import ( + "bytes" + "go/importer" + "go/token" + "go/types" + "io" + "log" + + "github.com/visualfc/gocode/internal/gcexportdata" + pkgwalk 
"github.com/visualfc/gotools/types" +) + +type types_parser struct { + pfc *package_file_cache + pkg *types.Package +} + +// func DefaultPkgConfig() *pkgwalk.PkgConfig { +// conf := &pkgwalk.PkgConfig{IgnoreFuncBodies: true, AllowBinary: true, WithTestFiles: false} +// conf.Info = &types.Info{ +// Uses: make(map[*ast.Ident]types.Object), +// Defs: make(map[*ast.Ident]types.Object), +// Selections: make(map[*ast.SelectorExpr]*types.Selection), +// //Types: make(map[ast.Expr]types.TypeAndValue), +// //Scopes : make(map[ast.Node]*types.Scope) +// //Implicits : make(map[ast.Node]types.Object) +// } +// conf.XInfo = &types.Info{ +// Uses: make(map[*ast.Ident]types.Object), +// Defs: make(map[*ast.Ident]types.Object), +// Selections: make(map[*ast.SelectorExpr]*types.Selection), +// } +// return conf +// } + +func (p *types_parser) initSource(import_path string, path string, dir string, pfc *package_file_cache, c *auto_complete_context) { + //conf := &pkgwalk.PkgConfig{IgnoreFuncBodies: true, AllowBinary: false, WithTestFiles: true} + // conf.Info = &types.Info{} + // conf.XInfo = &types.Info{} + c.mutex.Lock() + defer c.mutex.Unlock() + conf := pkgwalk.DefaultPkgConfig() + pkg, _, err := c.typesWalker.ImportHelper(".", path, import_path, conf, nil) + if err != nil { + log.Println(err) + } + p.pkg = pkg + // im := srcimporter.New(&build.Default, c.fset, c.packages) + // if dir != "" { + // p.pkg, _ = im.ImportFrom(path, dir, 0) + // } else { + // p.pkg, _ = im.Import(path) + // } + p.pfc = pfc +} + +func (p *types_parser) initData(path string, data []byte, pfc *package_file_cache, c *auto_complete_context) { + p.pkg, _ = importer.For("gc", func(path string) (io.ReadCloser, error) { + return NewMemReadClose(data), nil + }).Import(path) + p.pfc = pfc + if p.pkg != nil { + c.typesWalker.Imported[p.pkg.Path()] = p.pkg + } +} + +type MemReadClose struct { + *bytes.Buffer +} + +func (m *MemReadClose) Close() error { + return nil +} + +func NewMemReadClose(data []byte) 
*MemReadClose { + return &MemReadClose{bytes.NewBuffer(data)} +} + +func (p *types_parser) exportData() []byte { + if p.pkg == nil { + return nil + } + fset := token.NewFileSet() + var buf bytes.Buffer + gcexportdata.Write(&buf, fset, p.pkg) + return buf.Bytes() +} diff --git a/ripper.go b/ripper.go index e1c2f75b..58d7ba1f 100644 --- a/ripper.go +++ b/ripper.go @@ -16,8 +16,9 @@ type tok_pos_pair struct { } type tok_collection struct { - tokens []tok_pos_pair - fset *token.FileSet + tokens []tok_pos_pair + fset *token.FileSet + toplevel token.Token } func (this *tok_collection) next(s *scanner.Scanner) bool { @@ -50,8 +51,8 @@ func (this *tok_collection) find_decl_beg(pos int) int { lowi = i } } - cur = lowest + var findvar bool for i := lowi - 1; i >= 0; i-- { t := this.tokens[i] switch t.tok { @@ -59,13 +60,33 @@ func (this *tok_collection) find_decl_beg(pos int) int { cur++ case token.LBRACE: cur-- + case token.VAR: + findvar = true } if t.tok == token.SEMICOLON && cur == lowest { lowpos = this.fset.Position(t.pos).Offset + //var (\n jsData = `{ }`\n file2 *File = func() *File { + if this.toplevel == token.VAR && !findvar { + next := lowest + for j := i - 1; j >= 0; j-- { + jt := this.tokens[j] + switch jt.tok { + case token.RBRACE: + next++ + case token.LBRACE: + next-- + case token.VAR: + findvar = true + } + if jt.tok == token.SEMICOLON && next == lowest && findvar { + lowpos = this.fset.Position(jt.pos).Offset + break + } + } + } break } } - return lowpos } @@ -119,7 +140,6 @@ func (this *tok_collection) rip_off_decl(file []byte, cursor int) (int, []byte, s.Init(this.fset.AddFile("", this.fset.Base(), len(file)), file, nil, scanner.ScanComments) for this.next(&s) { } - beg, end := this.find_outermost_scope(cursor) if beg == -1 || end == -1 { return cursor, file, nil @@ -135,7 +155,8 @@ func (this *tok_collection) rip_off_decl(file []byte, cursor int) (int, []byte, return cursor - beg, newfile, ripped } -func rip_off_decl(file []byte, cursor int) (int, 
[]byte, []byte) { +func rip_off_decl(file []byte, cursor int, topLevelTok token.Token) (int, []byte, []byte) { var tc tok_collection + tc.toplevel = topLevelTok return tc.rip_off_decl(file, cursor) } diff --git a/rpc.go b/rpc.go index b1e0ea7a..4e9f75d8 100644 --- a/rpc.go +++ b/rpc.go @@ -26,6 +26,7 @@ func (r *RPC) RPC_auto_complete(args *Args_auto_complete, reply *Reply_auto_comp reply.Arg0, reply.Arg1 = server_auto_complete(args.Arg0, args.Arg1, args.Arg2, args.Arg3) return nil } + func client_auto_complete(cli *rpc.Client, Arg0 []byte, Arg1 string, Arg2 int, Arg3 go_build_context) (c []candidate, d int) { var args Args_auto_complete var reply Reply_auto_complete @@ -40,6 +41,38 @@ func client_auto_complete(cli *rpc.Client, Arg0 []byte, Arg1 string, Arg2 int, A return reply.Arg0, reply.Arg1 } +type Args_types_info struct { + Arg0 []byte + Arg1 string + Arg2 int + Arg3 string + Arg4 go_build_context +} +type Reply_types_info struct { + Arg0 []string + Arg1 int +} + +func (r *RPC) RPC_types_info(args *Args_types_info, reply *Reply_types_info) error { + reply.Arg0, reply.Arg1 = server_types_info(args.Arg0, args.Arg1, args.Arg2, args.Arg3, args.Arg4) + return nil +} + +func client_types_info(cli *rpc.Client, Arg0 []byte, Arg1 string, Arg2 int, Arg3 string, Arg4 go_build_context) (c []string, d int) { + var args Args_types_info + var reply Reply_types_info + args.Arg0 = Arg0 + args.Arg1 = Arg1 + args.Arg2 = Arg2 + args.Arg3 = Arg3 + args.Arg4 = Arg4 + err := cli.Call("RPC.RPC_types_info", &args, &reply) + if err != nil { + panic(err) + } + return reply.Arg0, reply.Arg1 +} + // wrapper for: server_close type Args_close struct { diff --git a/server.go b/server.go index 82813df6..6871c10b 100644 --- a/server.go +++ b/server.go @@ -11,7 +11,10 @@ import ( "path/filepath" "reflect" "runtime" + "strings" "time" + + pkgwalk "github.com/visualfc/gotools/types" ) func do_server() int { @@ -72,14 +75,14 @@ func new_daemon(network, address string) *daemon { d.cmd_in = 
make(chan int, 1) d.pkgcache = new_package_cache() d.declcache = new_decl_cache(&d.context) - d.autocomplete = new_auto_complete_context(d.pkgcache, d.declcache) + d.autocomplete = new_auto_complete_context(&d.context, d.pkgcache, d.declcache) return d } func (this *daemon) drop_cache() { this.pkgcache = new_package_cache() this.declcache = new_decl_cache(&this.context) - this.autocomplete = new_auto_complete_context(this.pkgcache, this.declcache) + this.autocomplete = new_auto_complete_context(&this.context, this.pkgcache, this.declcache) } const ( @@ -131,6 +134,85 @@ var g_daemon *daemon // Corresponding client_* functions are autogenerated by goremote. //------------------------------------------------------------------------- +type TypesInfo struct { + ar []string +} + +func (p *TypesInfo) Write(data []byte) (n int, err error) { + p.ar = append(p.ar, strings.TrimSpace(string(data))) + return len(data), nil +} + +func server_types_info(file []byte, filename string, cursor int, addin string, context_packed go_build_context) (c []string, d int) { + context := unpack_build_context(&context_packed) + defer func() { + if err := recover(); err != nil { + print_backtrace(err) + c = []string{} + // drop cache + g_daemon.drop_cache() + } + }() + // TODO: Probably we don't care about comparing all the fields, checking GOROOT and GOPATH + // should be enough. 
+ if !reflect.DeepEqual(g_daemon.context.Context, context.Context) { + g_daemon.context = context + g_daemon.drop_cache() + } + switch g_config.PackageLookupMode { + case "bzl": + // when package lookup mode is bzl, we set GOPATH to "" explicitly and + // BzlProjectRoot becomes valid (or empty) + var err error + g_daemon.context.GOPATH = "" + g_daemon.context.BzlProjectRoot, err = find_bzl_project_root(g_config.LibPath, filename) + if *g_debug && err != nil { + log.Printf("Bzl project root not found: %s", err) + } + case "gb": + // when package lookup mode is gb, we set GOPATH to "" explicitly and + // GBProjectRoot becomes valid (or empty) + var err error + g_daemon.context.GOPATH = "" + g_daemon.context.GBProjectRoot, err = find_gb_project_root(filename) + if *g_debug && err != nil { + log.Printf("Gb project root not found: %s", err) + } + case "go": + // get current package path for GO15VENDOREXPERIMENT hack + g_daemon.context.CurrentPackagePath = "" + dir, fname := filepath.Split(filename) + if dir == "." 
{ + dir, _ = os.Getwd() + } + pkg, err := g_daemon.context.ImportDir(dir, build.FindOnly) + if err == nil { + if *g_debug { + log.Printf("Go project path: %s", pkg.ImportPath) + } + g_daemon.context.CurrentPackagePath = pkg.ImportPath + } else if *g_debug { + log.Printf("Go project path not found: %s", err) + } + + conf := pkgwalk.DefaultPkgConfig() + cursor := pkgwalk.NewFileCursor(file, dir, fname, cursor) + cursor.SetText(addin) + if file != nil { + g_daemon.autocomplete.typesWalker.UpdateSourceData(filename, file, true) + } + var stdout, stderr TypesInfo + g_daemon.autocomplete.typesWalker.SetOutput(&stdout, &stderr) + g_daemon.autocomplete.typesWalker.SetFindMode(&pkgwalk.FindMode{Info: true, Doc: true, Define: true}) + wpkg, conf, _ := g_daemon.autocomplete.typesWalker.Check(dir, conf, cursor) + if wpkg != nil { + g_daemon.autocomplete.typesWalker.LookupCursor(wpkg, conf, cursor) + return stdout.ar, len(stdout.ar) + } + } + return +} + func server_auto_complete(file []byte, filename string, cursor int, context_packed go_build_context) (c []candidate, d int) { context := unpack_build_context(&context_packed) defer func() { @@ -172,7 +254,11 @@ func server_auto_complete(file []byte, filename string, cursor int, context_pack case "go": // get current package path for GO15VENDOREXPERIMENT hack g_daemon.context.CurrentPackagePath = "" - pkg, err := g_daemon.context.ImportDir(filepath.Dir(filename), build.FindOnly) + dir, fname := filepath.Split(filename) + if dir == "." 
{ + dir, _ = os.Getwd() + } + pkg, err := g_daemon.context.ImportDir(dir, build.FindOnly) if err == nil { if *g_debug { log.Printf("Go project path: %s", pkg.ImportPath) @@ -181,6 +267,16 @@ func server_auto_complete(file []byte, filename string, cursor int, context_pack } else if *g_debug { log.Printf("Go project path not found: %s", err) } + + //g_daemon.modList = gomod.LooupModList(dir) + + conf := DefaultPkgConfig() + g_daemon.autocomplete.typesCursor = cursor + g_daemon.autocomplete.typesWalker.FileSet.Base() - 1 + cursor := pkgwalk.NewFileCursor(file, dir, fname, cursor) + if file != nil { + g_daemon.autocomplete.typesWalker.UpdateSourceData(filename, file, true) + } + g_daemon.autocomplete.typesPkg, g_daemon.autocomplete.typesConf, _ = g_daemon.autocomplete.typesWalker.Check(dir, conf, cursor) } if *g_debug { var buf bytes.Buffer diff --git a/types_go117.go b/types_go117.go new file mode 100644 index 00000000..5fe70d65 --- /dev/null +++ b/types_go117.go @@ -0,0 +1,250 @@ +//go:build !go1.18 +// +build !go1.18 + +package main + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "go/types" + "io" + "strings" + + pkgwalk "github.com/visualfc/gotools/types" +) + +func unsupported() { + panic("type parameters are unsupported at this go version") +} + +var TILDE = token.VAR + 3 + +type TypeParam struct{ types.Type } + +func (*TypeParam) String() string { unsupported(); return "" } +func (*TypeParam) Underlying() types.Type { unsupported(); return nil } +func (*TypeParam) Index() int { unsupported(); return 0 } +func (*TypeParam) Constraint() types.Type { unsupported(); return nil } +func (*TypeParam) SetConstraint(types.Type) { unsupported() } +func (*TypeParam) Obj() *types.TypeName { unsupported(); return nil } + +// TypeParamList is a placeholder for an empty type parameter list. 
+type TypeParamList struct{} + +func (*TypeParamList) Len() int { return 0 } +func (*TypeParamList) At(int) *TypeParam { unsupported(); return nil } + +func newFuncType(tparams, params, results *ast.FieldList) *ast.FuncType { + return &ast.FuncType{Params: params, Results: results} +} + +func newTypeSpec(name string, tparams *ast.FieldList) *ast.TypeSpec { + return &ast.TypeSpec{ + Name: ast.NewIdent(name), + } +} + +func toTypeParam(pkg *types.Package, t *TypeParam) ast.Expr { + unsupported() + return nil +} + +func toTypeSpec(pkg *types.Package, t *types.TypeName) *ast.TypeSpec { + var assign token.Pos + if t.IsAlias() { + assign = 1 + } + typ := t.Type() + return &ast.TypeSpec{ + Name: ast.NewIdent(t.Name()), + Assign: assign, + Type: toType(pkg, typ.Underlying()), + } +} + +func toFuncType(pkg *types.Package, sig *types.Signature) *ast.FuncType { + params := toFieldList(pkg, sig.Params()) + results := toFieldList(pkg, sig.Results()) + if sig.Variadic() { + n := len(params) + if n == 0 { + panic("TODO: toFuncType error") + } + toVariadic(params[n-1]) + } + return &ast.FuncType{ + Params: &ast.FieldList{List: params}, + Results: &ast.FieldList{List: results}, + } +} + +func ForFuncType(typ *ast.FuncType) *ast.FieldList { + return nil +} + +// converts type expressions like: +// ast.Expr +// *ast.Expr +// $ast$go/ast.Expr +// to a path that can be used to lookup a type related Decl +func get_type_path(e ast.Expr) (r type_path) { + if e == nil { + return type_path{"", "", nil} + } + + switch t := e.(type) { + case *ast.Ident: + r.name = t.Name + case *ast.StarExpr: + r = get_type_path(t.X) + case *ast.SelectorExpr: + if ident, ok := t.X.(*ast.Ident); ok { + r.pkg = ident.Name + } + r.name = t.Sel.Name + } + return +} + +func ast_decl_typeparams(decl ast.Decl) *ast.FieldList { + return nil +} + +func hasTypeParams(typ types.Type) bool { + return false +} + +func funcHasTypeParams(typ *ast.FuncType) bool { + return false +} + +func toNamedType(pkg *types.Package, t 
*types.Named) ast.Expr { + return toObjectExpr(pkg, t.Obj()) +} + +func lookup_types_near_instance(ident *ast.Ident, pos token.Pos, info *types.Info) types.Type { + return nil +} + +func DefaultPkgConfig() *pkgwalk.PkgConfig { + conf := &pkgwalk.PkgConfig{IgnoreFuncBodies: false, AllowBinary: true, WithTestFiles: true} + conf.Info = &types.Info{ + Uses: make(map[*ast.Ident]types.Object), + Defs: make(map[*ast.Ident]types.Object), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + Types: make(map[ast.Expr]types.TypeAndValue), + Scopes: make(map[ast.Node]*types.Scope), + Implicits: make(map[ast.Node]types.Object), + } + conf.XInfo = &types.Info{ + Uses: make(map[*ast.Ident]types.Object), + Defs: make(map[*ast.Ident]types.Object), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + Types: make(map[ast.Expr]types.TypeAndValue), + Scopes: make(map[ast.Node]*types.Scope), + Implicits: make(map[ast.Node]types.Object), + } + return conf +} + +func pretty_print_type_expr(out io.Writer, e ast.Expr, canonical_aliases map[string]string) { + switch t := e.(type) { + case *ast.StarExpr: + fmt.Fprintf(out, "*") + pretty_print_type_expr(out, t.X, canonical_aliases) + case *ast.Ident: + if strings.HasPrefix(t.Name, "$") { + // beautify anonymous types + switch t.Name[1] { + case 's': + fmt.Fprintf(out, "struct") + case 'i': + // ok, in most cases anonymous interface is an + // empty interface, I'll just pretend that + // it's always true + fmt.Fprintf(out, "interface{}") + } + } else if !*g_debug && strings.HasPrefix(t.Name, "!") { + // these are full package names for disambiguating and pretty + // printing packages within packages, e.g. + // !go/ast!ast vs. !github.com/nsf/my/ast!ast + // another ugly hack, if people are punished in hell for ugly hacks + // I'm screwed... 
+ emarkIdx := strings.LastIndex(t.Name, "!") + path := t.Name[1:emarkIdx] + alias := canonical_aliases[path] + if alias == "" { + alias = t.Name[emarkIdx+1:] + } + fmt.Fprintf(out, alias) + } else { + fmt.Fprintf(out, t.Name) + } + case *ast.ArrayType: + al := "" + if t.Len != nil { + al = get_array_len(t.Len) + } + if al != "" { + fmt.Fprintf(out, "[%s]", al) + } else { + fmt.Fprintf(out, "[]") + } + pretty_print_type_expr(out, t.Elt, canonical_aliases) + case *ast.SelectorExpr: + pretty_print_type_expr(out, t.X, canonical_aliases) + fmt.Fprintf(out, ".%s", t.Sel.Name) + case *ast.FuncType: + fmt.Fprintf(out, "func(") + pretty_print_func_field_list(out, t.Params, canonical_aliases) + fmt.Fprintf(out, ")") + + buf := bytes.NewBuffer(make([]byte, 0, 256)) + nresults := pretty_print_func_field_list(buf, t.Results, canonical_aliases) + if nresults > 0 { + results := buf.String() + if strings.IndexAny(results, ", ") != -1 { + results = "(" + results + ")" + } + fmt.Fprintf(out, " %s", results) + } + case *ast.MapType: + fmt.Fprintf(out, "map[") + pretty_print_type_expr(out, t.Key, canonical_aliases) + fmt.Fprintf(out, "]") + pretty_print_type_expr(out, t.Value, canonical_aliases) + case *ast.InterfaceType: + fmt.Fprintf(out, "interface{}") + case *ast.Ellipsis: + fmt.Fprintf(out, "...") + pretty_print_type_expr(out, t.Elt, canonical_aliases) + case *ast.StructType: + fmt.Fprintf(out, "struct") + case *ast.ChanType: + switch t.Dir { + case ast.RECV: + fmt.Fprintf(out, "<-chan ") + case ast.SEND: + fmt.Fprintf(out, "chan<- ") + case ast.SEND | ast.RECV: + fmt.Fprintf(out, "chan ") + } + pretty_print_type_expr(out, t.Value, canonical_aliases) + case *ast.ParenExpr: + fmt.Fprintf(out, "(") + pretty_print_type_expr(out, t.X, canonical_aliases) + fmt.Fprintf(out, ")") + case *ast.BadExpr: + // TODO: probably I should check that in a separate function + // and simply discard declarations with BadExpr as a part of their + // type + default: + // the element has some weird 
type, just ignore it + } +} + +func funHasTypeArgs(fun ast.Expr) bool { + return false +} diff --git a/types_go118.go b/types_go118.go new file mode 100644 index 00000000..25618134 --- /dev/null +++ b/types_go118.go @@ -0,0 +1,356 @@ +//go:build go1.18 +// +build go1.18 + +package main + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "go/types" + "io" + "sort" + "strings" + + pkgwalk "github.com/visualfc/gotools/types" +) + +type TypeParam = types.TypeParam +type TypeParamList = types.TypeParamList + +var TILDE = token.TILDE + +func newFuncType(tparams, params, results *ast.FieldList) *ast.FuncType { + return &ast.FuncType{TypeParams: tparams, Params: params, Results: results} +} + +func newTypeSpec(name string, tparams *ast.FieldList) *ast.TypeSpec { + return &ast.TypeSpec{ + Name: ast.NewIdent(name), + TypeParams: tparams, + } +} + +func toTypeParam(pkg *types.Package, t *TypeParam) ast.Expr { + return toObjectExpr(pkg, t.Obj()) +} + +func ForSignature(sig *types.Signature) *TypeParamList { + return sig.TypeParams() +} + +func ForFuncType(typ *ast.FuncType) *ast.FieldList { + return typ.TypeParams +} + +// RecvTypeParams returns a nil slice. 
+func RecvTypeParams(sig *types.Signature) *TypeParamList { + return sig.RecvTypeParams() +} + +func ForNamed(named *types.Named) *TypeParamList { + return named.TypeParams() +} + +func toFieldListX(pkg *types.Package, t *types.TypeParamList) *ast.FieldList { + if t == nil { + return nil + } + n := t.Len() + flds := make([]*ast.Field, n) + for i := 0; i < n; i++ { + item := t.At(i) + names := []*ast.Ident{ast.NewIdent(item.Obj().Name())} + typ := toType(pkg, item.Constraint()) + flds[i] = &ast.Field{Names: names, Type: typ} + } + return &ast.FieldList{ + List: flds, + } +} + +func toFuncType(pkg *types.Package, sig *types.Signature) *ast.FuncType { + params := toFieldList(pkg, sig.Params()) + results := toFieldList(pkg, sig.Results()) + if sig.Variadic() { + n := len(params) + if n == 0 { + panic("TODO: toFuncType error") + } + toVariadic(params[n-1]) + } + return &ast.FuncType{ + TypeParams: toFieldListX(pkg, sig.TypeParams()), + Params: &ast.FieldList{List: params}, + Results: &ast.FieldList{List: results}, + } +} + +func toTypeSpec(pkg *types.Package, t *types.TypeName) *ast.TypeSpec { + var assign token.Pos + if t.IsAlias() { + assign = 1 + } + typ := t.Type() + ts := &ast.TypeSpec{ + Name: ast.NewIdent(t.Name()), + Assign: assign, + Type: toType(pkg, typ.Underlying()), + } + if named, ok := typ.(*types.Named); ok { + ts.TypeParams = toFieldListX(pkg, named.TypeParams()) + } + return ts +} + +// converts type expressions like: +// ast.Expr +// *ast.Expr +// $ast$go/ast.Expr +// to a path that can be used to lookup a type related Decl +func get_type_path(e ast.Expr) (r type_path) { + if e == nil { + return type_path{"", "", nil} + } + + switch t := e.(type) { + case *ast.Ident: + r.name = t.Name + case *ast.StarExpr: + r = get_type_path(t.X) + case *ast.SelectorExpr: + if ident, ok := t.X.(*ast.Ident); ok { + r.pkg = ident.Name + if r.pkg == "main" { + r.pkg = "" + } + } + r.name = t.Sel.Name + case *ast.IndexExpr: + r = get_type_path(t.X) + r.targs = 
[]ast.Expr{t.Index} + case *ast.IndexListExpr: + r = get_type_path(t.X) + r.targs = t.Indices + } + return +} + +func ast_decl_typeparams(decl ast.Decl) *ast.FieldList { + switch t := decl.(type) { + case *ast.GenDecl: + if t.Tok == token.TYPE { + if len(t.Specs) > 0 { + if spec, ok := t.Specs[0].(*ast.TypeSpec); ok { + return spec.TypeParams + } + } + } + case *ast.FuncDecl: + return t.Type.TypeParams + } + return nil +} + +func hasTypeParams(typ types.Type) bool { + switch t := typ.(type) { + case *types.Named: + return t.TypeParams() != nil && (t.Origin() == t) + case *types.Signature: + return t.TypeParams() != nil + } + return false +} + +func funcHasTypeParams(typ *ast.FuncType) bool { + return typ.TypeParams != nil +} + +func toNamedType(pkg *types.Package, t *types.Named) ast.Expr { + expr := toObjectExpr(pkg, t.Obj()) + if targs := t.TypeArgs(); targs != nil { + n := targs.Len() + indices := make([]ast.Expr, n) + for i := 0; i < n; i++ { + indices[i] = toType(pkg, targs.At(i)) + } + if n == 1 { + expr = &ast.IndexExpr{ + X: expr, + Index: indices[0], + } + } else { + expr = &ast.IndexListExpr{ + X: expr, + Indices: indices, + } + } + } + return expr +} + +func lookup_types_near_instance(ident *ast.Ident, pos token.Pos, info *types.Info) types.Type { + var ar []*typ_distance + for k, v := range info.Instances { + if ident.Name == k.Name && pos > k.End() { + ar = append(ar, &typ_distance{pos - k.End(), v.Type}) + } + } + switch len(ar) { + case 0: + return nil + case 1: + return ar[0].typ + default: + sort.Slice(ar, func(i, j int) bool { + return ar[i].pos < ar[j].pos + }) + return ar[0].typ + } + return nil +} + +func DefaultPkgConfig() *pkgwalk.PkgConfig { + conf := &pkgwalk.PkgConfig{IgnoreFuncBodies: false, AllowBinary: true, WithTestFiles: true} + conf.Info = &types.Info{ + Uses: make(map[*ast.Ident]types.Object), + Defs: make(map[*ast.Ident]types.Object), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + Types: 
make(map[ast.Expr]types.TypeAndValue), + Scopes: make(map[ast.Node]*types.Scope), + Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), + } + conf.XInfo = &types.Info{ + Uses: make(map[*ast.Ident]types.Object), + Defs: make(map[*ast.Ident]types.Object), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + Types: make(map[ast.Expr]types.TypeAndValue), + Scopes: make(map[ast.Node]*types.Scope), + Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), + } + return conf +} + +func pretty_print_type_expr(out io.Writer, e ast.Expr, canonical_aliases map[string]string) { + switch t := e.(type) { + case *ast.StarExpr: + fmt.Fprintf(out, "*") + pretty_print_type_expr(out, t.X, canonical_aliases) + case *ast.Ident: + if strings.HasPrefix(t.Name, "$") { + // beautify anonymous types + switch t.Name[1] { + case 's': + fmt.Fprintf(out, "struct") + case 'i': + // ok, in most cases anonymous interface is an + // empty interface, I'll just pretend that + // it's always true + fmt.Fprintf(out, "interface{}") + } + } else if !*g_debug && strings.HasPrefix(t.Name, "!") { + // these are full package names for disambiguating and pretty + // printing packages within packages, e.g. + // !go/ast!ast vs. !github.com/nsf/my/ast!ast + // another ugly hack, if people are punished in hell for ugly hacks + // I'm screwed... 
+ emarkIdx := strings.LastIndex(t.Name, "!") + path := t.Name[1:emarkIdx] + alias := canonical_aliases[path] + if alias == "" { + alias = t.Name[emarkIdx+1:] + } + fmt.Fprintf(out, alias) + } else { + fmt.Fprintf(out, t.Name) + } + case *ast.ArrayType: + al := "" + if t.Len != nil { + al = get_array_len(t.Len) + } + if al != "" { + fmt.Fprintf(out, "[%s]", al) + } else { + fmt.Fprintf(out, "[]") + } + pretty_print_type_expr(out, t.Elt, canonical_aliases) + case *ast.SelectorExpr: + pretty_print_type_expr(out, t.X, canonical_aliases) + fmt.Fprintf(out, ".%s", t.Sel.Name) + case *ast.FuncType: + fmt.Fprintf(out, "func(") + pretty_print_func_field_list(out, t.Params, canonical_aliases) + fmt.Fprintf(out, ")") + + buf := bytes.NewBuffer(make([]byte, 0, 256)) + nresults := pretty_print_func_field_list(buf, t.Results, canonical_aliases) + if nresults > 0 { + results := buf.String() + if strings.IndexAny(results, ", ") != -1 { + results = "(" + results + ")" + } + fmt.Fprintf(out, " %s", results) + } + case *ast.MapType: + fmt.Fprintf(out, "map[") + pretty_print_type_expr(out, t.Key, canonical_aliases) + fmt.Fprintf(out, "]") + pretty_print_type_expr(out, t.Value, canonical_aliases) + case *ast.InterfaceType: + fmt.Fprintf(out, "interface{}") + case *ast.Ellipsis: + fmt.Fprintf(out, "...") + pretty_print_type_expr(out, t.Elt, canonical_aliases) + case *ast.StructType: + fmt.Fprintf(out, "struct") + case *ast.ChanType: + switch t.Dir { + case ast.RECV: + fmt.Fprintf(out, "<-chan ") + case ast.SEND: + fmt.Fprintf(out, "chan<- ") + case ast.SEND | ast.RECV: + fmt.Fprintf(out, "chan ") + } + pretty_print_type_expr(out, t.Value, canonical_aliases) + case *ast.ParenExpr: + fmt.Fprintf(out, "(") + pretty_print_type_expr(out, t.X, canonical_aliases) + fmt.Fprintf(out, ")") + case *ast.IndexExpr: + pretty_print_type_expr(out, t.X, canonical_aliases) + fmt.Fprintf(out, "[") + pretty_print_type_expr(out, t.Index, canonical_aliases) + fmt.Fprintf(out, "]") + case *ast.IndexListExpr: 
+ pretty_print_type_expr(out, t.X, canonical_aliases) + fmt.Fprintf(out, "[") + for i, index := range t.Indices { + if i != 0 { + fmt.Fprintf(out, ", ") + } + pretty_print_type_expr(out, index, canonical_aliases) + } + fmt.Fprintf(out, "]") + case *ast.BadExpr: + // TODO: probably I should check that in a separate function + // and simply discard declarations with BadExpr as a part of their + // type + default: + // the element has some weird type, just ignore it + } +} + +func funHasTypeArgs(fun ast.Expr) bool { + switch fun.(type) { + case *ast.IndexExpr: + return true + case *ast.IndexListExpr: + return true + } + return false +} diff --git a/unsafe_go116.go b/unsafe_go116.go new file mode 100644 index 00000000..b1187167 --- /dev/null +++ b/unsafe_go116.go @@ -0,0 +1,15 @@ +//go:build !go1.17 +// +build !go1.17 + +package main + +var g_builtin_unsafe_package = []byte(` +import +$$ +package unsafe + func @"".Alignof(x ArbitraryType) uintptr + func @"".Offsetof(x ArbitraryType) uintptr + func @"".Sizeof(x ArbitraryType) uintptr + type @"".Pointer *ArbitraryType +$$ +`) diff --git a/unsafe_go117.go b/unsafe_go117.go new file mode 100644 index 00000000..00ece5ca --- /dev/null +++ b/unsafe_go117.go @@ -0,0 +1,17 @@ +//go:build go1.17 && !go1.20 +// +build go1.17,!go1.20 + +package main + +var g_builtin_unsafe_package = []byte(` +import +$$ +package unsafe + func @"".Alignof(x ArbitraryType) uintptr + func @"".Offsetof(x ArbitraryType) uintptr + func @"".Sizeof(x ArbitraryType) uintptr + type @"".Pointer *ArbitraryType + func @"".Slice(ptr *ArbitraryType, len IntegerType) []ArbitraryType + func @"".Add(ptr Pointer, len IntegerType) Pointer +$$ +`) diff --git a/unsafe_go120.go b/unsafe_go120.go new file mode 100644 index 00000000..492317f8 --- /dev/null +++ b/unsafe_go120.go @@ -0,0 +1,20 @@ +//go:build go1.20 +// +build go1.20 + +package main + +var g_builtin_unsafe_package = []byte(` +import +$$ +package unsafe + func @"".Alignof(x ArbitraryType) uintptr + func 
@"".Offsetof(x ArbitraryType) uintptr + func @"".Sizeof(x ArbitraryType) uintptr + type @"".Pointer *ArbitraryType + func @"".Slice(ptr *ArbitraryType, len IntegerType) []ArbitraryType + func @"".SliceData(slice []ArbitraryType) *ArbitraryType + func @"".Add(ptr Pointer, len IntegerType) Pointer + func @"".String(ptr *byte, len IntegerType) string + func @"".StringData(str string) *byte +$$ +`) diff --git a/utils.go b/utils.go index 8b153c5f..4a8412b7 100644 --- a/utils.go +++ b/utils.go @@ -161,7 +161,9 @@ func vendorlessImportPath(ipath string, currentPackagePath string) (string, bool } // this import path does not belong to the current package if currentPackagePath != "" && !strings.Contains(currentPackagePath, split[0]) { - return "", false + if split[0] != currentPackagePath+"/" { + return "", false + } } // Devendorize for use in import statement. if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 { @@ -173,6 +175,21 @@ func vendorlessImportPath(ipath string, currentPackagePath string) (string, bool return ipath, true } +func internalImportPath(ipath string, currentPackagePath string) (string, bool) { + split := strings.Split(ipath, "/internal") + // no vendor in path + // if len(split) == 1 { + // return "", false + // } + // this import path does not belong to the current package + if currentPackagePath != "" && !strings.Contains(currentPackagePath, split[0]) { + if split[0] != currentPackagePath+"/" { + return "", false + } + } + return ipath, true +} + //------------------------------------------------------------------------- // print_backtrace //