Diffstat (limited to 'src/go')
-rw-r--r--  src/go/build/build.go | 437
-rw-r--r--  src/go/build/build_test.go | 227
-rw-r--r--  src/go/build/deps_test.go | 58
-rw-r--r--  src/go/build/read.go | 268
-rw-r--r--  src/go/build/read_test.go | 91
-rw-r--r--  src/go/doc/doc_test.go | 6
-rw-r--r--  src/go/doc/headscan.go | 5
-rw-r--r--  src/go/internal/gccgoimporter/importer.go | 2
-rw-r--r--  src/go/internal/gccgoimporter/importer_test.go | 1
-rw-r--r--  src/go/internal/gccgoimporter/parser.go | 7
-rw-r--r--  src/go/internal/gccgoimporter/testdata/notinheap.go | 4
-rw-r--r--  src/go/internal/gccgoimporter/testdata/notinheap.gox | 7
-rw-r--r--  src/go/internal/gcimporter/gcimporter.go | 3
-rw-r--r--  src/go/internal/gcimporter/support.go | 5
-rw-r--r--  src/go/parser/interface.go | 8
-rw-r--r--  src/go/parser/parser_test.go | 4
-rw-r--r--  src/go/printer/performance_test.go | 2
-rw-r--r--  src/go/token/position.go | 21
-rw-r--r--  src/go/types/decl.go | 266
-rw-r--r--  src/go/types/gotype.go | 4
-rw-r--r--  src/go/types/resolver.go | 278
-rw-r--r--  src/go/types/self_test.go | 80
-rw-r--r--  src/go/types/stdlib_test.go | 2
23 files changed, 1194 insertions, 592 deletions
diff --git a/src/go/build/build.go b/src/go/build/build.go
index 5c3d876130..80e9b9c739 100644
--- a/src/go/build/build.go
+++ b/src/go/build/build.go
@@ -10,11 +10,11 @@ import (
"fmt"
"go/ast"
"go/doc"
- "go/parser"
"go/token"
"internal/goroot"
"internal/goversion"
"io"
+ "io/fs"
"io/ioutil"
"os"
"os/exec"
@@ -98,10 +98,10 @@ type Context struct {
// filepath.EvalSymlinks.
HasSubdir func(root, dir string) (rel string, ok bool)
- // ReadDir returns a slice of os.FileInfo, sorted by Name,
+ // ReadDir returns a slice of fs.FileInfo, sorted by Name,
// describing the content of the named directory.
// If ReadDir is nil, Import uses ioutil.ReadDir.
- ReadDir func(dir string) ([]os.FileInfo, error)
+ ReadDir func(dir string) ([]fs.FileInfo, error)
// OpenFile opens a file (not a directory) for reading.
// If OpenFile is nil, Import uses os.Open.
@@ -183,7 +183,7 @@ func hasSubdir(root, dir string) (rel string, ok bool) {
}
// readDir calls ctxt.ReadDir (if not nil) or else ioutil.ReadDir.
-func (ctxt *Context) readDir(path string) ([]os.FileInfo, error) {
+func (ctxt *Context) readDir(path string) ([]fs.FileInfo, error) {
if f := ctxt.ReadDir; f != nil {
return f(path)
}
@@ -409,19 +409,20 @@ type Package struct {
BinaryOnly bool // cannot be rebuilt from source (has //go:binary-only-package comment)
// Source files
- GoFiles []string // .go source files (excluding CgoFiles, TestGoFiles, XTestGoFiles)
- CgoFiles []string // .go source files that import "C"
- IgnoredGoFiles []string // .go source files ignored for this build
- InvalidGoFiles []string // .go source files with detected problems (parse error, wrong package name, and so on)
- CFiles []string // .c source files
- CXXFiles []string // .cc, .cpp and .cxx source files
- MFiles []string // .m (Objective-C) source files
- HFiles []string // .h, .hh, .hpp and .hxx source files
- FFiles []string // .f, .F, .for and .f90 Fortran source files
- SFiles []string // .s source files
- SwigFiles []string // .swig files
- SwigCXXFiles []string // .swigcxx files
- SysoFiles []string // .syso system object files to add to archive
+ GoFiles []string // .go source files (excluding CgoFiles, TestGoFiles, XTestGoFiles)
+ CgoFiles []string // .go source files that import "C"
+ IgnoredGoFiles []string // .go source files ignored for this build (including ignored _test.go files)
+ InvalidGoFiles []string // .go source files with detected problems (parse error, wrong package name, and so on)
+ IgnoredOtherFiles []string // non-.go source files ignored for this build
+ CFiles []string // .c source files
+ CXXFiles []string // .cc, .cpp and .cxx source files
+ MFiles []string // .m (Objective-C) source files
+ HFiles []string // .h, .hh, .hpp and .hxx source files
+ FFiles []string // .f, .F, .for and .f90 Fortran source files
+ SFiles []string // .s source files
+ SwigFiles []string // .swig files
+ SwigCXXFiles []string // .swigcxx files
+ SysoFiles []string // .syso system object files to add to archive
// Cgo directives
CgoCFLAGS []string // Cgo CFLAGS directives
@@ -431,17 +432,26 @@ type Package struct {
CgoLDFLAGS []string // Cgo LDFLAGS directives
CgoPkgConfig []string // Cgo pkg-config directives
- // Dependency information
- Imports []string // import paths from GoFiles, CgoFiles
- ImportPos map[string][]token.Position // line information for Imports
-
// Test information
- TestGoFiles []string // _test.go files in package
+ TestGoFiles []string // _test.go files in package
+ XTestGoFiles []string // _test.go files outside package
+
+ // Dependency information
+ Imports []string // import paths from GoFiles, CgoFiles
+ ImportPos map[string][]token.Position // line information for Imports
TestImports []string // import paths from TestGoFiles
TestImportPos map[string][]token.Position // line information for TestImports
- XTestGoFiles []string // _test.go files outside package
XTestImports []string // import paths from XTestGoFiles
XTestImportPos map[string][]token.Position // line information for XTestImports
+
+ // //go:embed patterns found in Go source files
+ // For example, if a source file says
+ // //go:embed a* b.c
+ // then the list will contain those two strings as separate entries.
+ // (See package embed for more details about //go:embed.)
+ EmbedPatterns []string // patterns from GoFiles, CgoFiles
+ TestEmbedPatterns []string // patterns from TestGoFiles
+ XTestEmbedPatterns []string // patterns from XTestGoFiles
}
// IsCommand reports whether the package is considered a
@@ -784,6 +794,7 @@ Found:
var badGoError error
var Sfiles []string // files with ".S"(capital S)/.sx(capital s equivalent for case insensitive filesystems)
var firstFile, firstCommentFile string
+ var embeds, testEmbeds, xTestEmbeds []string
imported := make(map[string][]token.Position)
testImported := make(map[string][]token.Position)
xTestImported := make(map[string][]token.Position)
@@ -793,7 +804,7 @@ Found:
if d.IsDir() {
continue
}
- if (d.Mode() & os.ModeSymlink) != 0 {
+ if d.Mode()&fs.ModeSymlink != 0 {
if fi, err := os.Stat(filepath.Join(p.Dir, d.Name())); err == nil && fi.IsDir() {
// Symlinks to directories are not source files.
continue
@@ -810,60 +821,43 @@ Found:
p.InvalidGoFiles = append(p.InvalidGoFiles, name)
}
- match, data, filename, err := ctxt.matchFile(p.Dir, name, allTags, &p.BinaryOnly)
+ info, err := ctxt.matchFile(p.Dir, name, allTags, &p.BinaryOnly, fset)
if err != nil {
badFile(err)
continue
}
- if !match {
- if ext == ".go" {
+ if info == nil {
+ if strings.HasPrefix(name, "_") || strings.HasPrefix(name, ".") {
+ // not due to build constraints - don't report
+ } else if ext == ".go" {
p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
+ } else if fileListForExt(p, ext) != nil {
+ p.IgnoredOtherFiles = append(p.IgnoredOtherFiles, name)
}
continue
}
+ data, filename := info.header, info.name
// Going to save the file. For non-Go files, can stop here.
switch ext {
- case ".c":
- p.CFiles = append(p.CFiles, name)
- continue
- case ".cc", ".cpp", ".cxx":
- p.CXXFiles = append(p.CXXFiles, name)
- continue
- case ".m":
- p.MFiles = append(p.MFiles, name)
- continue
- case ".h", ".hh", ".hpp", ".hxx":
- p.HFiles = append(p.HFiles, name)
- continue
- case ".f", ".F", ".for", ".f90":
- p.FFiles = append(p.FFiles, name)
- continue
- case ".s":
- p.SFiles = append(p.SFiles, name)
- continue
+ case ".go":
+ // keep going
case ".S", ".sx":
+ // special case for cgo, handled at end
Sfiles = append(Sfiles, name)
continue
- case ".swig":
- p.SwigFiles = append(p.SwigFiles, name)
- continue
- case ".swigcxx":
- p.SwigCXXFiles = append(p.SwigCXXFiles, name)
- continue
- case ".syso":
- // binary objects to add to package archive
- // Likely of the form foo_windows.syso, but
- // the name was vetted above with goodOSArchFile.
- p.SysoFiles = append(p.SysoFiles, name)
+ default:
+ if list := fileListForExt(p, ext); list != nil {
+ *list = append(*list, name)
+ }
continue
}
- pf, err := parser.ParseFile(fset, filename, data, parser.ImportsOnly|parser.ParseComments)
- if err != nil {
- badFile(err)
+ if info.parseErr != nil {
+ badFile(info.parseErr)
continue
}
+ pf := info.parsed
pkg := pf.Name.Name
if pkg == "documentation" {
@@ -910,48 +904,23 @@ Found:
}
// Record imports and information about cgo.
- type importPos struct {
- path string
- pos token.Pos
- }
- var fileImports []importPos
isCgo := false
- for _, decl := range pf.Decls {
- d, ok := decl.(*ast.GenDecl)
- if !ok {
- continue
- }
- for _, dspec := range d.Specs {
- spec, ok := dspec.(*ast.ImportSpec)
- if !ok {
+ for _, imp := range info.imports {
+ if imp.path == "C" {
+ if isTest {
+ badFile(fmt.Errorf("use of cgo in test %s not supported", filename))
continue
}
- quoted := spec.Path.Value
- path, err := strconv.Unquote(quoted)
- if err != nil {
- panic(fmt.Sprintf("%s: parser returned invalid quoted string: <%s>", filename, quoted))
- }
- fileImports = append(fileImports, importPos{path, spec.Pos()})
- if path == "C" {
- if isTest {
- badFile(fmt.Errorf("use of cgo in test %s not supported", filename))
- } else {
- cg := spec.Doc
- if cg == nil && len(d.Specs) == 1 {
- cg = d.Doc
- }
- if cg != nil {
- if err := ctxt.saveCgo(filename, p, cg); err != nil {
- badFile(err)
- }
- }
- isCgo = true
+ isCgo = true
+ if imp.doc != nil {
+ if err := ctxt.saveCgo(filename, p, imp.doc); err != nil {
+ badFile(err)
}
}
}
}
- var fileList *[]string
+ var fileList, embedList *[]string
var importMap map[string][]token.Position
switch {
case isCgo:
@@ -959,6 +928,7 @@ Found:
if ctxt.CgoEnabled {
fileList = &p.CgoFiles
importMap = imported
+ embedList = &embeds
} else {
// Ignore imports from cgo files if cgo is disabled.
fileList = &p.IgnoredGoFiles
@@ -966,19 +936,25 @@ Found:
case isXTest:
fileList = &p.XTestGoFiles
importMap = xTestImported
+ embedList = &xTestEmbeds
case isTest:
fileList = &p.TestGoFiles
importMap = testImported
+ embedList = &testEmbeds
default:
fileList = &p.GoFiles
importMap = imported
+ embedList = &embeds
}
*fileList = append(*fileList, name)
if importMap != nil {
- for _, imp := range fileImports {
+ for _, imp := range info.imports {
importMap[imp.path] = append(importMap[imp.path], fset.Position(imp.pos))
}
}
+ if embedList != nil {
+ *embedList = append(*embedList, info.embeds...)
+ }
}
for tag := range allTags {
@@ -986,6 +962,10 @@ Found:
}
sort.Strings(p.AllTags)
+ p.EmbedPatterns = uniq(embeds)
+ p.TestEmbedPatterns = uniq(testEmbeds)
+ p.XTestEmbedPatterns = uniq(xTestEmbeds)
+
p.Imports, p.ImportPos = cleanImports(imported)
p.TestImports, p.TestImportPos = cleanImports(testImported)
p.XTestImports, p.XTestImportPos = cleanImports(xTestImported)
@@ -996,6 +976,9 @@ Found:
if len(p.CgoFiles) > 0 {
p.SFiles = append(p.SFiles, Sfiles...)
sort.Strings(p.SFiles)
+ } else {
+ p.IgnoredOtherFiles = append(p.IgnoredOtherFiles, Sfiles...)
+ sort.Strings(p.IgnoredOtherFiles)
}
if badGoError != nil {
@@ -1007,6 +990,46 @@ Found:
return p, pkgerr
}
+func fileListForExt(p *Package, ext string) *[]string {
+ switch ext {
+ case ".c":
+ return &p.CFiles
+ case ".cc", ".cpp", ".cxx":
+ return &p.CXXFiles
+ case ".m":
+ return &p.MFiles
+ case ".h", ".hh", ".hpp", ".hxx":
+ return &p.HFiles
+ case ".f", ".F", ".for", ".f90":
+ return &p.FFiles
+ case ".s", ".S", ".sx":
+ return &p.SFiles
+ case ".swig":
+ return &p.SwigFiles
+ case ".swigcxx":
+ return &p.SwigCXXFiles
+ case ".syso":
+ return &p.SysoFiles
+ }
+ return nil
+}
+
+func uniq(list []string) []string {
+ if list == nil {
+ return nil
+ }
+ out := make([]string, len(list))
+ copy(out, list)
+ sort.Strings(out)
+ uniq := out[:0]
+ for _, x := range out {
+ if len(uniq) == 0 || uniq[len(uniq)-1] != x {
+ uniq = append(uniq, x)
+ }
+ }
+ return uniq
+}
+
var errNoModules = errors.New("not using modules")
// importGo checks whether it can use the go command to find the directory for path.
@@ -1298,22 +1321,46 @@ func parseWord(data []byte) (word, rest []byte) {
// MatchFile considers the name of the file and may use ctxt.OpenFile to
// read some or all of the file's content.
func (ctxt *Context) MatchFile(dir, name string) (match bool, err error) {
- match, _, _, err = ctxt.matchFile(dir, name, nil, nil)
- return
+ info, err := ctxt.matchFile(dir, name, nil, nil, nil)
+ return info != nil, err
+}
+
+var dummyPkg Package
+
+// fileInfo records information learned about a file included in a build.
+type fileInfo struct {
+ name string // full name including dir
+ header []byte
+ fset *token.FileSet
+ parsed *ast.File
+ parseErr error
+ imports []fileImport
+ embeds []string
+ embedErr error
+}
+
+type fileImport struct {
+ path string
+ pos token.Pos
+ doc *ast.CommentGroup
}
// matchFile determines whether the file with the given name in the given directory
// should be included in the package being constructed.
-// It returns the data read from the file.
+// If the file should be included, matchFile returns a non-nil *fileInfo (and a nil error).
+// Non-nil errors are reserved for unexpected problems.
+//
// If name denotes a Go program, matchFile reads until the end of the
-// imports (and returns that data) even though it only considers text
-// until the first non-comment.
+// imports and returns that section of the file in the fileInfo's header field,
+// even though it only considers text until the first non-comment
+// for +build lines.
+//
// If allTags is non-nil, matchFile records any encountered build tag
// by setting allTags[tag] = true.
-func (ctxt *Context) matchFile(dir, name string, allTags map[string]bool, binaryOnly *bool) (match bool, data []byte, filename string, err error) {
+func (ctxt *Context) matchFile(dir, name string, allTags map[string]bool, binaryOnly *bool, fset *token.FileSet) (*fileInfo, error) {
if strings.HasPrefix(name, "_") ||
strings.HasPrefix(name, ".") {
- return
+ return nil, nil
}
i := strings.LastIndex(name, ".")
@@ -1323,53 +1370,53 @@ func (ctxt *Context) matchFile(dir, name string, allTags map[string]bool, binary
ext := name[i:]
if !ctxt.goodOSArchFile(name, allTags) && !ctxt.UseAllFiles {
- return
+ return nil, nil
}
- switch ext {
- case ".go", ".c", ".cc", ".cxx", ".cpp", ".m", ".s", ".h", ".hh", ".hpp", ".hxx", ".f", ".F", ".f90", ".S", ".sx", ".swig", ".swigcxx":
- // tentatively okay - read to make sure
- case ".syso":
- // binary, no reading
- match = true
- return
- default:
+ if ext != ".go" && fileListForExt(&dummyPkg, ext) == nil {
// skip
- return
+ return nil, nil
+ }
+
+ info := &fileInfo{name: ctxt.joinPath(dir, name), fset: fset}
+ if ext == ".syso" {
+ // binary, no reading
+ return info, nil
}
- filename = ctxt.joinPath(dir, name)
- f, err := ctxt.openFile(filename)
+ f, err := ctxt.openFile(info.name)
if err != nil {
- return
+ return nil, err
}
- if strings.HasSuffix(filename, ".go") {
- data, err = readImports(f, false, nil)
- if strings.HasSuffix(filename, "_test.go") {
+ if strings.HasSuffix(name, ".go") {
+ err = readGoInfo(f, info)
+ if strings.HasSuffix(name, "_test.go") {
binaryOnly = nil // ignore //go:binary-only-package comments in _test.go files
}
} else {
binaryOnly = nil // ignore //go:binary-only-package comments in non-Go sources
- data, err = readComments(f)
+ info.header, err = readComments(f)
}
f.Close()
if err != nil {
- err = fmt.Errorf("read %s: %v", filename, err)
- return
+ return nil, fmt.Errorf("read %s: %v", info.name, err)
}
// Look for +build comments to accept or reject the file.
- var sawBinaryOnly bool
- if !ctxt.shouldBuild(data, allTags, &sawBinaryOnly) && !ctxt.UseAllFiles {
- return
+ ok, sawBinaryOnly, err := ctxt.shouldBuild(info.header, allTags)
+ if err != nil {
+ return nil, err
+ }
+ if !ok && !ctxt.UseAllFiles {
+ return nil, nil
}
if binaryOnly != nil && sawBinaryOnly {
*binaryOnly = true
}
- match = true
- return
+
+ return info, nil
}
func cleanImports(m map[string][]token.Position) ([]string, map[string][]token.Position) {
@@ -1391,7 +1438,25 @@ func ImportDir(dir string, mode ImportMode) (*Package, error) {
return Default.ImportDir(dir, mode)
}
-var slashslash = []byte("//")
+var (
+ bSlashSlash = []byte(slashSlash)
+ bStarSlash = []byte(starSlash)
+ bSlashStar = []byte(slashStar)
+
+ goBuildComment = []byte("//go:build")
+
+ errGoBuildWithoutBuild = errors.New("//go:build comment without // +build comment")
+ errMultipleGoBuild = errors.New("multiple //go:build comments") // unused in Go 1.(N-1)
+)
+
+func isGoBuildComment(line []byte) bool {
+ if !bytes.HasPrefix(line, goBuildComment) {
+ return false
+ }
+ line = bytes.TrimSpace(line)
+ rest := line[len(goBuildComment):]
+ return len(rest) == 0 || len(bytes.TrimSpace(rest)) < len(rest)
+}
// Special comment denoting a binary-only package.
// See https://golang.org/design/2775-binary-only-packages
@@ -1411,37 +1476,24 @@ var binaryOnlyComment = []byte("//go:binary-only-package")
//
// marks the file as applicable only on Windows and Linux.
//
-// If shouldBuild finds a //go:binary-only-package comment in the file,
-// it sets *binaryOnly to true. Otherwise it does not change *binaryOnly.
+// For each build tag it consults, shouldBuild sets allTags[tag] = true.
//
-func (ctxt *Context) shouldBuild(content []byte, allTags map[string]bool, binaryOnly *bool) bool {
- sawBinaryOnly := false
+// shouldBuild reports whether the file should be built
+// and whether a //go:binary-only-package comment was found.
+func (ctxt *Context) shouldBuild(content []byte, allTags map[string]bool) (shouldBuild, binaryOnly bool, err error) {
// Pass 1. Identify leading run of // comments and blank lines,
// which must be followed by a blank line.
- end := 0
- p := content
- for len(p) > 0 {
- line := p
- if i := bytes.IndexByte(line, '\n'); i >= 0 {
- line, p = line[:i], p[i+1:]
- } else {
- p = p[len(p):]
- }
- line = bytes.TrimSpace(line)
- if len(line) == 0 { // Blank line
- end = len(content) - len(p)
- continue
- }
- if !bytes.HasPrefix(line, slashslash) { // Not comment line
- break
- }
+ // Also identify any //go:build comments.
+ content, goBuild, sawBinaryOnly, err := parseFileHeader(content)
+ if err != nil {
+ return false, false, err
}
- content = content[:end]
- // Pass 2. Process each line in the run.
- p = content
- allok := true
+ // Pass 2. Process each +build line in the run.
+ p := content
+ shouldBuild = true
+ sawBuild := false
for len(p) > 0 {
line := p
if i := bytes.IndexByte(line, '\n'); i >= 0 {
@@ -1450,17 +1502,15 @@ func (ctxt *Context) shouldBuild(content []byte, allTags map[string]bool, binary
p = p[len(p):]
}
line = bytes.TrimSpace(line)
- if !bytes.HasPrefix(line, slashslash) {
+ if !bytes.HasPrefix(line, bSlashSlash) {
continue
}
- if bytes.Equal(line, binaryOnlyComment) {
- sawBinaryOnly = true
- }
- line = bytes.TrimSpace(line[len(slashslash):])
+ line = bytes.TrimSpace(line[len(bSlashSlash):])
if len(line) > 0 && line[0] == '+' {
// Looks like a comment +line.
f := strings.Fields(string(line))
if f[0] == "+build" {
+ sawBuild = true
ok := false
for _, tok := range f[1:] {
if ctxt.match(tok, allTags) {
@@ -1468,17 +1518,84 @@ func (ctxt *Context) shouldBuild(content []byte, allTags map[string]bool, binary
}
}
if !ok {
- allok = false
+ shouldBuild = false
}
}
}
}
- if binaryOnly != nil && sawBinaryOnly {
- *binaryOnly = true
+ if goBuild != nil && !sawBuild {
+ return false, false, errGoBuildWithoutBuild
+ }
+
+ return shouldBuild, sawBinaryOnly, nil
+}
+
+func parseFileHeader(content []byte) (trimmed, goBuild []byte, sawBinaryOnly bool, err error) {
+ end := 0
+ p := content
+ ended := false // found non-blank, non-// line, so stopped accepting // +build lines
+ inSlashStar := false // in /* */ comment
+
+Lines:
+ for len(p) > 0 {
+ line := p
+ if i := bytes.IndexByte(line, '\n'); i >= 0 {
+ line, p = line[:i], p[i+1:]
+ } else {
+ p = p[len(p):]
+ }
+ line = bytes.TrimSpace(line)
+ if len(line) == 0 && !ended { // Blank line
+ // Remember position of most recent blank line.
+ // When we find the first non-blank, non-// line,
+ // this "end" position marks the latest file position
+ // where a // +build line can appear.
+ // (It must appear _before_ a blank line before the non-blank, non-// line.
+ // Yes, that's confusing, which is part of why we moved to //go:build lines.)
+ // Note that ended==false here means that inSlashStar==false,
+ // since seeing a /* would have set ended==true.
+ end = len(content) - len(p)
+ continue Lines
+ }
+ if !bytes.HasPrefix(line, slashSlash) { // Not comment line
+ ended = true
+ }
+
+ if !inSlashStar && isGoBuildComment(line) {
+ if false && goBuild != nil { // enabled in Go 1.N
+ return nil, nil, false, errMultipleGoBuild
+ }
+ goBuild = line
+ }
+ if !inSlashStar && bytes.Equal(line, binaryOnlyComment) {
+ sawBinaryOnly = true
+ }
+
+ Comments:
+ for len(line) > 0 {
+ if inSlashStar {
+ if i := bytes.Index(line, starSlash); i >= 0 {
+ inSlashStar = false
+ line = bytes.TrimSpace(line[i+len(starSlash):])
+ continue Comments
+ }
+ continue Lines
+ }
+ if bytes.HasPrefix(line, bSlashSlash) {
+ continue Lines
+ }
+ if bytes.HasPrefix(line, bSlashStar) {
+ inSlashStar = true
+ line = bytes.TrimSpace(line[len(bSlashStar):])
+ continue Comments
+ }
+ // Found non-comment text.
+ break Lines
+ }
}
- return allok
+ return content[:end], goBuild, sawBinaryOnly, nil
}
// saveCgo saves the information from the #cgo lines in the import "C" comment.
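
Taken together, the build.go changes above add IgnoredOtherFiles and the EmbedPatterns fields to Package. A minimal sketch of how those new exported fields look to a caller, assuming a toolchain that includes this change ("." is a placeholder directory):

	package main

	import (
		"fmt"
		"go/build"
		"log"
	)

	func main() {
		// ImportDir follows the same path as Import above: matched .go files
		// land in GoFiles/CgoFiles/TestGoFiles, files excluded by build
		// constraints land in IgnoredGoFiles or the new IgnoredOtherFiles,
		// and //go:embed patterns are collected into the EmbedPatterns fields.
		pkg, err := build.ImportDir(".", 0)
		if err != nil {
			log.Fatal(err) // e.g. no buildable Go source files in the directory
		}
		fmt.Println("GoFiles:          ", pkg.GoFiles)
		fmt.Println("IgnoredGoFiles:   ", pkg.IgnoredGoFiles)
		fmt.Println("IgnoredOtherFiles:", pkg.IgnoredOtherFiles)
		fmt.Println("EmbedPatterns:    ", pkg.EmbedPatterns)
	}
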
diff --git a/src/go/build/build_test.go b/src/go/build/build_test.go
index 22c62ce87d..5a4a2d62f5 100644
--- a/src/go/build/build_test.go
+++ b/src/go/build/build_test.go
@@ -120,7 +120,7 @@ func TestMultiplePackageImport(t *testing.T) {
}
func TestLocalDirectory(t *testing.T) {
- if (runtime.GOOS == "darwin" || runtime.GOOS == "ios") && runtime.GOARCH == "arm64" {
+ if runtime.GOOS == "ios" {
t.Skipf("skipping on %s/%s, no valid GOROOT", runtime.GOOS, runtime.GOARCH)
}
@@ -138,48 +138,178 @@ func TestLocalDirectory(t *testing.T) {
}
}
-func TestShouldBuild(t *testing.T) {
- const file1 = "// +build tag1\n\n" +
- "package main\n"
- want1 := map[string]bool{"tag1": true}
-
- const file2 = "// +build cgo\n\n" +
- "// This package implements parsing of tags like\n" +
- "// +build tag1\n" +
- "package build"
- want2 := map[string]bool{"cgo": true}
-
- const file3 = "// Copyright The Go Authors.\n\n" +
- "package build\n\n" +
- "// shouldBuild checks tags given by lines of the form\n" +
- "// +build tag\n" +
- "func shouldBuild(content []byte)\n"
- want3 := map[string]bool{}
-
- ctx := &Context{BuildTags: []string{"tag1"}}
- m := map[string]bool{}
- if !ctx.shouldBuild([]byte(file1), m, nil) {
- t.Errorf("shouldBuild(file1) = false, want true")
- }
- if !reflect.DeepEqual(m, want1) {
- t.Errorf("shouldBuild(file1) tags = %v, want %v", m, want1)
- }
-
- m = map[string]bool{}
- if ctx.shouldBuild([]byte(file2), m, nil) {
- t.Errorf("shouldBuild(file2) = true, want false")
- }
- if !reflect.DeepEqual(m, want2) {
- t.Errorf("shouldBuild(file2) tags = %v, want %v", m, want2)
- }
+var shouldBuildTests = []struct {
+ name string
+ content string
+ tags map[string]bool
+ binaryOnly bool
+ shouldBuild bool
+ err error
+}{
+ {
+ name: "Yes",
+ content: "// +build yes\n\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true},
+ shouldBuild: true,
+ },
+ {
+ name: "Or",
+ content: "// +build no yes\n\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true, "no": true},
+ shouldBuild: true,
+ },
+ {
+ name: "And",
+ content: "// +build no,yes\n\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true, "no": true},
+ shouldBuild: false,
+ },
+ {
+ name: "Cgo",
+ content: "// +build cgo\n\n" +
+ "// Copyright The Go Authors.\n\n" +
+ "// This package implements parsing of tags like\n" +
+ "// +build tag1\n" +
+ "package build",
+ tags: map[string]bool{"cgo": true},
+ shouldBuild: false,
+ },
+ {
+ name: "AfterPackage",
+ content: "// Copyright The Go Authors.\n\n" +
+ "package build\n\n" +
+ "// shouldBuild checks tags given by lines of the form\n" +
+ "// +build tag\n" +
+ "func shouldBuild(content []byte)\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "TooClose",
+ content: "// +build yes\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "TooCloseNo",
+ content: "// +build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "BinaryOnly",
+ content: "//go:binary-only-package\n" +
+ "// +build yes\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ binaryOnly: true,
+ shouldBuild: true,
+ },
+ {
+ name: "ValidGoBuild",
+ content: "// +build yes\n\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true},
+ shouldBuild: true,
+ },
+ {
+ name: "MissingBuild",
+ content: "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: false,
+ err: errGoBuildWithoutBuild,
+ },
+ {
+ name: "MissingBuild2",
+ content: "/* */\n" +
+ "// +build yes\n\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: false,
+ err: errGoBuildWithoutBuild,
+ },
+ {
+ name: "MissingBuild2",
+ content: "/*\n" +
+ "// +build yes\n\n" +
+ "*/\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: false,
+ err: errGoBuildWithoutBuild,
+ },
+ {
+ name: "Comment1",
+ content: "/*\n" +
+ "//go:build no\n" +
+ "*/\n\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "Comment2",
+ content: "/*\n" +
+ "text\n" +
+ "*/\n\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: false,
+ err: errGoBuildWithoutBuild,
+ },
+ {
+ name: "Comment3",
+ content: "/*/*/ /* hi *//* \n" +
+ "text\n" +
+ "*/\n\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: false,
+ err: errGoBuildWithoutBuild,
+ },
+ {
+ name: "Comment4",
+ content: "/**///go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "Comment5",
+ content: "/**/\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: false,
+ err: errGoBuildWithoutBuild,
+ },
+}
- m = map[string]bool{}
- ctx = &Context{BuildTags: nil}
- if !ctx.shouldBuild([]byte(file3), m, nil) {
- t.Errorf("shouldBuild(file3) = false, want true")
- }
- if !reflect.DeepEqual(m, want3) {
- t.Errorf("shouldBuild(file3) tags = %v, want %v", m, want3)
+func TestShouldBuild(t *testing.T) {
+ for _, tt := range shouldBuildTests {
+ t.Run(tt.name, func(t *testing.T) {
+ ctx := &Context{BuildTags: []string{"yes"}}
+ tags := map[string]bool{}
+ shouldBuild, binaryOnly, err := ctx.shouldBuild([]byte(tt.content), tags)
+ if shouldBuild != tt.shouldBuild || binaryOnly != tt.binaryOnly || !reflect.DeepEqual(tags, tt.tags) || err != tt.err {
+ t.Errorf("mismatch:\n"+
+ "have shouldBuild=%v, binaryOnly=%v, tags=%v, err=%v\n"+
+ "want shouldBuild=%v, binaryOnly=%v, tags=%v, err=%v",
+ shouldBuild, binaryOnly, tags, err,
+ tt.shouldBuild, tt.binaryOnly, tt.tags, tt.err)
+ }
+ })
}
}
@@ -250,7 +380,7 @@ func TestMatchFile(t *testing.T) {
}
func TestImportCmd(t *testing.T) {
- if (runtime.GOOS == "darwin" || runtime.GOOS == "ios") && runtime.GOARCH == "arm64" {
+ if runtime.GOOS == "ios" {
t.Skipf("skipping on %s/%s, no valid GOROOT", runtime.GOOS, runtime.GOARCH)
}
@@ -482,11 +612,13 @@ func TestImportPackageOutsideModule(t *testing.T) {
ctxt.GOPATH = gopath
ctxt.Dir = filepath.Join(gopath, "src/example.com/p")
- want := "cannot find module providing package"
+ want := "working directory is not part of a module"
if _, err := ctxt.Import("example.com/p", gopath, FindOnly); err == nil {
t.Fatal("importing package when no go.mod is present succeeded unexpectedly")
} else if errStr := err.Error(); !strings.Contains(errStr, want) {
t.Fatalf("error when importing package when no go.mod is present: got %q; want %q", errStr, want)
+ } else {
+ t.Logf(`ctxt.Import("example.com/p", _, FindOnly): %v`, err)
}
}
@@ -547,9 +679,16 @@ func TestMissingImportErrorRepetition(t *testing.T) {
if err == nil {
t.Fatal("unexpected success")
}
+
// Don't count the package path with a URL like https://...?go-get=1.
// See golang.org/issue/35986.
errStr := strings.ReplaceAll(err.Error(), "://"+pkgPath+"?go-get=1", "://...?go-get=1")
+
+ // Also don't count instances in suggested "go get" or similar commands
+ // (see https://golang.org/issue/41576). The suggested command typically
+ // follows a semicolon.
+ errStr = strings.SplitN(errStr, ";", 2)[0]
+
if n := strings.Count(errStr, pkgPath); n != 1 {
t.Fatalf("package path %q appears in error %d times; should appear once\nerror: %v", pkgPath, n, err)
}
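
The MissingBuild cases above exercise the transitional rule enforced in shouldBuild: a //go:build comment must be accompanied by a // +build comment. A file-header sketch that passes the check, with illustrative constraints; at this stage only the // +build line is actually evaluated:

	// A header sketch: the //go:build line is recorded but not yet evaluated;
	// the // +build line below is what selects the file, and the blank line
	// before the package clause is what makes the constraints take effect.
	//
	//go:build linux && amd64
	// +build linux,amd64

	package demo
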
diff --git a/src/go/build/deps_test.go b/src/go/build/deps_test.go
index fa8ecf10f4..b26b2bd199 100644
--- a/src/go/build/deps_test.go
+++ b/src/go/build/deps_test.go
@@ -11,12 +11,12 @@ import (
"bytes"
"fmt"
"internal/testenv"
+ "io/fs"
"io/ioutil"
"os"
"path/filepath"
"runtime"
"sort"
- "strconv"
"strings"
"testing"
)
@@ -99,10 +99,16 @@ var depsRules = `
RUNTIME
< io;
+ syscall !< io;
reflect !< sort;
+ RUNTIME, unicode/utf8
+ < path;
+
+ unicode !< path;
+
# SYSCALL is RUNTIME plus the packages necessary for basic system calls.
- RUNTIME, unicode/utf8, unicode/utf16, io
+ RUNTIME, unicode/utf8, unicode/utf16
< internal/syscall/windows/sysdll, syscall/js
< syscall
< internal/syscall/unix, internal/syscall/windows, internal/syscall/windows/registry
@@ -116,6 +122,9 @@ var depsRules = `
< context
< TIME;
+ TIME, io, path, sort
+ < io/fs;
+
# MATH is RUNTIME plus the basic math packages.
RUNTIME
< math
@@ -129,6 +138,9 @@ var depsRules = `
MATH
< math/rand;
+ MATH
+ < runtime/metrics;
+
MATH, unicode/utf8
< strconv;
@@ -137,7 +149,7 @@ var depsRules = `
# STR is basic string and buffer manipulation.
RUNTIME, io, unicode/utf8, unicode/utf16, unicode
< bytes, strings
- < bufio, path;
+ < bufio;
bufio, path, strconv
< STR;
@@ -145,7 +157,7 @@ var depsRules = `
# OS is basic OS access, including helpers (path/filepath, os/exec, etc).
# OS includes string routines, but those must be layered above package os.
# OS does not include reflection.
- TIME, io, sort
+ io/fs
< internal/testlog
< internal/poll
< os
@@ -155,7 +167,9 @@ var depsRules = `
os/signal, STR
< path/filepath
- < io/ioutil, os/exec
+ < io/ioutil, os/exec;
+
+ io/ioutil, os/exec, os/signal
< OS;
reflect !< OS;
@@ -318,7 +332,6 @@ var depsRules = `
# so large dependencies must be kept out.
# This is a long-looking list but most of these
# are small with few dependencies.
- # math/rand should probably be removed at some point.
CGO,
golang.org/x/net/dns/dnsmessage,
golang.org/x/net/lif,
@@ -327,11 +340,11 @@ var depsRules = `
internal/poll,
internal/singleflight,
internal/race,
- math/rand,
os
< net;
fmt, unicode !< net;
+ math/rand !< net; # net uses runtime instead
# NET is net plus net-helper packages.
FMT, net
@@ -449,7 +462,7 @@ var depsRules = `
OS, compress/gzip, regexp
< internal/profile;
- html/template, internal/profile, net/http, runtime/pprof, runtime/trace
+ html, internal/profile, net/http, runtime/pprof, runtime/trace
< net/http/pprof;
# RPC
@@ -457,14 +470,19 @@ var depsRules = `
< net/rpc
< net/rpc/jsonrpc;
+ # System Information
+ internal/cpu, sync
+ < internal/sysinfo;
+
# Test-only
log
- < testing/iotest;
+ < testing/iotest
+ < testing/fstest;
FMT, flag, math/rand
< testing/quick;
- FMT, flag, runtime/debug, runtime/trace
+ FMT, flag, runtime/debug, runtime/trace, internal/sysinfo
< testing;
internal/testlog, runtime/pprof, regexp
@@ -479,7 +497,7 @@ var depsRules = `
CGO, OS, fmt
< os/signal/internal/pty;
- NET, testing
+ NET, testing, math/rand
< golang.org/x/net/nettest;
FMT, container/heap, math/rand
@@ -492,7 +510,7 @@ func listStdPkgs(goroot string) ([]string, error) {
var pkgs []string
src := filepath.Join(goroot, "src") + string(filepath.Separator)
- walkFn := func(path string, fi os.FileInfo, err error) error {
+ walkFn := func(path string, fi fs.FileInfo, err error) error {
if err != nil || !fi.IsDir() || path == src {
return nil
}
@@ -594,24 +612,22 @@ func findImports(pkg string) ([]string, error) {
if !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") {
continue
}
- f, err := os.Open(filepath.Join(dir, name))
+ var info fileInfo
+ info.name = filepath.Join(dir, name)
+ f, err := os.Open(info.name)
if err != nil {
return nil, err
}
- var imp []string
- data, err := readImports(f, false, &imp)
+ err = readGoInfo(f, &info)
f.Close()
if err != nil {
return nil, fmt.Errorf("reading %v: %v", name, err)
}
- if bytes.Contains(data, buildIgnore) {
+ if bytes.Contains(info.header, buildIgnore) {
continue
}
- for _, quoted := range imp {
- path, err := strconv.Unquote(quoted)
- if err != nil {
- continue
- }
+ for _, imp := range info.imports {
+ path := imp.path
if !haveImport[path] {
haveImport[path] = true
imports = append(imports, path)
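
For readers unfamiliar with the depsRules language: "a < b" says that b may import a (b sits above a in the layering), and "a !< b" says that b must not import a. A hypothetical Go excerpt restating two of the new rules in that syntax, purely for illustration:

	package demo

	// exampleRules is a hypothetical excerpt in the depsRules syntax, not part
	// of the real test: "<" layers the right-hand packages above the left-hand
	// ones, "!<" forbids the dependency outright.
	var exampleRules = `
		# io/fs sits above the TIME group, io, path and sort.
		TIME, io, path, sort
		< io/fs;

		# net must not depend on math/rand.
		math/rand !< net;
	`
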
diff --git a/src/go/build/read.go b/src/go/build/read.go
index 29b8cdc786..6806a51c24 100644
--- a/src/go/build/read.go
+++ b/src/go/build/read.go
@@ -7,7 +7,13 @@ package build
import (
"bufio"
"errors"
+ "fmt"
+ "go/ast"
+ "go/parser"
"io"
+ "strconv"
+ "strings"
+ "unicode"
"unicode/utf8"
)
@@ -57,6 +63,29 @@ func (r *importReader) readByte() byte {
return c
}
+// readByteNoBuf is like readByte but doesn't buffer the byte.
+// It exhausts r.buf before reading from r.b.
+func (r *importReader) readByteNoBuf() byte {
+ if len(r.buf) > 0 {
+ c := r.buf[0]
+ r.buf = r.buf[1:]
+ return c
+ }
+ c, err := r.b.ReadByte()
+ if err == nil && c == 0 {
+ err = errNUL
+ }
+ if err != nil {
+ if err == io.EOF {
+ r.eof = true
+ } else if r.err == nil {
+ r.err = err
+ }
+ c = 0
+ }
+ return c
+}
+
// peekByte returns the next byte from the input reader but does not advance beyond it.
// If skipSpace is set, peekByte skips leading spaces and comments.
func (r *importReader) peekByte(skipSpace bool) byte {
@@ -117,6 +146,74 @@ func (r *importReader) nextByte(skipSpace bool) byte {
return c
}
+var goEmbed = []byte("go:embed")
+
+// findEmbed advances the input reader to the next //go:embed comment.
+// It reports whether it found a comment.
+// (Otherwise it found an error or EOF.)
+func (r *importReader) findEmbed(first bool) bool {
+ // The import block scan stopped after a non-space character,
+ // so the reader is not at the start of a line on the first call.
+ // After that, each //go:embed extraction leaves the reader
+ // at the end of a line.
+ startLine := !first
+ var c byte
+ for r.err == nil && !r.eof {
+ c = r.readByteNoBuf()
+ Reswitch:
+ switch c {
+ default:
+ startLine = false
+
+ case '\n':
+ startLine = true
+
+ case ' ', '\t':
+ // leave startLine alone
+
+ case '/':
+ c = r.readByteNoBuf()
+ switch c {
+ default:
+ startLine = false
+ goto Reswitch
+
+ case '*':
+ var c1 byte
+ for (c != '*' || c1 != '/') && r.err == nil {
+ if r.eof {
+ r.syntaxError()
+ }
+ c, c1 = c1, r.readByteNoBuf()
+ }
+ startLine = false
+
+ case '/':
+ if startLine {
+ // Try to read this as a //go:embed comment.
+ for i := range goEmbed {
+ c = r.readByteNoBuf()
+ if c != goEmbed[i] {
+ goto SkipSlashSlash
+ }
+ }
+ c = r.readByteNoBuf()
+ if c == ' ' || c == '\t' {
+ // Found one!
+ return true
+ }
+ }
+ SkipSlashSlash:
+ for c != '\n' && r.err == nil && !r.eof {
+ c = r.readByteNoBuf()
+ }
+ startLine = true
+ }
+ }
+ }
+ return false
+}
+
// readKeyword reads the given keyword from the input.
// If the keyword is not present, readKeyword records a syntax error.
func (r *importReader) readKeyword(kw string) {
@@ -147,15 +244,11 @@ func (r *importReader) readIdent() {
// readString reads a quoted string literal from the input.
// If an identifier is not present, readString records a syntax error.
-func (r *importReader) readString(save *[]string) {
+func (r *importReader) readString() {
switch r.nextByte(true) {
case '`':
- start := len(r.buf) - 1
for r.err == nil {
if r.nextByte(false) == '`' {
- if save != nil {
- *save = append(*save, string(r.buf[start:]))
- }
break
}
if r.eof {
@@ -163,13 +256,9 @@ func (r *importReader) readString(save *[]string) {
}
}
case '"':
- start := len(r.buf) - 1
for r.err == nil {
c := r.nextByte(false)
if c == '"' {
- if save != nil {
- *save = append(*save, string(r.buf[start:]))
- }
break
}
if r.eof || c == '\n' {
@@ -186,17 +275,17 @@ func (r *importReader) readString(save *[]string) {
// readImport reads an import clause - optional identifier followed by quoted string -
// from the input.
-func (r *importReader) readImport(imports *[]string) {
+func (r *importReader) readImport() {
c := r.peekByte(true)
if c == '.' {
r.peek = 0
} else if isIdent(c) {
r.readIdent()
}
- r.readString(imports)
+ r.readString()
}
-// readComments is like ioutil.ReadAll, except that it only reads the leading
+// readComments is like io.ReadAll, except that it only reads the leading
// block of comments in the file.
func readComments(f io.Reader) ([]byte, error) {
r := &importReader{b: bufio.NewReader(f)}
@@ -208,9 +297,14 @@ func readComments(f io.Reader) ([]byte, error) {
return r.buf, r.err
}
-// readImports is like ioutil.ReadAll, except that it expects a Go file as input
-// and stops reading the input once the imports have completed.
-func readImports(f io.Reader, reportSyntaxError bool, imports *[]string) ([]byte, error) {
+// readGoInfo expects a Go file as input and reads the file up to and including the import section.
+// It records what it learned in *info.
+// If info.fset is non-nil, readGoInfo parses the file and sets info.parsed, info.parseErr,
+// info.imports, info.embeds, and info.embedErr.
+//
+// It only returns an error if there are problems reading the file,
+// not for syntax errors in the file itself.
+func readGoInfo(f io.Reader, info *fileInfo) error {
r := &importReader{b: bufio.NewReader(f)}
r.readKeyword("package")
@@ -220,28 +314,162 @@ func readImports(f io.Reader, reportSyntaxError bool, imports *[]string) ([]byte
if r.peekByte(true) == '(' {
r.nextByte(false)
for r.peekByte(true) != ')' && r.err == nil {
- r.readImport(imports)
+ r.readImport()
}
r.nextByte(false)
} else {
- r.readImport(imports)
+ r.readImport()
}
}
+ info.header = r.buf
+
// If we stopped successfully before EOF, we read a byte that told us we were done.
// Return all but that last byte, which would cause a syntax error if we let it through.
if r.err == nil && !r.eof {
- return r.buf[:len(r.buf)-1], nil
+ info.header = r.buf[:len(r.buf)-1]
}
// If we stopped for a syntax error, consume the whole file so that
// we are sure we don't change the errors that go/parser returns.
- if r.err == errSyntax && !reportSyntaxError {
+ if r.err == errSyntax {
r.err = nil
for r.err == nil && !r.eof {
r.readByte()
}
+ info.header = r.buf
+ }
+ if r.err != nil {
+ return r.err
+ }
+
+ if info.fset == nil {
+ return nil
+ }
+
+ // Parse file header & record imports.
+ info.parsed, info.parseErr = parser.ParseFile(info.fset, info.name, info.header, parser.ImportsOnly|parser.ParseComments)
+ if info.parseErr != nil {
+ return nil
+ }
+
+ hasEmbed := false
+ for _, decl := range info.parsed.Decls {
+ d, ok := decl.(*ast.GenDecl)
+ if !ok {
+ continue
+ }
+ for _, dspec := range d.Specs {
+ spec, ok := dspec.(*ast.ImportSpec)
+ if !ok {
+ continue
+ }
+ quoted := spec.Path.Value
+ path, err := strconv.Unquote(quoted)
+ if err != nil {
+ return fmt.Errorf("parser returned invalid quoted string: <%s>", quoted)
+ }
+ if path == "embed" {
+ hasEmbed = true
+ }
+
+ doc := spec.Doc
+ if doc == nil && len(d.Specs) == 1 {
+ doc = d.Doc
+ }
+ info.imports = append(info.imports, fileImport{path, spec.Pos(), doc})
+ }
}
- return r.buf, r.err
+ // If the file imports "embed",
+ // we have to look for //go:embed comments
+ // in the remainder of the file.
+ // The compiler will enforce the mapping of comments to
+ // declared variables. We just need to know the patterns.
+ // If there were //go:embed comments earlier in the file
+ // (near the package statement or imports), the compiler
+ // will reject them. They can be (and have already been) ignored.
+ if hasEmbed {
+ var line []byte
+ for first := true; r.findEmbed(first); first = false {
+ line = line[:0]
+ for {
+ c := r.readByteNoBuf()
+ if c == '\n' || r.err != nil || r.eof {
+ break
+ }
+ line = append(line, c)
+ }
+ // Add args if line is well-formed.
+ // Ignore badly-formed lines - the compiler will report them when it finds them,
+ // and we can pretend they are not there to help go list succeed with what it knows.
+ args, err := parseGoEmbed(string(line))
+ if err == nil {
+ info.embeds = append(info.embeds, args...)
+ }
+ }
+ }
+
+ return nil
+}
+
+// parseGoEmbed parses the text following "//go:embed" to extract the glob patterns.
+// It accepts unquoted space-separated patterns as well as double-quoted and back-quoted Go strings.
+// There is a copy of this code in cmd/compile/internal/gc/noder.go as well.
+func parseGoEmbed(args string) ([]string, error) {
+ var list []string
+ for args = strings.TrimSpace(args); args != ""; args = strings.TrimSpace(args) {
+ var path string
+ Switch:
+ switch args[0] {
+ default:
+ i := len(args)
+ for j, c := range args {
+ if unicode.IsSpace(c) {
+ i = j
+ break
+ }
+ }
+ path = args[:i]
+ args = args[i:]
+
+ case '`':
+ i := strings.Index(args[1:], "`")
+ if i < 0 {
+ return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
+ }
+ path = args[1 : 1+i]
+ args = args[1+i+1:]
+
+ case '"':
+ i := 1
+ for ; i < len(args); i++ {
+ if args[i] == '\\' {
+ i++
+ continue
+ }
+ if args[i] == '"' {
+ q, err := strconv.Unquote(args[:i+1])
+ if err != nil {
+ return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args[:i+1])
+ }
+ path = q
+ args = args[i+1:]
+ break Switch
+ }
+ }
+ if i >= len(args) {
+ return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
+ }
+ }
+
+ if args != "" {
+ r, _ := utf8.DecodeRuneInString(args)
+ if !unicode.IsSpace(r) {
+ return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
+ }
+ }
+ list = append(list, path)
+ }
+ return list, nil
}
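
parseGoEmbed accepts unquoted, double-quoted, and back-quoted patterns on a //go:embed line. A source-file sketch of the three forms, using placeholder pattern names:

	package demo

	import "embed"

	// Each argument below is parsed as a separate pattern, so this file
	// contributes x, y and z to EmbedPatterns; badly formed arguments are
	// dropped here and reported later by the compiler.
	//
	//go:embed x "y" `z`
	var files embed.FS
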
diff --git a/src/go/build/read_test.go b/src/go/build/read_test.go
index 8636533f69..9264d2606f 100644
--- a/src/go/build/read_test.go
+++ b/src/go/build/read_test.go
@@ -5,7 +5,9 @@
package build
import (
+ "go/token"
"io"
+ "reflect"
"strings"
"testing"
)
@@ -13,12 +15,12 @@ import (
const quote = "`"
type readTest struct {
-	// Test input contains ℙ where readImports should stop.
+	// Test input contains ℙ where readGoInfo should stop.
in string
err string
}
-var readImportsTests = []readTest{
+var readGoInfoTests = []readTest{
{
`package p`,
"",
@@ -37,15 +39,15 @@ var readImportsTests = []readTest{
},
{
`package p
-
+
// comment
-
+
import "x"
import _ "x"
import a "x"
-
+
/* comment */
-
+
import (
"x" /* comment */
_ "x"
@@ -59,7 +61,7 @@ var readImportsTests = []readTest{
import ()
import()import()import()
import();import();import()
-
+
ℙvar x = 1
`,
"",
@@ -85,7 +87,7 @@ var readCommentsTests = []readTest{
/* bar */
/* quux */ // baz
-
+
/*/ zot */
// asdf
@@ -127,8 +129,12 @@ func testRead(t *testing.T, tests []readTest, read func(io.Reader) ([]byte, erro
}
}
-func TestReadImports(t *testing.T) {
- testRead(t, readImportsTests, func(r io.Reader) ([]byte, error) { return readImports(r, true, nil) })
+func TestReadGoInfo(t *testing.T) {
+ testRead(t, readGoInfoTests, func(r io.Reader) ([]byte, error) {
+ var info fileInfo
+ err := readGoInfo(r, &info)
+ return info.header, err
+ })
}
func TestReadComments(t *testing.T) {
@@ -202,11 +208,6 @@ var readFailuresTests = []readTest{
},
}
-func TestReadFailures(t *testing.T) {
- // Errors should be reported (true arg to readImports).
- testRead(t, readFailuresTests, func(r io.Reader) ([]byte, error) { return readImports(r, true, nil) })
-}
-
func TestReadFailuresIgnored(t *testing.T) {
// Syntax errors should not be reported (false arg to readImports).
// Instead, entire file should be the output and no error.
@@ -219,5 +220,63 @@ func TestReadFailuresIgnored(t *testing.T) {
tt.err = ""
}
}
- testRead(t, tests, func(r io.Reader) ([]byte, error) { return readImports(r, false, nil) })
+ testRead(t, tests, func(r io.Reader) ([]byte, error) {
+ var info fileInfo
+ err := readGoInfo(r, &info)
+ return info.header, err
+ })
+}
+
+var readEmbedTests = []struct {
+ in string
+ out []string
+}{
+ {
+ "package p\n",
+ nil,
+ },
+ {
+ "package p\nimport \"embed\"\nvar i int\n//go:embed x y z\nvar files embed.FS",
+ []string{"x", "y", "z"},
+ },
+ {
+ "package p\nimport \"embed\"\nvar i int\n//go:embed x \"\\x79\" `z`\nvar files embed.FS",
+ []string{"x", "y", "z"},
+ },
+ {
+ "package p\nimport \"embed\"\nvar i int\n//go:embed x y\n//go:embed z\nvar files embed.FS",
+ []string{"x", "y", "z"},
+ },
+ {
+ "package p\nimport \"embed\"\nvar i int\n\t //go:embed x y\n\t //go:embed z\n\t var files embed.FS",
+ []string{"x", "y", "z"},
+ },
+ {
+ "package p\nimport \"embed\"\n//go:embed x y z\nvar files embed.FS",
+ []string{"x", "y", "z"},
+ },
+ {
+ "package p\n//go:embed x y z\n", // no import, no scan
+ nil,
+ },
+ {
+ "package p\n//go:embed x y z\nvar files embed.FS", // no import, no scan
+ nil,
+ },
+}
+
+func TestReadEmbed(t *testing.T) {
+ fset := token.NewFileSet()
+ for i, tt := range readEmbedTests {
+ var info fileInfo
+ info.fset = fset
+ err := readGoInfo(strings.NewReader(tt.in), &info)
+ if err != nil {
+ t.Errorf("#%d: %v", i, err)
+ continue
+ }
+ if !reflect.DeepEqual(info.embeds, tt.out) {
+ t.Errorf("#%d: embeds=%v, want %v", i, info.embeds, tt.out)
+ }
+ }
}
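
The last two readEmbedTests cases encode a "no import, no scan" rule: //go:embed lines are only collected when the file imports "embed". A sketch of a file whose pattern is therefore never recorded (the file name and variable are placeholders):

	package demo

	// Without an import of "embed", readGoInfo stops after the import section
	// and never scans for //go:embed lines, so this pattern does not reach
	// EmbedPatterns; compiling the file would separately fail because
	// //go:embed requires importing "embed".
	//
	//go:embed config.json
	var raw string
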
diff --git a/src/go/doc/doc_test.go b/src/go/doc/doc_test.go
index f1e612c18b..ab98bed62b 100644
--- a/src/go/doc/doc_test.go
+++ b/src/go/doc/doc_test.go
@@ -12,8 +12,8 @@ import (
"go/parser"
"go/printer"
"go/token"
+ "io/fs"
"io/ioutil"
- "os"
"path/filepath"
"regexp"
"strings"
@@ -66,7 +66,7 @@ func indentFmt(indent, s string) string {
return indent + strings.ReplaceAll(s, "\n", "\n"+indent) + end
}
-func isGoFile(fi os.FileInfo) bool {
+func isGoFile(fi fs.FileInfo) bool {
name := fi.Name()
return !fi.IsDir() &&
len(name) > 0 && name[0] != '.' && // ignore .files
@@ -86,7 +86,7 @@ func test(t *testing.T, mode Mode) {
if err != nil {
t.Fatal(err)
}
- filter = func(fi os.FileInfo) bool {
+ filter = func(fi fs.FileInfo) bool {
return isGoFile(fi) && rx.MatchString(fi.Name())
}
}
diff --git a/src/go/doc/headscan.go b/src/go/doc/headscan.go
index 3f782cc1b4..8ea462366e 100644
--- a/src/go/doc/headscan.go
+++ b/src/go/doc/headscan.go
@@ -23,6 +23,7 @@ import (
"go/parser"
"go/token"
"internal/lazyregexp"
+ "io/fs"
"os"
"path/filepath"
"runtime"
@@ -39,7 +40,7 @@ var html_h = lazyregexp.New(`<h3 id="[^"]*">`)
const html_endh = "</h3>\n"
-func isGoFile(fi os.FileInfo) bool {
+func isGoFile(fi fs.FileInfo) bool {
return strings.HasSuffix(fi.Name(), ".go") &&
!strings.HasSuffix(fi.Name(), "_test.go")
}
@@ -68,7 +69,7 @@ func main() {
flag.Parse()
fset := token.NewFileSet()
nheadings := 0
- err := filepath.Walk(*root, func(path string, fi os.FileInfo, err error) error {
+ err := filepath.Walk(*root, func(path string, fi fs.FileInfo, err error) error {
if !fi.IsDir() {
return nil
}
diff --git a/src/go/internal/gccgoimporter/importer.go b/src/go/internal/gccgoimporter/importer.go
index 2494fd7b2a..94f2defd8d 100644
--- a/src/go/internal/gccgoimporter/importer.go
+++ b/src/go/internal/gccgoimporter/importer.go
@@ -221,7 +221,7 @@ func GetImporter(searchpaths []string, initmap map[*types.Package]InitData) Impo
// Excluded for now: Standard gccgo doesn't support this import format currently.
// case goimporterMagic:
// var data []byte
- // data, err = ioutil.ReadAll(reader)
+ // data, err = io.ReadAll(reader)
// if err != nil {
// return
// }
diff --git a/src/go/internal/gccgoimporter/importer_test.go b/src/go/internal/gccgoimporter/importer_test.go
index a74a456868..e4236a5867 100644
--- a/src/go/internal/gccgoimporter/importer_test.go
+++ b/src/go/internal/gccgoimporter/importer_test.go
@@ -97,6 +97,7 @@ var importerTests = [...]importerTest{
{pkgpath: "issue30628", name: "Apple", want: "type Apple struct{hey sync.RWMutex; x int; RQ [517]struct{Count uintptr; NumBytes uintptr; Last uintptr}}"},
{pkgpath: "issue31540", name: "S", gccgoVersion: 7, want: "type S struct{b int; map[Y]Z}"},
{pkgpath: "issue34182", name: "T1", want: "type T1 struct{f *T2}"},
+ {pkgpath: "notinheap", name: "S", want: "type S struct{}"},
}
func TestGoxImporter(t *testing.T) {
diff --git a/src/go/internal/gccgoimporter/parser.go b/src/go/internal/gccgoimporter/parser.go
index e2ef33f7ae..1b1d07d3f6 100644
--- a/src/go/internal/gccgoimporter/parser.go
+++ b/src/go/internal/gccgoimporter/parser.go
@@ -517,6 +517,13 @@ func (p *parser) parseNamedType(nlist []interface{}) types.Type {
p.errorf("%v has nil type", obj)
}
+ if p.tok == scanner.Ident && p.lit == "notinheap" {
+ p.next()
+ // The go/types package has no way of recording that
+ // this type is marked notinheap. Presumably no user
+ // of this package actually cares.
+ }
+
// type alias
if p.tok == '=' {
p.next()
diff --git a/src/go/internal/gccgoimporter/testdata/notinheap.go b/src/go/internal/gccgoimporter/testdata/notinheap.go
new file mode 100644
index 0000000000..b1ac967227
--- /dev/null
+++ b/src/go/internal/gccgoimporter/testdata/notinheap.go
@@ -0,0 +1,4 @@
+package notinheap
+
+//go:notinheap
+type S struct{}
diff --git a/src/go/internal/gccgoimporter/testdata/notinheap.gox b/src/go/internal/gccgoimporter/testdata/notinheap.gox
new file mode 100644
index 0000000000..cc438e75e0
--- /dev/null
+++ b/src/go/internal/gccgoimporter/testdata/notinheap.gox
@@ -0,0 +1,7 @@
+v3;
+package notinheap
+pkgpath notinheap
+init notinheap ~notinheap
+types 3 2 30 18
+type 1 "S" notinheap <type 2>
+type 2 struct { }
diff --git a/src/go/internal/gcimporter/gcimporter.go b/src/go/internal/gcimporter/gcimporter.go
index fda15eaaae..b74daca246 100644
--- a/src/go/internal/gcimporter/gcimporter.go
+++ b/src/go/internal/gcimporter/gcimporter.go
@@ -12,7 +12,6 @@ import (
"go/token"
"go/types"
"io"
- "io/ioutil"
"os"
"path/filepath"
"strings"
@@ -147,7 +146,7 @@ func Import(fset *token.FileSet, packages map[string]*types.Package, path, srcDi
case "$$B\n":
var data []byte
- data, err = ioutil.ReadAll(buf)
+ data, err = io.ReadAll(buf)
if err != nil {
break
}
diff --git a/src/go/internal/gcimporter/support.go b/src/go/internal/gcimporter/support.go
index 2de7cacd2d..b8bb14dc49 100644
--- a/src/go/internal/gcimporter/support.go
+++ b/src/go/internal/gcimporter/support.go
@@ -17,7 +17,10 @@ func errorf(format string, args ...interface{}) {
panic(fmt.Sprintf(format, args...))
}
-const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go
+// deltaNewFile is a magic line delta offset indicating a new file.
+// We use -64 because it is rare; see issue 20080 and CL 41619.
+// -64 is the smallest int that fits in a single byte as a varint.
+const deltaNewFile = -64
// Synthesize a token.Pos
type fakeFileSet struct {
diff --git a/src/go/parser/interface.go b/src/go/parser/interface.go
index 54f9d7b80a..cc7e455c4d 100644
--- a/src/go/parser/interface.go
+++ b/src/go/parser/interface.go
@@ -12,8 +12,8 @@ import (
"go/ast"
"go/token"
"io"
+ "io/fs"
"io/ioutil"
- "os"
"path/filepath"
"strings"
)
@@ -35,7 +35,7 @@ func readSource(filename string, src interface{}) ([]byte, error) {
return s.Bytes(), nil
}
case io.Reader:
- return ioutil.ReadAll(s)
+ return io.ReadAll(s)
}
return nil, errors.New("invalid source")
}
@@ -123,7 +123,7 @@ func ParseFile(fset *token.FileSet, filename string, src interface{}, mode Mode)
// directory specified by path and returns a map of package name -> package
// AST with all the packages found.
//
-// If filter != nil, only the files with os.FileInfo entries passing through
+// If filter != nil, only the files with fs.FileInfo entries passing through
// the filter (and ending in ".go") are considered. The mode bits are passed
// to ParseFile unchanged. Position information is recorded in fset, which
// must not be nil.
@@ -132,7 +132,7 @@ func ParseFile(fset *token.FileSet, filename string, src interface{}, mode Mode)
// returned. If a parse error occurred, a non-nil but incomplete map and the
// first error encountered are returned.
//
-func ParseDir(fset *token.FileSet, path string, filter func(os.FileInfo) bool, mode Mode) (pkgs map[string]*ast.Package, first error) {
+func ParseDir(fset *token.FileSet, path string, filter func(fs.FileInfo) bool, mode Mode) (pkgs map[string]*ast.Package, first error) {
list, err := ioutil.ReadDir(path)
if err != nil {
return nil, err
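
A minimal sketch of a ParseDir caller after the filter signature change, assuming the Go 1.16 io/fs migration in which os.FileInfo becomes an alias for fs.FileInfo, so existing filters keep compiling (the directory and filter logic are placeholders):

	package main

	import (
		"fmt"
		"go/parser"
		"go/token"
		"io/fs"
		"log"
		"strings"
	)

	func main() {
		fset := token.NewFileSet()
		// The filter is now a func(fs.FileInfo) bool; fs.FileInfo has the same
		// method set os.FileInfo always had, so the body is unchanged.
		filter := func(fi fs.FileInfo) bool {
			return !strings.HasPrefix(fi.Name(), "_") // skip underscore-prefixed files
		}
		pkgs, err := parser.ParseDir(fset, ".", filter, parser.ParseComments)
		if err != nil {
			log.Fatal(err)
		}
		for name := range pkgs {
			fmt.Println("parsed package:", name)
		}
	}
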
diff --git a/src/go/parser/parser_test.go b/src/go/parser/parser_test.go
index 25a374eeef..7193a329fe 100644
--- a/src/go/parser/parser_test.go
+++ b/src/go/parser/parser_test.go
@@ -9,7 +9,7 @@ import (
"fmt"
"go/ast"
"go/token"
- "os"
+ "io/fs"
"strings"
"testing"
)
@@ -40,7 +40,7 @@ func nameFilter(filename string) bool {
return false
}
-func dirFilter(f os.FileInfo) bool { return nameFilter(f.Name()) }
+func dirFilter(f fs.FileInfo) bool { return nameFilter(f.Name()) }
func TestParseFile(t *testing.T) {
src := "package p\nvar _=s[::]+\ns[::]+\ns[::]+\ns[::]+\ns[::]+\ns[::]+\ns[::]+\ns[::]+\ns[::]+\ns[::]+\ns[::]+\ns[::]"
diff --git a/src/go/printer/performance_test.go b/src/go/printer/performance_test.go
index 2e67154e6b..e23de3fbae 100644
--- a/src/go/printer/performance_test.go
+++ b/src/go/printer/performance_test.go
@@ -53,6 +53,6 @@ func BenchmarkPrint(b *testing.B) {
initialize()
}
for i := 0; i < b.N; i++ {
- testprint(ioutil.Discard, testfile)
+ testprint(io.Discard, testfile)
}
}
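
Several hunks in this change (the gcimporter, the printer benchmark, and the commented-out gccgo path) move from io/ioutil helpers to their new homes in package io. A small sketch of the equivalence, assuming the Go 1.16 release that added them:

	package main

	import (
		"fmt"
		"io"
		"strings"
	)

	func main() {
		// io.ReadAll replaces ioutil.ReadAll; the ioutil function is now a
		// thin wrapper, so both spellings behave identically.
		data, err := io.ReadAll(strings.NewReader("hello"))
		if err != nil {
			panic(err)
		}

		// io.Discard replaces ioutil.Discard as the "write and forget" sink.
		n, _ := io.Discard.Write(data)
		fmt.Println(string(data), n) // hello 5
	}
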
diff --git a/src/go/token/position.go b/src/go/token/position.go
index d0dbc2998f..a21f5fd056 100644
--- a/src/go/token/position.go
+++ b/src/go/token/position.go
@@ -150,12 +150,12 @@ func (f *File) AddLine(offset int) {
//
func (f *File) MergeLine(line int) {
if line < 1 {
- panic("illegal line number (line numbering starts at 1)")
+ panic(fmt.Sprintf("invalid line number %d (should be >= 1)", line))
}
f.mutex.Lock()
defer f.mutex.Unlock()
if line >= len(f.lines) {
- panic("illegal line number")
+ panic(fmt.Sprintf("invalid line number %d (should be < %d)", line, len(f.lines)))
}
// To merge the line numbered <line> with the line numbered <line+1>,
// we need to remove the entry in lines corresponding to the line
@@ -217,12 +217,12 @@ func (f *File) SetLinesForContent(content []byte) {
// LineStart panics if the 1-based line number is invalid.
func (f *File) LineStart(line int) Pos {
if line < 1 {
- panic("illegal line number (line numbering starts at 1)")
+ panic(fmt.Sprintf("invalid line number %d (should be >= 1)", line))
}
f.mutex.Lock()
defer f.mutex.Unlock()
if line > len(f.lines) {
- panic("illegal line number")
+ panic(fmt.Sprintf("invalid line number %d (should be < %d)", line, len(f.lines)))
}
return Pos(f.base + f.lines[line-1])
}
@@ -267,7 +267,7 @@ func (f *File) AddLineColumnInfo(offset int, filename string, line, column int)
//
func (f *File) Pos(offset int) Pos {
if offset > f.size {
- panic("illegal file offset")
+ panic(fmt.Sprintf("invalid file offset %d (should be <= %d)", offset, f.size))
}
return Pos(f.base + offset)
}
@@ -278,7 +278,7 @@ func (f *File) Pos(offset int) Pos {
//
func (f *File) Offset(p Pos) int {
if int(p) < f.base || int(p) > f.base+f.size {
- panic("illegal Pos value")
+ panic(fmt.Sprintf("invalid Pos value %d (should be in [%d, %d[)", p, f.base, f.base+f.size))
}
return int(p) - f.base
}
@@ -346,7 +346,7 @@ func (f *File) position(p Pos, adjusted bool) (pos Position) {
func (f *File) PositionFor(p Pos, adjusted bool) (pos Position) {
if p != NoPos {
if int(p) < f.base || int(p) > f.base+f.size {
- panic("illegal Pos value")
+ panic(fmt.Sprintf("invalid Pos value %d (should be in [%d, %d[)", p, f.base, f.base+f.size))
}
pos = f.position(p, adjusted)
}
@@ -430,8 +430,11 @@ func (s *FileSet) AddFile(filename string, base, size int) *File {
if base < 0 {
base = s.base
}
- if base < s.base || size < 0 {
- panic("illegal base or size")
+ if base < s.base {
+ panic(fmt.Sprintf("invalid base %d (should be >= %d)", base, s.base))
+ }
+ if size < 0 {
+ panic(fmt.Sprintf("invalid size %d (should be >= 0)", size))
}
// base >= s.base && size >= 0
f := &File{set: s, name: filename, base: base, size: size, lines: []int{0}}
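
The reworded panics now name the offending value and the valid range. A small sketch that triggers one of them (the file name is a placeholder):

	package main

	import (
		"fmt"
		"go/token"
	)

	func main() {
		defer func() {
			// With this change the message names the bad value, e.g.
			// "invalid size -1 (should be >= 0)" instead of "illegal base or size".
			fmt.Println("recovered:", recover())
		}()

		fset := token.NewFileSet()
		fset.AddFile("placeholder.go", -1, -1) // base -1 means "use the next base"; size -1 is rejected
	}
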
diff --git a/src/go/types/decl.go b/src/go/types/decl.go
index 5c0e611c51..a022ec5678 100644
--- a/src/go/types/decl.go
+++ b/src/go/types/decl.go
@@ -381,6 +381,76 @@ func firstInSrc(path []Object) int {
return fst
}
+type (
+ decl interface {
+ node() ast.Node
+ }
+
+ importDecl struct{ spec *ast.ImportSpec }
+ constDecl struct {
+ spec *ast.ValueSpec
+ iota int
+ typ ast.Expr
+ init []ast.Expr
+ }
+ varDecl struct{ spec *ast.ValueSpec }
+ typeDecl struct{ spec *ast.TypeSpec }
+ funcDecl struct{ decl *ast.FuncDecl }
+)
+
+func (d importDecl) node() ast.Node { return d.spec }
+func (d constDecl) node() ast.Node { return d.spec }
+func (d varDecl) node() ast.Node { return d.spec }
+func (d typeDecl) node() ast.Node { return d.spec }
+func (d funcDecl) node() ast.Node { return d.decl }
+
+func (check *Checker) walkDecls(decls []ast.Decl, f func(decl)) {
+ for _, d := range decls {
+ check.walkDecl(d, f)
+ }
+}
+
+func (check *Checker) walkDecl(d ast.Decl, f func(decl)) {
+ switch d := d.(type) {
+ case *ast.BadDecl:
+ // ignore
+ case *ast.GenDecl:
+ var last *ast.ValueSpec // last ValueSpec with type or init exprs seen
+ for iota, s := range d.Specs {
+ switch s := s.(type) {
+ case *ast.ImportSpec:
+ f(importDecl{s})
+ case *ast.ValueSpec:
+ switch d.Tok {
+ case token.CONST:
+ // determine which initialization expressions to use
+ switch {
+ case s.Type != nil || len(s.Values) > 0:
+ last = s
+ case last == nil:
+ last = new(ast.ValueSpec) // make sure last exists
+ }
+ check.arityMatch(s, last)
+ f(constDecl{spec: s, iota: iota, init: last.Values, typ: last.Type})
+ case token.VAR:
+ check.arityMatch(s, nil)
+ f(varDecl{s})
+ default:
+ check.invalidAST(s.Pos(), "invalid token %s", d.Tok)
+ }
+ case *ast.TypeSpec:
+ f(typeDecl{s})
+ default:
+ check.invalidAST(s.Pos(), "unknown ast.Spec node %T", s)
+ }
+ }
+ case *ast.FuncDecl:
+ f(funcDecl{d})
+ default:
+ check.invalidAST(d.Pos(), "unknown ast.Decl node %T", d)
+ }
+}
+
func (check *Checker) constDecl(obj *Const, typ, init ast.Expr) {
assert(obj.typ == nil)
@@ -664,133 +734,105 @@ func (check *Checker) funcDecl(obj *Func, decl *declInfo) {
}
}
-func (check *Checker) declStmt(decl ast.Decl) {
+func (check *Checker) declStmt(d ast.Decl) {
pkg := check.pkg
- switch d := decl.(type) {
- case *ast.BadDecl:
- // ignore
+ check.walkDecl(d, func(d decl) {
+ switch d := d.(type) {
+ case constDecl:
+ top := len(check.delayed)
- case *ast.GenDecl:
- var last *ast.ValueSpec // last ValueSpec with type or init exprs seen
- for iota, spec := range d.Specs {
- switch s := spec.(type) {
- case *ast.ValueSpec:
- switch d.Tok {
- case token.CONST:
- top := len(check.delayed)
+ // declare all constants
+ lhs := make([]*Const, len(d.spec.Names))
+ for i, name := range d.spec.Names {
+ obj := NewConst(name.Pos(), pkg, name.Name, nil, constant.MakeInt64(int64(d.iota)))
+ lhs[i] = obj
- // determine which init exprs to use
- switch {
- case s.Type != nil || len(s.Values) > 0:
- last = s
- case last == nil:
- last = new(ast.ValueSpec) // make sure last exists
- }
-
- // declare all constants
- lhs := make([]*Const, len(s.Names))
- for i, name := range s.Names {
- obj := NewConst(name.Pos(), pkg, name.Name, nil, constant.MakeInt64(int64(iota)))
- lhs[i] = obj
-
- var init ast.Expr
- if i < len(last.Values) {
- init = last.Values[i]
- }
+ var init ast.Expr
+ if i < len(d.init) {
+ init = d.init[i]
+ }
- check.constDecl(obj, last.Type, init)
- }
+ check.constDecl(obj, d.typ, init)
+ }
- check.arityMatch(s, last)
+ // process function literals in init expressions before scope changes
+ check.processDelayed(top)
- // process function literals in init expressions before scope changes
- check.processDelayed(top)
+ // spec: "The scope of a constant or variable identifier declared
+ // inside a function begins at the end of the ConstSpec or VarSpec
+ // (ShortVarDecl for short variable declarations) and ends at the
+ // end of the innermost containing block."
+ scopePos := d.spec.End()
+ for i, name := range d.spec.Names {
+ check.declare(check.scope, name, lhs[i], scopePos)
+ }
- // spec: "The scope of a constant or variable identifier declared
- // inside a function begins at the end of the ConstSpec or VarSpec
- // (ShortVarDecl for short variable declarations) and ends at the
- // end of the innermost containing block."
- scopePos := s.End()
- for i, name := range s.Names {
- check.declare(check.scope, name, lhs[i], scopePos)
- }
+ case varDecl:
+ top := len(check.delayed)
- case token.VAR:
- top := len(check.delayed)
+ lhs0 := make([]*Var, len(d.spec.Names))
+ for i, name := range d.spec.Names {
+ lhs0[i] = NewVar(name.Pos(), pkg, name.Name, nil)
+ }
- lhs0 := make([]*Var, len(s.Names))
- for i, name := range s.Names {
- lhs0[i] = NewVar(name.Pos(), pkg, name.Name, nil)
+ // initialize all variables
+ for i, obj := range lhs0 {
+ var lhs []*Var
+ var init ast.Expr
+ switch len(d.spec.Values) {
+ case len(d.spec.Names):
+ // lhs and rhs match
+ init = d.spec.Values[i]
+ case 1:
+ // rhs is expected to be a multi-valued expression
+ lhs = lhs0
+ init = d.spec.Values[0]
+ default:
+ if i < len(d.spec.Values) {
+ init = d.spec.Values[i]
}
-
- // initialize all variables
- for i, obj := range lhs0 {
- var lhs []*Var
- var init ast.Expr
- switch len(s.Values) {
- case len(s.Names):
- // lhs and rhs match
- init = s.Values[i]
- case 1:
- // rhs is expected to be a multi-valued expression
- lhs = lhs0
- init = s.Values[0]
- default:
- if i < len(s.Values) {
- init = s.Values[i]
- }
- }
- check.varDecl(obj, lhs, s.Type, init)
- if len(s.Values) == 1 {
- // If we have a single lhs variable we are done either way.
- // If we have a single rhs expression, it must be a multi-
- // valued expression, in which case handling the first lhs
- // variable will cause all lhs variables to have a type
- // assigned, and we are done as well.
- if debug {
- for _, obj := range lhs0 {
- assert(obj.typ != nil)
- }
- }
- break
+ }
+ check.varDecl(obj, lhs, d.spec.Type, init)
+ if len(d.spec.Values) == 1 {
+ // If we have a single lhs variable we are done either way.
+ // If we have a single rhs expression, it must be a multi-
+ // valued expression, in which case handling the first lhs
+ // variable will cause all lhs variables to have a type
+ // assigned, and we are done as well.
+ if debug {
+ for _, obj := range lhs0 {
+ assert(obj.typ != nil)
}
}
-
- check.arityMatch(s, nil)
-
- // process function literals in init expressions before scope changes
- check.processDelayed(top)
-
- // declare all variables
- // (only at this point are the variable scopes (parents) set)
- scopePos := s.End() // see constant declarations
- for i, name := range s.Names {
- // see constant declarations
- check.declare(check.scope, name, lhs0[i], scopePos)
- }
-
- default:
- check.invalidAST(s.Pos(), "invalid token %s", d.Tok)
+ break
}
+ }
- case *ast.TypeSpec:
- obj := NewTypeName(s.Name.Pos(), pkg, s.Name.Name, nil)
- // spec: "The scope of a type identifier declared inside a function
- // begins at the identifier in the TypeSpec and ends at the end of
- // the innermost containing block."
- scopePos := s.Name.Pos()
- check.declare(check.scope, s.Name, obj, scopePos)
- // mark and unmark type before calling typeDecl; its type is still nil (see Checker.objDecl)
- obj.setColor(grey + color(check.push(obj)))
- check.typeDecl(obj, s.Type, nil, s.Assign.IsValid())
- check.pop().setColor(black)
- default:
- check.invalidAST(s.Pos(), "const, type, or var declaration expected")
+ // process function literals in init expressions before scope changes
+ check.processDelayed(top)
+
+ // declare all variables
+ // (only at this point are the variable scopes (parents) set)
+ scopePos := d.spec.End() // see constant declarations
+ for i, name := range d.spec.Names {
+ // see constant declarations
+ check.declare(check.scope, name, lhs0[i], scopePos)
}
- }
- default:
- check.invalidAST(d.Pos(), "unknown ast.Decl node %T", d)
- }
+ case typeDecl:
+ obj := NewTypeName(d.spec.Name.Pos(), pkg, d.spec.Name.Name, nil)
+ // spec: "The scope of a type identifier declared inside a function
+ // begins at the identifier in the TypeSpec and ends at the end of
+ // the innermost containing block."
+ scopePos := d.spec.Name.Pos()
+ check.declare(check.scope, d.spec.Name, obj, scopePos)
+ // mark and unmark type before calling typeDecl; its type is still nil (see Checker.objDecl)
+ obj.setColor(grey + color(check.push(obj)))
+ check.typeDecl(obj, d.spec.Type, nil, d.spec.Assign.IsValid())
+ check.pop().setColor(black)
+ default:
+ check.invalidAST(d.node().Pos(), "unknown ast.Decl node %T", d.node())
+ }
+ })
}
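
Note: the new decl interface and walkDecl/walkDecls helpers flatten the nested ast.GenDecl/ast.Spec handling into a single callback, which declStmt above and collectObjects (next file) both reuse. Those types are unexported inside go/types, so the following is a standalone analog of the same traversal pattern; the walkDecls name and callback signature here are illustrative, not the package's API:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

// walkDecls flattens GenDecl specs and hands every declaration to one callback,
// tracking the spec index (the iota value for constants) along the way.
func walkDecls(decls []ast.Decl, f func(kind string, n ast.Node, iota int)) {
	for _, d := range decls {
		switch d := d.(type) {
		case *ast.GenDecl:
			for iota, s := range d.Specs {
				switch s := s.(type) {
				case *ast.ImportSpec:
					f("import", s, iota)
				case *ast.ValueSpec:
					f(d.Tok.String(), s, iota) // "const" or "var"
				case *ast.TypeSpec:
					f("type", s, iota)
				}
			}
		case *ast.FuncDecl:
			f("func", d, 0)
		}
	}
}

func main() {
	src := `package p
import "fmt"
const a, b = iota + 1, iota + 2
var x int
type T struct{}
func f() { fmt.Println(a, b, x, T{}) }`
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	walkDecls(file.Decls, func(kind string, n ast.Node, iota int) {
		fmt.Printf("%-6s decl at %v (spec index / iota %d)\n", kind, fset.Position(n.Pos()), iota)
	})
}
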
diff --git a/src/go/types/gotype.go b/src/go/types/gotype.go
index eacf68f52f..52709df17b 100644
--- a/src/go/types/gotype.go
+++ b/src/go/types/gotype.go
@@ -88,7 +88,7 @@ import (
"go/scanner"
"go/token"
"go/types"
- "io/ioutil"
+ "io"
"os"
"path/filepath"
"sync"
@@ -191,7 +191,7 @@ func parse(filename string, src interface{}) (*ast.File, error) {
}
func parseStdin() (*ast.File, error) {
- src, err := ioutil.ReadAll(os.Stdin)
+ src, err := io.ReadAll(os.Stdin)
if err != nil {
return nil, err
}
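
Note: as in the printer benchmark, this is the Go 1.16 ioutil cleanup; io.ReadAll replaces ioutil.ReadAll with the same signature and behavior. Minimal stand-alone usage:

package main

import (
	"fmt"
	"io"
	"os"
)

func main() {
	src, err := io.ReadAll(os.Stdin) // same behavior as the old ioutil.ReadAll
	if err != nil {
		fmt.Fprintln(os.Stderr, "reading stdin:", err)
		os.Exit(1)
	}
	fmt.Printf("read %d bytes\n", len(src))
}
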
diff --git a/src/go/types/resolver.go b/src/go/types/resolver.go
index 078adc5ec7..cce222cbc5 100644
--- a/src/go/types/resolver.go
+++ b/src/go/types/resolver.go
@@ -235,179 +235,147 @@ func (check *Checker) collectObjects() {
// we get "." as the directory which is what we would want.
fileDir := dir(check.fset.Position(file.Name.Pos()).Filename)
- for _, decl := range file.Decls {
- switch d := decl.(type) {
- case *ast.BadDecl:
- // ignore
-
- case *ast.GenDecl:
- var last *ast.ValueSpec // last ValueSpec with type or init exprs seen
- for iota, spec := range d.Specs {
- switch s := spec.(type) {
- case *ast.ImportSpec:
- // import package
- path, err := validatedImportPath(s.Path.Value)
- if err != nil {
- check.errorf(s.Path.Pos(), "invalid import path (%s)", err)
- continue
- }
+ check.walkDecls(file.Decls, func(d decl) {
+ switch d := d.(type) {
+ case importDecl:
+ // import package
+ path, err := validatedImportPath(d.spec.Path.Value)
+ if err != nil {
+ check.errorf(d.spec.Path.Pos(), "invalid import path (%s)", err)
+ return
+ }
- imp := check.importPackage(s.Path.Pos(), path, fileDir)
- if imp == nil {
- continue
- }
+ imp := check.importPackage(d.spec.Path.Pos(), path, fileDir)
+ if imp == nil {
+ return
+ }
- // add package to list of explicit imports
- // (this functionality is provided as a convenience
- // for clients; it is not needed for type-checking)
- if !pkgImports[imp] {
- pkgImports[imp] = true
- pkg.imports = append(pkg.imports, imp)
- }
+ // add package to list of explicit imports
+ // (this functionality is provided as a convenience
+ // for clients; it is not needed for type-checking)
+ if !pkgImports[imp] {
+ pkgImports[imp] = true
+ pkg.imports = append(pkg.imports, imp)
+ }
- // local name overrides imported package name
- name := imp.name
- if s.Name != nil {
- name = s.Name.Name
- if path == "C" {
- // match cmd/compile (not prescribed by spec)
- check.errorf(s.Name.Pos(), `cannot rename import "C"`)
- continue
- }
- if name == "init" {
- check.errorf(s.Name.Pos(), "cannot declare init - must be func")
- continue
- }
- }
+ // local name overrides imported package name
+ name := imp.name
+ if d.spec.Name != nil {
+ name = d.spec.Name.Name
+ if path == "C" {
+ // match cmd/compile (not prescribed by spec)
+ check.errorf(d.spec.Name.Pos(), `cannot rename import "C"`)
+ return
+ }
+ if name == "init" {
+ check.errorf(d.spec.Name.Pos(), "cannot declare init - must be func")
+ return
+ }
+ }
- obj := NewPkgName(s.Pos(), pkg, name, imp)
- if s.Name != nil {
- // in a dot-import, the dot represents the package
- check.recordDef(s.Name, obj)
- } else {
- check.recordImplicit(s, obj)
- }
+ obj := NewPkgName(d.spec.Pos(), pkg, name, imp)
+ if d.spec.Name != nil {
+ // in a dot-import, the dot represents the package
+ check.recordDef(d.spec.Name, obj)
+ } else {
+ check.recordImplicit(d.spec, obj)
+ }
- if path == "C" {
- // match cmd/compile (not prescribed by spec)
- obj.used = true
- }
+ if path == "C" {
+ // match cmd/compile (not prescribed by spec)
+ obj.used = true
+ }
- // add import to file scope
- if name == "." {
- // merge imported scope with file scope
- for _, obj := range imp.scope.elems {
- // A package scope may contain non-exported objects,
- // do not import them!
- if obj.Exported() {
- // declare dot-imported object
- // (Do not use check.declare because it modifies the object
- // via Object.setScopePos, which leads to a race condition;
- // the object may be imported into more than one file scope
- // concurrently. See issue #32154.)
- if alt := fileScope.Insert(obj); alt != nil {
- check.errorf(s.Name.Pos(), "%s redeclared in this block", obj.Name())
- check.reportAltDecl(alt)
- }
- }
+ // add import to file scope
+ if name == "." {
+ // merge imported scope with file scope
+ for _, obj := range imp.scope.elems {
+ // A package scope may contain non-exported objects,
+ // do not import them!
+ if obj.Exported() {
+ // declare dot-imported object
+ // (Do not use check.declare because it modifies the object
+ // via Object.setScopePos, which leads to a race condition;
+ // the object may be imported into more than one file scope
+ // concurrently. See issue #32154.)
+ if alt := fileScope.Insert(obj); alt != nil {
+ check.errorf(d.spec.Name.Pos(), "%s redeclared in this block", obj.Name())
+ check.reportAltDecl(alt)
}
- // add position to set of dot-import positions for this file
- // (this is only needed for "imported but not used" errors)
- check.addUnusedDotImport(fileScope, imp, s.Pos())
- } else {
- // declare imported package object in file scope
- // (no need to provide s.Name since we called check.recordDef earlier)
- check.declare(fileScope, nil, obj, token.NoPos)
}
+ }
+ // add position to set of dot-import positions for this file
+ // (this is only needed for "imported but not used" errors)
+ check.addUnusedDotImport(fileScope, imp, d.spec.Pos())
+ } else {
+ // declare imported package object in file scope
+ // (no need to provide d.spec.Name since we called check.recordDef earlier)
+ check.declare(fileScope, nil, obj, token.NoPos)
+ }
+ case constDecl:
+ // declare all constants
+ for i, name := range d.spec.Names {
+ obj := NewConst(name.Pos(), pkg, name.Name, nil, constant.MakeInt64(int64(d.iota)))
+
+ var init ast.Expr
+ if i < len(d.init) {
+ init = d.init[i]
+ }
- case *ast.ValueSpec:
- switch d.Tok {
- case token.CONST:
- // determine which initialization expressions to use
- switch {
- case s.Type != nil || len(s.Values) > 0:
- last = s
- case last == nil:
- last = new(ast.ValueSpec) // make sure last exists
- }
-
- // declare all constants
- for i, name := range s.Names {
- obj := NewConst(name.Pos(), pkg, name.Name, nil, constant.MakeInt64(int64(iota)))
-
- var init ast.Expr
- if i < len(last.Values) {
- init = last.Values[i]
- }
-
- d := &declInfo{file: fileScope, typ: last.Type, init: init}
- check.declarePkgObj(name, obj, d)
- }
-
- check.arityMatch(s, last)
-
- case token.VAR:
- lhs := make([]*Var, len(s.Names))
- // If there's exactly one rhs initializer, use
- // the same declInfo d1 for all lhs variables
- // so that each lhs variable depends on the same
- // rhs initializer (n:1 var declaration).
- var d1 *declInfo
- if len(s.Values) == 1 {
- // The lhs elements are only set up after the for loop below,
- // but that's ok because declareVar only collects the declInfo
- // for a later phase.
- d1 = &declInfo{file: fileScope, lhs: lhs, typ: s.Type, init: s.Values[0]}
- }
-
- // declare all variables
- for i, name := range s.Names {
- obj := NewVar(name.Pos(), pkg, name.Name, nil)
- lhs[i] = obj
-
- d := d1
- if d == nil {
- // individual assignments
- var init ast.Expr
- if i < len(s.Values) {
- init = s.Values[i]
- }
- d = &declInfo{file: fileScope, typ: s.Type, init: init}
- }
-
- check.declarePkgObj(name, obj, d)
- }
+ d := &declInfo{file: fileScope, typ: d.typ, init: init}
+ check.declarePkgObj(name, obj, d)
+ }
- check.arityMatch(s, nil)
+ case varDecl:
+ lhs := make([]*Var, len(d.spec.Names))
+ // If there's exactly one rhs initializer, use
+ // the same declInfo d1 for all lhs variables
+ // so that each lhs variable depends on the same
+ // rhs initializer (n:1 var declaration).
+ var d1 *declInfo
+ if len(d.spec.Values) == 1 {
+ // The lhs elements are only set up after the for loop below,
+ // but that's ok because declareVar only collects the declInfo
+ // for a later phase.
+ d1 = &declInfo{file: fileScope, lhs: lhs, typ: d.spec.Type, init: d.spec.Values[0]}
+ }
- default:
- check.invalidAST(s.Pos(), "invalid token %s", d.Tok)
+ // declare all variables
+ for i, name := range d.spec.Names {
+ obj := NewVar(name.Pos(), pkg, name.Name, nil)
+ lhs[i] = obj
+
+ di := d1
+ if di == nil {
+ // individual assignments
+ var init ast.Expr
+ if i < len(d.spec.Values) {
+ init = d.spec.Values[i]
}
-
- case *ast.TypeSpec:
- obj := NewTypeName(s.Name.Pos(), pkg, s.Name.Name, nil)
- check.declarePkgObj(s.Name, obj, &declInfo{file: fileScope, typ: s.Type, alias: s.Assign.IsValid()})
-
- default:
- check.invalidAST(s.Pos(), "unknown ast.Spec node %T", s)
+ di = &declInfo{file: fileScope, typ: d.spec.Type, init: init}
}
- }
- case *ast.FuncDecl:
- name := d.Name.Name
- obj := NewFunc(d.Name.Pos(), pkg, name, nil)
- if d.Recv == nil {
+ check.declarePkgObj(name, obj, di)
+ }
+ case typeDecl:
+ obj := NewTypeName(d.spec.Name.Pos(), pkg, d.spec.Name.Name, nil)
+ check.declarePkgObj(d.spec.Name, obj, &declInfo{file: fileScope, typ: d.spec.Type, alias: d.spec.Assign.IsValid()})
+ case funcDecl:
+ info := &declInfo{file: fileScope, fdecl: d.decl}
+ name := d.decl.Name.Name
+ obj := NewFunc(d.decl.Name.Pos(), pkg, name, nil)
+ if d.decl.Recv == nil {
// regular function
if name == "init" {
// don't declare init functions in the package scope - they are invisible
obj.parent = pkg.scope
- check.recordDef(d.Name, obj)
+ check.recordDef(d.decl.Name, obj)
// init functions must have a body
- if d.Body == nil {
+ if d.decl.Body == nil {
check.softErrorf(obj.pos, "missing function body")
}
} else {
- check.declare(pkg.scope, d.Name, obj, token.NoPos)
+ check.declare(pkg.scope, d.decl.Name, obj, token.NoPos)
}
} else {
// method
@@ -417,20 +385,16 @@ func (check *Checker) collectObjects() {
if name != "_" {
methods = append(methods, obj)
}
- check.recordDef(d.Name, obj)
+ check.recordDef(d.decl.Name, obj)
}
- info := &declInfo{file: fileScope, fdecl: d}
// Methods are not package-level objects but we still track them in the
// object map so that we can handle them like regular functions (if the
// receiver is invalid); also we need their fdecl info when associating
// them with their receiver base type, below.
check.objMap[obj] = info
obj.setOrder(uint32(len(check.objMap)))
-
- default:
- check.invalidAST(d.Pos(), "unknown ast.Decl node %T", d)
}
- }
+ })
}
// verify that objects in package and file scopes have different names
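
Note: one subtle consequence of moving the per-spec logic into a walkDecls callback is that the early exits that used continue (bad import path, renamed "C" import, and so on) become return, which abandons only the current declaration while the walk carries on. A tiny self-contained sketch of that control flow; visitEach is a hypothetical stand-in for walkDecls:

package main

import "fmt"

// visitEach calls f once per element; a plain return inside f behaves like the
// old "continue": it skips the rest of that element's handling, not the loop.
func visitEach(xs []string, f func(string)) {
	for _, x := range xs {
		f(x)
	}
}

func main() {
	visitEach([]string{"ok", "bad", "also-ok"}, func(x string) {
		if x == "bad" {
			return // analogous to "continue" in the pre-refactor loop
		}
		fmt.Println("processed", x)
	})
	// prints: processed ok, processed also-ok
}
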
diff --git a/src/go/types/self_test.go b/src/go/types/self_test.go
index 04c9cd3458..b5f6bfe532 100644
--- a/src/go/types/self_test.go
+++ b/src/go/types/self_test.go
@@ -5,12 +5,11 @@
package types_test
import (
- "flag"
- "fmt"
"go/ast"
"go/importer"
"go/parser"
"go/token"
+ "path"
"path/filepath"
"testing"
"time"
@@ -18,8 +17,6 @@ import (
. "go/types"
)
-var benchmark = flag.Bool("b", false, "run benchmarks")
-
func TestSelf(t *testing.T) {
fset := token.NewFileSet()
files, err := pkgFiles(fset, ".")
@@ -39,46 +36,39 @@ func TestSelf(t *testing.T) {
}
}
-func TestBenchmark(t *testing.T) {
- if !*benchmark {
- return
- }
-
- // We're not using testing's benchmarking mechanism directly
- // because we want custom output.
-
+func BenchmarkCheck(b *testing.B) {
for _, p := range []string{
"net/http",
"go/parser",
"go/constant",
filepath.Join("go", "internal", "gcimporter"),
} {
- path := filepath.Join("..", "..", p)
- runbench(t, path, false)
- runbench(t, path, true)
- fmt.Println()
+ b.Run(path.Base(p), func(b *testing.B) {
+ path := filepath.Join("..", "..", p)
+ for _, ignoreFuncBodies := range []bool{false, true} {
+ name := "funcbodies"
+ if ignoreFuncBodies {
+ name = "nofuncbodies"
+ }
+ b.Run(name, func(b *testing.B) {
+ b.Run("info", func(b *testing.B) {
+ runbench(b, path, ignoreFuncBodies, true)
+ })
+ b.Run("noinfo", func(b *testing.B) {
+ runbench(b, path, ignoreFuncBodies, false)
+ })
+ })
+ }
+ })
}
}
-func runbench(t *testing.T, path string, ignoreFuncBodies bool) {
+func runbench(b *testing.B, path string, ignoreFuncBodies, writeInfo bool) {
fset := token.NewFileSet()
files, err := pkgFiles(fset, path)
if err != nil {
- t.Fatal(err)
+ b.Fatal(err)
}
-
- b := testing.Benchmark(func(b *testing.B) {
- for i := 0; i < b.N; i++ {
- conf := Config{
- IgnoreFuncBodies: ignoreFuncBodies,
- Importer: importer.Default(),
- }
- if _, err := conf.Check(path, fset, files, nil); err != nil {
- t.Fatal(err)
- }
- }
- })
-
// determine line count
lines := 0
fset.Iterate(func(f *token.File) bool {
@@ -86,10 +76,30 @@ func runbench(t *testing.T, path string, ignoreFuncBodies bool) {
return true
})
- d := time.Duration(b.NsPerOp())
- fmt.Printf("%s (ignoreFuncBodies = %v):\n", filepath.Base(path), ignoreFuncBodies)
- fmt.Printf("\t%s for %d lines (%.0f lines/s)\n", d, lines, float64(lines)/d.Seconds())
- fmt.Printf("\t%s\n", b.MemString())
+ b.ResetTimer()
+ start := time.Now()
+ for i := 0; i < b.N; i++ {
+ conf := Config{
+ IgnoreFuncBodies: ignoreFuncBodies,
+ Importer: importer.Default(),
+ }
+ var info *Info
+ if writeInfo {
+ info = &Info{
+ Types: make(map[ast.Expr]TypeAndValue),
+ Defs: make(map[*ast.Ident]Object),
+ Uses: make(map[*ast.Ident]Object),
+ Implicits: make(map[ast.Node]Object),
+ Selections: make(map[*ast.SelectorExpr]*Selection),
+ Scopes: make(map[ast.Node]*Scope),
+ }
+ }
+ if _, err := conf.Check(path, fset, files, info); err != nil {
+ b.Fatal(err)
+ }
+ }
+ b.StopTimer()
+ b.ReportMetric(float64(lines)*float64(b.N)/time.Since(start).Seconds(), "lines/s")
}
func pkgFiles(fset *token.FileSet, path string) ([]*ast.File, error) {
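
Note: TestBenchmark and its custom -b flag are replaced by a regular BenchmarkCheck that uses nested b.Run sub-benchmarks plus b.ReportMetric for the custom lines/s figure, so it now runs under the standard go test -bench=Check invocation. The shape of that pattern in isolation; the package and benchmark names here are illustrative:

package bench_test

import (
	"testing"
	"time"
)

func BenchmarkShape(b *testing.B) {
	const lines = 10000 // stand-in for the line count derived from the FileSet
	for _, cfg := range []string{"funcbodies", "nofuncbodies"} {
		b.Run(cfg, func(b *testing.B) {
			b.ResetTimer()
			start := time.Now()
			sum := 0
			for i := 0; i < b.N; i++ {
				for j := 0; j < lines; j++ { // stand-in for type-checking `lines` lines
					sum += j
				}
			}
			b.StopTimer()
			_ = sum
			// Attach a custom per-second metric next to the standard ns/op output.
			b.ReportMetric(float64(lines)*float64(b.N)/time.Since(start).Seconds(), "lines/s")
		})
	}
}
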
diff --git a/src/go/types/stdlib_test.go b/src/go/types/stdlib_test.go
index f5a3273fa1..669e7bec20 100644
--- a/src/go/types/stdlib_test.go
+++ b/src/go/types/stdlib_test.go
@@ -183,6 +183,8 @@ func TestStdFixed(t *testing.T) {
"issue31747.go", // go/types does not have constraints on language level (-lang=go1.12) (see #31793)
"issue34329.go", // go/types does not have constraints on language level (-lang=go1.13) (see #31793)
"bug251.go", // issue #34333 which was exposed with fix for #34151
+ "issue42058a.go", // go/types does not have constraints on channel element size
+ "issue42058b.go", // go/types does not have constraints on channel element size
)
}