author     Matthew Dempsky <mdempsky@google.com>  2021-01-24 10:52:00 -0800
committer  Matthew Dempsky <mdempsky@google.com>  2021-01-25 01:31:37 +0000
commit     063c72f06d8673f3a2a03fd549c61935ca3e5cc5 (patch)
tree       38600b03f3aa47fa87772e1c7e9c8659fc6a8200 /src/cmd/compile/internal/noder
parent     d05d6fab32cb3d47f8682d19ca11085430f39164 (diff)
[dev.regabi] cmd/compile: backport changes from dev.typeparams (9456804)
This CL backports a bunch of changes that landed on dev.typeparams, but are not dependent on types2 or generics. By backporting, we reduce the divergence between development branches, hopefully improving test coverage and reducing risk of merge conflicts.

Updates #43866.

Change-Id: I382510855c9b5fac52b17066e44a00bd07fe86f5
Reviewed-on: https://go-review.googlesource.com/c/go/+/286172
Trust: Matthew Dempsky <mdempsky@google.com>
Trust: Robert Griesemer <gri@golang.org>
Run-TryBot: Matthew Dempsky <mdempsky@google.com>
Reviewed-by: Robert Griesemer <gri@golang.org>
Diffstat (limited to 'src/cmd/compile/internal/noder')
-rw-r--r--  src/cmd/compile/internal/noder/import.go  239
-rw-r--r--  src/cmd/compile/internal/noder/noder.go   268
-rw-r--r--  src/cmd/compile/internal/noder/posmap.go   83
3 files changed, 279 insertions(+), 311 deletions(-)
diff --git a/src/cmd/compile/internal/noder/import.go b/src/cmd/compile/internal/noder/import.go
index ca041a156c..747c30e6ff 100644
--- a/src/cmd/compile/internal/noder/import.go
+++ b/src/cmd/compile/internal/noder/import.go
@@ -5,18 +5,20 @@
package noder
import (
+ "errors"
"fmt"
- "go/constant"
"os"
- "path"
+ pathpkg "path"
"runtime"
"sort"
+ "strconv"
"strings"
"unicode"
"unicode/utf8"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/syntax"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/archive"
@@ -38,160 +40,157 @@ func islocalname(name string) bool {
strings.HasPrefix(name, "../") || name == ".."
}
-func findpkg(name string) (file string, ok bool) {
- if islocalname(name) {
+func openPackage(path string) (*os.File, error) {
+ if islocalname(path) {
if base.Flag.NoLocalImports {
- return "", false
+ return nil, errors.New("local imports disallowed")
}
if base.Flag.Cfg.PackageFile != nil {
- file, ok = base.Flag.Cfg.PackageFile[name]
- return file, ok
+ return os.Open(base.Flag.Cfg.PackageFile[path])
}
- // try .a before .6. important for building libraries:
- // if there is an array.6 in the array.a library,
- // want to find all of array.a, not just array.6.
- file = fmt.Sprintf("%s.a", name)
- if _, err := os.Stat(file); err == nil {
- return file, true
+ // try .a before .o. important for building libraries:
+ // if there is an array.o in the array.a library,
+ // want to find all of array.a, not just array.o.
+ if file, err := os.Open(fmt.Sprintf("%s.a", path)); err == nil {
+ return file, nil
}
- file = fmt.Sprintf("%s.o", name)
- if _, err := os.Stat(file); err == nil {
- return file, true
+ if file, err := os.Open(fmt.Sprintf("%s.o", path)); err == nil {
+ return file, nil
}
- return "", false
+ return nil, errors.New("file not found")
}
// local imports should be canonicalized already.
// don't want to see "encoding/../encoding/base64"
// as different from "encoding/base64".
- if q := path.Clean(name); q != name {
- base.Errorf("non-canonical import path %q (should be %q)", name, q)
- return "", false
+ if q := pathpkg.Clean(path); q != path {
+ return nil, fmt.Errorf("non-canonical import path %q (should be %q)", path, q)
}
if base.Flag.Cfg.PackageFile != nil {
- file, ok = base.Flag.Cfg.PackageFile[name]
- return file, ok
+ return os.Open(base.Flag.Cfg.PackageFile[path])
}
for _, dir := range base.Flag.Cfg.ImportDirs {
- file = fmt.Sprintf("%s/%s.a", dir, name)
- if _, err := os.Stat(file); err == nil {
- return file, true
+ if file, err := os.Open(fmt.Sprintf("%s/%s.a", dir, path)); err == nil {
+ return file, nil
}
- file = fmt.Sprintf("%s/%s.o", dir, name)
- if _, err := os.Stat(file); err == nil {
- return file, true
+ if file, err := os.Open(fmt.Sprintf("%s/%s.o", dir, path)); err == nil {
+ return file, nil
}
}
if objabi.GOROOT != "" {
suffix := ""
- suffixsep := ""
if base.Flag.InstallSuffix != "" {
- suffixsep = "_"
- suffix = base.Flag.InstallSuffix
+ suffix = "_" + base.Flag.InstallSuffix
} else if base.Flag.Race {
- suffixsep = "_"
- suffix = "race"
+ suffix = "_race"
} else if base.Flag.MSan {
- suffixsep = "_"
- suffix = "msan"
+ suffix = "_msan"
}
- file = fmt.Sprintf("%s/pkg/%s_%s%s%s/%s.a", objabi.GOROOT, objabi.GOOS, objabi.GOARCH, suffixsep, suffix, name)
- if _, err := os.Stat(file); err == nil {
- return file, true
+ if file, err := os.Open(fmt.Sprintf("%s/pkg/%s_%s%s/%s.a", objabi.GOROOT, objabi.GOOS, objabi.GOARCH, suffix, path)); err == nil {
+ return file, nil
}
- file = fmt.Sprintf("%s/pkg/%s_%s%s%s/%s.o", objabi.GOROOT, objabi.GOOS, objabi.GOARCH, suffixsep, suffix, name)
- if _, err := os.Stat(file); err == nil {
- return file, true
+ if file, err := os.Open(fmt.Sprintf("%s/pkg/%s_%s%s/%s.o", objabi.GOROOT, objabi.GOOS, objabi.GOARCH, suffix, path)); err == nil {
+ return file, nil
}
}
-
- return "", false
+ return nil, errors.New("file not found")
}
// myheight tracks the local package's height based on packages
// imported so far.
var myheight int
-func importfile(f constant.Value) *types.Pkg {
- if f.Kind() != constant.String {
- base.Errorf("import path must be a string")
- return nil
- }
-
- path_ := constant.StringVal(f)
- if len(path_) == 0 {
- base.Errorf("import path is empty")
- return nil
- }
-
- if isbadimport(path_, false) {
- return nil
- }
-
+// resolveImportPath resolves an import path as it appears in a Go
+// source file to the package's full path.
+func resolveImportPath(path string) (string, error) {
// The package name main is no longer reserved,
// but we reserve the import path "main" to identify
// the main package, just as we reserve the import
// path "math" to identify the standard math package.
- if path_ == "main" {
- base.Errorf("cannot import \"main\"")
- base.ErrorExit()
- }
-
- if base.Ctxt.Pkgpath != "" && path_ == base.Ctxt.Pkgpath {
- base.Errorf("import %q while compiling that package (import cycle)", path_)
- base.ErrorExit()
+ if path == "main" {
+ return "", errors.New("cannot import \"main\"")
}
- if mapped, ok := base.Flag.Cfg.ImportMap[path_]; ok {
- path_ = mapped
+ if base.Ctxt.Pkgpath != "" && path == base.Ctxt.Pkgpath {
+ return "", fmt.Errorf("import %q while compiling that package (import cycle)", path)
}
- if path_ == "unsafe" {
- return ir.Pkgs.Unsafe
+ if mapped, ok := base.Flag.Cfg.ImportMap[path]; ok {
+ path = mapped
}
- if islocalname(path_) {
- if path_[0] == '/' {
- base.Errorf("import path cannot be absolute path")
- return nil
+ if islocalname(path) {
+ if path[0] == '/' {
+ return "", errors.New("import path cannot be absolute path")
}
- prefix := base.Ctxt.Pathname
- if base.Flag.D != "" {
- prefix = base.Flag.D
+ prefix := base.Flag.D
+ if prefix == "" {
+ // Questionable, but when -D isn't specified, historically we
+ // resolve local import paths relative to the directory the
+ // compiler's current directory, not the respective source
+ // file's directory.
+ prefix = base.Ctxt.Pathname
}
- path_ = path.Join(prefix, path_)
+ path = pathpkg.Join(prefix, path)
- if isbadimport(path_, true) {
- return nil
+ if err := checkImportPath(path, true); err != nil {
+ return "", err
}
}
- file, found := findpkg(path_)
- if !found {
- base.Errorf("can't find import: %q", path_)
- base.ErrorExit()
+ return path, nil
+}
+
+// TODO(mdempsky): Return an error instead.
+func importfile(decl *syntax.ImportDecl) *types.Pkg {
+ if decl.Path.Kind != syntax.StringLit {
+ base.Errorf("import path must be a string")
+ return nil
}
- importpkg := types.NewPkg(path_, "")
- if importpkg.Imported {
- return importpkg
+ path, err := strconv.Unquote(decl.Path.Value)
+ if err != nil {
+ base.Errorf("import path must be a string")
+ return nil
+ }
+
+ if err := checkImportPath(path, false); err != nil {
+ base.Errorf("%s", err.Error())
+ return nil
}
- importpkg.Imported = true
+ path, err = resolveImportPath(path)
+ if err != nil {
+ base.Errorf("%s", err)
+ return nil
+ }
+
+ importpkg := types.NewPkg(path, "")
+ if importpkg.Direct {
+ return importpkg // already fully loaded
+ }
+ importpkg.Direct = true
+ typecheck.Target.Imports = append(typecheck.Target.Imports, importpkg)
+
+ if path == "unsafe" {
+ return importpkg // initialized with universe
+ }
- imp, err := bio.Open(file)
+ f, err := openPackage(path)
if err != nil {
- base.Errorf("can't open import: %q: %v", path_, err)
+ base.Errorf("could not import %q: %v", path, err)
base.ErrorExit()
}
+ imp := bio.NewReader(f)
defer imp.Close()
+ file := f.Name()
// check object header
p, err := imp.ReadString('\n')
@@ -261,12 +260,12 @@ func importfile(f constant.Value) *types.Pkg {
var fingerprint goobj.FingerprintType
switch c {
case '\n':
- base.Errorf("cannot import %s: old export format no longer supported (recompile library)", path_)
+ base.Errorf("cannot import %s: old export format no longer supported (recompile library)", path)
return nil
case 'B':
if base.Debug.Export != 0 {
- fmt.Printf("importing %s (%s)\n", path_, file)
+ fmt.Printf("importing %s (%s)\n", path, file)
}
imp.ReadByte() // skip \n after $$B
@@ -285,17 +284,17 @@ func importfile(f constant.Value) *types.Pkg {
fingerprint = typecheck.ReadImports(importpkg, imp)
default:
- base.Errorf("no import in %q", path_)
+ base.Errorf("no import in %q", path)
base.ErrorExit()
}
// assume files move (get installed) so don't record the full path
if base.Flag.Cfg.PackageFile != nil {
// If using a packageFile map, assume path_ can be recorded directly.
- base.Ctxt.AddImport(path_, fingerprint)
+ base.Ctxt.AddImport(path, fingerprint)
} else {
// For file "/Users/foo/go/pkg/darwin_amd64/math.a" record "math.a".
- base.Ctxt.AddImport(file[len(file)-len(path_)-len(".a"):], fingerprint)
+ base.Ctxt.AddImport(file[len(file)-len(path)-len(".a"):], fingerprint)
}
if importpkg.Height >= myheight {
@@ -315,47 +314,37 @@ var reservedimports = []string{
"type",
}
-func isbadimport(path string, allowSpace bool) bool {
+func checkImportPath(path string, allowSpace bool) error {
+ if path == "" {
+ return errors.New("import path is empty")
+ }
+
if strings.Contains(path, "\x00") {
- base.Errorf("import path contains NUL")
- return true
+ return errors.New("import path contains NUL")
}
for _, ri := range reservedimports {
if path == ri {
- base.Errorf("import path %q is reserved and cannot be used", path)
- return true
+ return fmt.Errorf("import path %q is reserved and cannot be used", path)
}
}
for _, r := range path {
- if r == utf8.RuneError {
- base.Errorf("import path contains invalid UTF-8 sequence: %q", path)
- return true
- }
-
- if r < 0x20 || r == 0x7f {
- base.Errorf("import path contains control character: %q", path)
- return true
- }
-
- if r == '\\' {
- base.Errorf("import path contains backslash; use slash: %q", path)
- return true
- }
-
- if !allowSpace && unicode.IsSpace(r) {
- base.Errorf("import path contains space character: %q", path)
- return true
- }
-
- if strings.ContainsRune("!\"#$%&'()*,:;<=>?[]^`{|}", r) {
- base.Errorf("import path contains invalid character '%c': %q", r, path)
- return true
+ switch {
+ case r == utf8.RuneError:
+ return fmt.Errorf("import path contains invalid UTF-8 sequence: %q", path)
+ case r < 0x20 || r == 0x7f:
+ return fmt.Errorf("import path contains control character: %q", path)
+ case r == '\\':
+ return fmt.Errorf("import path contains backslash; use slash: %q", path)
+ case !allowSpace && unicode.IsSpace(r):
+ return fmt.Errorf("import path contains space character: %q", path)
+ case strings.ContainsRune("!\"#$%&'()*,:;<=>?[]^`{|}", r):
+ return fmt.Errorf("import path contains invalid character '%c': %q", r, path)
}
}
- return false
+ return nil
}
func pkgnotused(lineno src.XPos, path string, name string) {
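
The rewritten checkImportPath replaces the old one-error-at-a-time reporting with a single rune-by-rune switch that returns an error. Below is a minimal standalone sketch of those per-rune rules using only the standard library; the function name checkPath and the example paths are illustrative, not part of the compiler's API, and the reserved-name check is left out for brevity.

package main

import (
	"errors"
	"fmt"
	"strings"
	"unicode"
	"unicode/utf8"
)

// checkPath mirrors the per-rune rules in checkImportPath above: reject
// empty paths, NUL bytes, invalid UTF-8, control characters, backslashes,
// (optionally) spaces, and a fixed set of punctuation characters.
func checkPath(path string, allowSpace bool) error {
	if path == "" {
		return errors.New("import path is empty")
	}
	if strings.Contains(path, "\x00") {
		return errors.New("import path contains NUL")
	}
	for _, r := range path {
		switch {
		case r == utf8.RuneError:
			return fmt.Errorf("import path contains invalid UTF-8 sequence: %q", path)
		case r < 0x20 || r == 0x7f:
			return fmt.Errorf("import path contains control character: %q", path)
		case r == '\\':
			return fmt.Errorf("import path contains backslash; use slash: %q", path)
		case !allowSpace && unicode.IsSpace(r):
			return fmt.Errorf("import path contains space character: %q", path)
		case strings.ContainsRune("!\"#$%&'()*,:;<=>?[]^`{|}", r):
			return fmt.Errorf("import path contains invalid character '%c': %q", r, path)
		}
	}
	return nil
}

func main() {
	fmt.Println(checkPath("encoding/base64", false)) // <nil>
	fmt.Println(checkPath(`bad\path`, false))        // backslash error
}
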
diff --git a/src/cmd/compile/internal/noder/noder.go b/src/cmd/compile/internal/noder/noder.go
index 5bb01895cc..6aab18549a 100644
--- a/src/cmd/compile/internal/noder/noder.go
+++ b/src/cmd/compile/internal/noder/noder.go
@@ -17,6 +17,7 @@ import (
"unicode/utf8"
"cmd/compile/internal/base"
+ "cmd/compile/internal/dwarfgen"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
"cmd/compile/internal/typecheck"
@@ -27,40 +28,26 @@ import (
func LoadPackage(filenames []string) {
base.Timer.Start("fe", "parse")
- lines := ParseFiles(filenames)
- base.Timer.Stop()
- base.Timer.AddEvent(int64(lines), "lines")
- // Typecheck.
- Package()
+ mode := syntax.CheckBranches
- // With all user code typechecked, it's now safe to verify unused dot imports.
- CheckDotImports()
- base.ExitIfErrors()
-}
-
-// ParseFiles concurrently parses files into *syntax.File structures.
-// Each declaration in every *syntax.File is converted to a syntax tree
-// and its root represented by *Node is appended to Target.Decls.
-// Returns the total count of parsed lines.
-func ParseFiles(filenames []string) uint {
- noders := make([]*noder, 0, len(filenames))
// Limit the number of simultaneously open files.
sem := make(chan struct{}, runtime.GOMAXPROCS(0)+10)
- for _, filename := range filenames {
- p := &noder{
- basemap: make(map[*syntax.PosBase]*src.PosBase),
+ noders := make([]*noder, len(filenames))
+ for i, filename := range filenames {
+ p := noder{
err: make(chan syntax.Error),
trackScopes: base.Flag.Dwarf,
}
- noders = append(noders, p)
+ noders[i] = &p
- go func(filename string) {
+ filename := filename
+ go func() {
sem <- struct{}{}
defer func() { <-sem }()
defer close(p.err)
- base := syntax.NewFileBase(filename)
+ fbase := syntax.NewFileBase(filename)
f, err := os.Open(filename)
if err != nil {
@@ -69,8 +56,8 @@ func ParseFiles(filenames []string) uint {
}
defer f.Close()
- p.file, _ = syntax.Parse(base, f, p.error, p.pragma, syntax.CheckBranches) // errors are tracked via p.error
- }(filename)
+ p.file, _ = syntax.Parse(fbase, f, p.error, p.pragma, mode) // errors are tracked via p.error
+ }()
}
var lines uint
@@ -78,30 +65,27 @@ func ParseFiles(filenames []string) uint {
for e := range p.err {
p.errorAt(e.Pos, "%s", e.Msg)
}
+ lines += p.file.Lines
+ }
+ base.Timer.AddEvent(int64(lines), "lines")
+ for _, p := range noders {
p.node()
- lines += p.file.Lines
p.file = nil // release memory
+ }
- if base.SyntaxErrors() != 0 {
- base.ErrorExit()
- }
- // Always run CheckDclstack here, even when debug_dclstack is not set, as a sanity measure.
- types.CheckDclstack()
+ if base.SyntaxErrors() != 0 {
+ base.ErrorExit()
}
+ types.CheckDclstack()
for _, p := range noders {
p.processPragmas()
}
+ // Typecheck.
types.LocalPkg.Height = myheight
-
- return lines
-}
-
-func Package() {
typecheck.DeclareUniverse()
-
typecheck.TypecheckAllowed = true
// Process top-level declarations in phases.
@@ -166,44 +150,10 @@ func Package() {
}
// Phase 5: With all user code type-checked, it's now safe to verify map keys.
+ // With all user code typechecked, it's now safe to verify unused dot imports.
typecheck.CheckMapKeys()
-
-}
-
-// makeSrcPosBase translates from a *syntax.PosBase to a *src.PosBase.
-func (p *noder) makeSrcPosBase(b0 *syntax.PosBase) *src.PosBase {
- // fast path: most likely PosBase hasn't changed
- if p.basecache.last == b0 {
- return p.basecache.base
- }
-
- b1, ok := p.basemap[b0]
- if !ok {
- fn := b0.Filename()
- if b0.IsFileBase() {
- b1 = src.NewFileBase(fn, absFilename(fn))
- } else {
- // line directive base
- p0 := b0.Pos()
- p0b := p0.Base()
- if p0b == b0 {
- panic("infinite recursion in makeSrcPosBase")
- }
- p1 := src.MakePos(p.makeSrcPosBase(p0b), p0.Line(), p0.Col())
- b1 = src.NewLinePragmaBase(p1, fn, fileh(fn), b0.Line(), b0.Col())
- }
- p.basemap[b0] = b1
- }
-
- // update cache
- p.basecache.last = b0
- p.basecache.base = b1
-
- return b1
-}
-
-func (p *noder) makeXPos(pos syntax.Pos) (_ src.XPos) {
- return base.Ctxt.PosTable.XPos(src.MakePos(p.makeSrcPosBase(pos.Base()), pos.Line(), pos.Col()))
+ CheckDotImports()
+ base.ExitIfErrors()
}
func (p *noder) errorAt(pos syntax.Pos, format string, args ...interface{}) {
@@ -221,31 +171,33 @@ func absFilename(name string) string {
// noder transforms package syntax's AST into a Node tree.
type noder struct {
- basemap map[*syntax.PosBase]*src.PosBase
- basecache struct {
- last *syntax.PosBase
- base *src.PosBase
- }
+ posMap
file *syntax.File
linknames []linkname
pragcgobuf [][]string
err chan syntax.Error
- scope ir.ScopeID
importedUnsafe bool
importedEmbed bool
+ trackScopes bool
- // scopeVars is a stack tracking the number of variables declared in the
- // current function at the moment each open scope was opened.
- trackScopes bool
- scopeVars []int
+ funcState *funcState
+}
+
+// funcState tracks all per-function state to make handling nested
+// functions easier.
+type funcState struct {
+ // scopeVars is a stack tracking the number of variables declared in
+ // the current function at the moment each open scope was opened.
+ scopeVars []int
+ marker dwarfgen.ScopeMarker
lastCloseScopePos syntax.Pos
}
func (p *noder) funcBody(fn *ir.Func, block *syntax.BlockStmt) {
- oldScope := p.scope
- p.scope = 0
+ outerFuncState := p.funcState
+ p.funcState = new(funcState)
typecheck.StartFuncBody(fn)
if block != nil {
@@ -260,62 +212,34 @@ func (p *noder) funcBody(fn *ir.Func, block *syntax.BlockStmt) {
}
typecheck.FinishFuncBody()
- p.scope = oldScope
+ p.funcState.marker.WriteTo(fn)
+ p.funcState = outerFuncState
}
func (p *noder) openScope(pos syntax.Pos) {
+ fs := p.funcState
types.Markdcl()
if p.trackScopes {
- ir.CurFunc.Parents = append(ir.CurFunc.Parents, p.scope)
- p.scopeVars = append(p.scopeVars, len(ir.CurFunc.Dcl))
- p.scope = ir.ScopeID(len(ir.CurFunc.Parents))
-
- p.markScope(pos)
+ fs.scopeVars = append(fs.scopeVars, len(ir.CurFunc.Dcl))
+ fs.marker.Push(p.makeXPos(pos))
}
}
func (p *noder) closeScope(pos syntax.Pos) {
- p.lastCloseScopePos = pos
+ fs := p.funcState
+ fs.lastCloseScopePos = pos
types.Popdcl()
if p.trackScopes {
- scopeVars := p.scopeVars[len(p.scopeVars)-1]
- p.scopeVars = p.scopeVars[:len(p.scopeVars)-1]
+ scopeVars := fs.scopeVars[len(fs.scopeVars)-1]
+ fs.scopeVars = fs.scopeVars[:len(fs.scopeVars)-1]
if scopeVars == len(ir.CurFunc.Dcl) {
// no variables were declared in this scope, so we can retract it.
-
- if int(p.scope) != len(ir.CurFunc.Parents) {
- base.Fatalf("scope tracking inconsistency, no variables declared but scopes were not retracted")
- }
-
- p.scope = ir.CurFunc.Parents[p.scope-1]
- ir.CurFunc.Parents = ir.CurFunc.Parents[:len(ir.CurFunc.Parents)-1]
-
- nmarks := len(ir.CurFunc.Marks)
- ir.CurFunc.Marks[nmarks-1].Scope = p.scope
- prevScope := ir.ScopeID(0)
- if nmarks >= 2 {
- prevScope = ir.CurFunc.Marks[nmarks-2].Scope
- }
- if ir.CurFunc.Marks[nmarks-1].Scope == prevScope {
- ir.CurFunc.Marks = ir.CurFunc.Marks[:nmarks-1]
- }
- return
+ fs.marker.Unpush()
+ } else {
+ fs.marker.Pop(p.makeXPos(pos))
}
-
- p.scope = ir.CurFunc.Parents[p.scope-1]
-
- p.markScope(pos)
- }
-}
-
-func (p *noder) markScope(pos syntax.Pos) {
- xpos := p.makeXPos(pos)
- if i := len(ir.CurFunc.Marks); i > 0 && ir.CurFunc.Marks[i-1].Pos == xpos {
- ir.CurFunc.Marks[i-1].Scope = p.scope
- } else {
- ir.CurFunc.Marks = append(ir.CurFunc.Marks, ir.Mark{Pos: xpos, Scope: p.scope})
}
}
@@ -324,7 +248,7 @@ func (p *noder) markScope(pos syntax.Pos) {
// "if" statements, as their implicit blocks always end at the same
// position as an explicit block.
func (p *noder) closeAnotherScope() {
- p.closeScope(p.lastCloseScopePos)
+ p.closeScope(p.funcState.lastCloseScopePos)
}
// linkname records a //go:linkname directive.
@@ -335,7 +259,6 @@ type linkname struct {
}
func (p *noder) node() {
- types.Block = 1
p.importedUnsafe = false
p.importedEmbed = false
@@ -404,7 +327,7 @@ func (p *noder) decls(decls []syntax.Decl) (l []ir.Node) {
}
func (p *noder) importDecl(imp *syntax.ImportDecl) {
- if imp.Path.Bad {
+ if imp.Path == nil || imp.Path.Bad {
return // avoid follow-on errors if there was a syntax error
}
@@ -412,7 +335,7 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
p.checkUnused(pragma)
}
- ipkg := importfile(p.basicLit(imp.Path))
+ ipkg := importfile(imp)
if ipkg == nil {
if base.Errors() == 0 {
base.Fatalf("phase error in import")
@@ -427,11 +350,6 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
p.importedEmbed = true
}
- if !ipkg.Direct {
- typecheck.Target.Imports = append(typecheck.Target.Imports, ipkg)
- }
- ipkg.Direct = true
-
var my *types.Sym
if imp.LocalPkgName != nil {
my = p.name(imp.LocalPkgName)
@@ -465,20 +383,7 @@ func (p *noder) varDecl(decl *syntax.VarDecl) []ir.Node {
exprs := p.exprList(decl.Values)
if pragma, ok := decl.Pragma.(*pragmas); ok {
- if len(pragma.Embeds) > 0 {
- if !p.importedEmbed {
- // This check can't be done when building the list pragma.Embeds
- // because that list is created before the noder starts walking over the file,
- // so at that point it hasn't seen the imports.
- // We're left to check now, just before applying the //go:embed lines.
- for _, e := range pragma.Embeds {
- p.errorAt(e.Pos, "//go:embed only allowed in Go files that import \"embed\"")
- }
- } else {
- varEmbed(p, names, typ, exprs, pragma.Embeds)
- }
- pragma.Embeds = nil
- }
+ varEmbed(p.makeXPos, names[0], decl, pragma, p.importedEmbed)
p.checkUnused(pragma)
}
@@ -1126,9 +1031,16 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node {
case *syntax.DeclStmt:
return ir.NewBlockStmt(src.NoXPos, p.decls(stmt.DeclList))
case *syntax.AssignStmt:
+ if stmt.Rhs == syntax.ImplicitOne {
+ one := constant.MakeInt64(1)
+ pos := p.pos(stmt)
+ n := ir.NewAssignOpStmt(pos, p.binOp(stmt.Op), p.expr(stmt.Lhs), ir.NewBasicLit(pos, one))
+ n.IncDec = true
+ return n
+ }
+
if stmt.Op != 0 && stmt.Op != syntax.Def {
n := ir.NewAssignOpStmt(p.pos(stmt), p.binOp(stmt.Op), p.expr(stmt.Lhs), p.expr(stmt.Rhs))
- n.IncDec = stmt.Rhs == syntax.ImplicitOne
return n
}
@@ -1588,15 +1500,6 @@ func (p *noder) wrapname(n syntax.Node, x ir.Node) ir.Node {
return x
}
-func (p *noder) pos(n syntax.Node) src.XPos {
- // TODO(gri): orig.Pos() should always be known - fix package syntax
- xpos := base.Pos
- if pos := n.Pos(); pos.IsKnown() {
- xpos = p.makeXPos(pos)
- }
- return xpos
-}
-
func (p *noder) setlineno(n syntax.Node) {
if n != nil {
base.Pos = p.pos(n)
@@ -1923,48 +1826,41 @@ func oldname(s *types.Sym) ir.Node {
return n
}
-func varEmbed(p *noder, names []*ir.Name, typ ir.Ntype, exprs []ir.Node, embeds []pragmaEmbed) {
- haveEmbed := false
- for _, decl := range p.file.DeclList {
- imp, ok := decl.(*syntax.ImportDecl)
- if !ok {
- // imports always come first
- break
- }
- path, _ := strconv.Unquote(imp.Path.Value)
- if path == "embed" {
- haveEmbed = true
- break
- }
+func varEmbed(makeXPos func(syntax.Pos) src.XPos, name *ir.Name, decl *syntax.VarDecl, pragma *pragmas, haveEmbed bool) {
+ if pragma.Embeds == nil {
+ return
}
- pos := embeds[0].Pos
+ pragmaEmbeds := pragma.Embeds
+ pragma.Embeds = nil
+ pos := makeXPos(pragmaEmbeds[0].Pos)
+
if !haveEmbed {
- p.errorAt(pos, "invalid go:embed: missing import \"embed\"")
+ base.ErrorfAt(pos, "go:embed only allowed in Go files that import \"embed\"")
return
}
- if len(names) > 1 {
- p.errorAt(pos, "go:embed cannot apply to multiple vars")
+ if len(decl.NameList) > 1 {
+ base.ErrorfAt(pos, "go:embed cannot apply to multiple vars")
return
}
- if len(exprs) > 0 {
- p.errorAt(pos, "go:embed cannot apply to var with initializer")
+ if decl.Values != nil {
+ base.ErrorfAt(pos, "go:embed cannot apply to var with initializer")
return
}
- if typ == nil {
- // Should not happen, since len(exprs) == 0 now.
- p.errorAt(pos, "go:embed cannot apply to var without type")
+ if decl.Type == nil {
+ // Should not happen, since Values == nil now.
+ base.ErrorfAt(pos, "go:embed cannot apply to var without type")
return
}
if typecheck.DeclContext != ir.PEXTERN {
- p.errorAt(pos, "go:embed cannot apply to var inside func")
+ base.ErrorfAt(pos, "go:embed cannot apply to var inside func")
return
}
- v := names[0]
- typecheck.Target.Embeds = append(typecheck.Target.Embeds, v)
- v.Embed = new([]ir.Embed)
- for _, e := range embeds {
- *v.Embed = append(*v.Embed, ir.Embed{Pos: p.makeXPos(e.Pos), Patterns: e.Patterns})
+ var embeds []ir.Embed
+ for _, e := range pragmaEmbeds {
+ embeds = append(embeds, ir.Embed{Pos: makeXPos(e.Pos), Patterns: e.Patterns})
}
+ typecheck.Target.Embeds = append(typecheck.Target.Embeds, name)
+ name.Embed = &embeds
}
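
LoadPackage still parses all files concurrently while bounding the number of simultaneously open files with a buffered channel used as a semaphore (the sem channel in the diff above). The following is a minimal standalone sketch of that bounded-concurrency pattern, not the compiler's actual error-reporting flow; parseAll and its callback are illustrative names.

package main

import (
	"fmt"
	"os"
	"runtime"
	"sync"
)

// parseAll opens and processes every file in parallel, but the buffered
// channel sem limits how many files are open at the same time.
func parseAll(filenames []string, parse func(*os.File) error) []error {
	sem := make(chan struct{}, runtime.GOMAXPROCS(0)+10)
	errs := make([]error, len(filenames))

	var wg sync.WaitGroup
	for i, filename := range filenames {
		i, filename := i, filename // capture loop variables (pre-Go 1.22 semantics)
		wg.Add(1)
		go func() {
			defer wg.Done()
			sem <- struct{}{}        // acquire a slot
			defer func() { <-sem }() // release it when done

			f, err := os.Open(filename)
			if err != nil {
				errs[i] = err
				return
			}
			defer f.Close()
			errs[i] = parse(f)
		}()
	}
	wg.Wait()
	return errs
}

func main() {
	errs := parseAll([]string{"a.go", "b.go"}, func(f *os.File) error {
		fmt.Println("parsing", f.Name())
		return nil
	})
	fmt.Println(errs)
}
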
diff --git a/src/cmd/compile/internal/noder/posmap.go b/src/cmd/compile/internal/noder/posmap.go
new file mode 100644
index 0000000000..a6d3e2d7ef
--- /dev/null
+++ b/src/cmd/compile/internal/noder/posmap.go
@@ -0,0 +1,83 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/syntax"
+ "cmd/internal/src"
+)
+
+// A posMap handles mapping from syntax.Pos to src.XPos.
+type posMap struct {
+ bases map[*syntax.PosBase]*src.PosBase
+ cache struct {
+ last *syntax.PosBase
+ base *src.PosBase
+ }
+}
+
+type poser interface{ Pos() syntax.Pos }
+type ender interface{ End() syntax.Pos }
+
+func (m *posMap) pos(p poser) src.XPos { return m.makeXPos(p.Pos()) }
+func (m *posMap) end(p ender) src.XPos { return m.makeXPos(p.End()) }
+
+func (m *posMap) makeXPos(pos syntax.Pos) src.XPos {
+ if !pos.IsKnown() {
+ // TODO(mdempsky): Investigate restoring base.Fatalf.
+ return src.NoXPos
+ }
+
+ posBase := m.makeSrcPosBase(pos.Base())
+ return base.Ctxt.PosTable.XPos(src.MakePos(posBase, pos.Line(), pos.Col()))
+}
+
+// makeSrcPosBase translates from a *syntax.PosBase to a *src.PosBase.
+func (m *posMap) makeSrcPosBase(b0 *syntax.PosBase) *src.PosBase {
+ // fast path: most likely PosBase hasn't changed
+ if m.cache.last == b0 {
+ return m.cache.base
+ }
+
+ b1, ok := m.bases[b0]
+ if !ok {
+ fn := b0.Filename()
+ if b0.IsFileBase() {
+ b1 = src.NewFileBase(fn, absFilename(fn))
+ } else {
+ // line directive base
+ p0 := b0.Pos()
+ p0b := p0.Base()
+ if p0b == b0 {
+ panic("infinite recursion in makeSrcPosBase")
+ }
+ p1 := src.MakePos(m.makeSrcPosBase(p0b), p0.Line(), p0.Col())
+ b1 = src.NewLinePragmaBase(p1, fn, fileh(fn), b0.Line(), b0.Col())
+ }
+ if m.bases == nil {
+ m.bases = make(map[*syntax.PosBase]*src.PosBase)
+ }
+ m.bases[b0] = b1
+ }
+
+ // update cache
+ m.cache.last = b0
+ m.cache.base = b1
+
+ return b1
+}
+
+func (m *posMap) join(other *posMap) {
+ if m.bases == nil {
+ m.bases = make(map[*syntax.PosBase]*src.PosBase)
+ }
+ for k, v := range other.bases {
+ if m.bases[k] != nil {
+ base.Fatalf("duplicate posmap bases")
+ }
+ m.bases[k] = v
+ }
+}
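
posMap pairs a lazily allocated map with a one-entry cache for the most recently seen base, since consecutive positions usually share the same *syntax.PosBase. The sketch below shows the same memoization shape with ordinary types; translator and lookup are illustrative names, not the compiler's types.

package main

import "fmt"

// translator interns string keys to small integers, caching the most
// recent key the way posMap caches its last *syntax.PosBase.
type translator struct {
	table map[string]int
	cache struct {
		lastKey string
		lastVal int
		valid   bool
	}
	next int
}

func (t *translator) lookup(key string) int {
	// fast path: most callers ask for the same key repeatedly
	if t.cache.valid && t.cache.lastKey == key {
		return t.cache.lastVal
	}
	v, ok := t.table[key]
	if !ok {
		if t.table == nil {
			t.table = make(map[string]int) // allocate lazily, like posMap.bases
		}
		v = t.next
		t.next++
		t.table[key] = v
	}
	t.cache.lastKey, t.cache.lastVal, t.cache.valid = key, v, true
	return v
}

func main() {
	var t translator
	fmt.Println(t.lookup("a.go"), t.lookup("a.go"), t.lookup("b.go")) // 0 0 1
}
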