author     Russ Cox <rsc@golang.org>  2020-12-23 00:05:23 -0500
committer  Russ Cox <rsc@golang.org>  2020-12-23 06:37:55 +0000
commit     ead4957892bc1975d9cc9c32777733c67e5a885e (patch)
tree       832d82f1ea11b03243900acb62af2cbe88922762 /src/cmd/compile
parent     440308ffd7061e0eb386a9a8469575528b41dcd4 (diff)
[dev.regabi] cmd/compile: move helpers into package base [generated]
[git-generate]
cd src/cmd/compile/internal/gc
rf '
	# Move EnableTrace constant into base, with the other flags.
	mv enableTrace EnableTrace
	mv EnableTrace base.go

	# Move compilation checks to base.
	mv instrumenting Instrumenting
	mv ispkgin Compiling
	mv omit_pkgs NoInstrumentPkgs
	mv norace_inst_pkgs NoRacePkgs
	mv Instrumenting Compiling NoInstrumentPkgs NoRacePkgs base.go

	# Move AutogeneratedPos to package base, next to Pos.
	mv autogeneratedPos AutogeneratedPos
	mv AutogeneratedPos print.go

	mv timings Timer
	mv base.go print.go timings.go cmd/compile/internal/base
'
cd ../base
rf '
	mv Instrumenting Flag.Cfg.Instrumenting
'

Change-Id: I534437fa75857d31531fc499d833c9930c0a06d0
Reviewed-on: https://go-review.googlesource.com/c/go/+/279420
Trust: Russ Cox <rsc@golang.org>
Run-TryBot: Russ Cox <rsc@golang.org>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
Diffstat (limited to 'src/cmd/compile')
-rw-r--r--  src/cmd/compile/internal/base/base.go                                                          51
-rw-r--r--  src/cmd/compile/internal/base/flag.go                                                           3
-rw-r--r--  src/cmd/compile/internal/base/print.go                                                          2
-rw-r--r--  src/cmd/compile/internal/base/timings.go (renamed from src/cmd/compile/internal/gc/timings.go)   4
-rw-r--r--  src/cmd/compile/internal/gc/alg.go                                                              4
-rw-r--r--  src/cmd/compile/internal/gc/align.go                                                            2
-rw-r--r--  src/cmd/compile/internal/gc/go.go                                                               7
-rw-r--r--  src/cmd/compile/internal/gc/gsubr.go                                                            2
-rw-r--r--  src/cmd/compile/internal/gc/inl.go                                                              2
-rw-r--r--  src/cmd/compile/internal/gc/main.go                                                            32
-rw-r--r--  src/cmd/compile/internal/gc/order.go                                                           16
-rw-r--r--  src/cmd/compile/internal/gc/racewalk.go                                                        50
-rw-r--r--  src/cmd/compile/internal/gc/range.go                                                            4
-rw-r--r--  src/cmd/compile/internal/gc/ssa.go                                                              4
-rw-r--r--  src/cmd/compile/internal/gc/subr.go                                                             6
-rw-r--r--  src/cmd/compile/internal/gc/typecheck.go                                                       35
-rw-r--r--  src/cmd/compile/internal/gc/walk.go                                                            18
17 files changed, 120 insertions, 122 deletions
diff --git a/src/cmd/compile/internal/base/base.go b/src/cmd/compile/internal/base/base.go
index e26b378472..5a30fa6a33 100644
--- a/src/cmd/compile/internal/base/base.go
+++ b/src/cmd/compile/internal/base/base.go
@@ -26,3 +26,54 @@ func Exit(code int) {
}
os.Exit(code)
}
+
+// To enable tracing support (-t flag), set EnableTrace to true.
+const EnableTrace = false
+
+func Compiling(pkgs []string) bool {
+ if Ctxt.Pkgpath != "" {
+ for _, p := range pkgs {
+ if Ctxt.Pkgpath == p {
+ return true
+ }
+ }
+ }
+
+ return false
+}
+
+// The racewalk pass is currently handled in three parts.
+//
+// First, for flag_race, it inserts calls to racefuncenter and
+// racefuncexit at the start and end (respectively) of each
+// function. This is handled below.
+//
+// Second, during buildssa, it inserts appropriate instrumentation
+// calls immediately before each memory load or store. This is handled
+// by the (*state).instrument method in ssa.go, so here we just set
+// the Func.InstrumentBody flag as needed. For background on why this
+// is done during SSA construction rather than a separate SSA pass,
+// see issue #19054.
+//
+// Third we remove calls to racefuncenter and racefuncexit, for leaf
+// functions without instrumented operations. This is done as part of
+// ssa opt pass via special rule.
+
+// TODO(dvyukov): do not instrument initialization as writes:
+// a := make([]int, 10)
+
+// Do not instrument the following packages at all,
+// at best instrumentation would cause infinite recursion.
+var NoInstrumentPkgs = []string{
+ "runtime/internal/atomic",
+ "runtime/internal/sys",
+ "runtime/internal/math",
+ "runtime",
+ "runtime/race",
+ "runtime/msan",
+ "internal/cpu",
+}
+
+// Don't insert racefuncenterfp/racefuncexit into the following packages.
+// Memory accesses in the packages are either uninteresting or will cause false positives.
+var NoRacePkgs = []string{"sync", "sync/atomic"}
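
Editor's illustration, not part of the CL: the new base.Compiling helper above is a straight move of the old gc.ispkgin logic. The standalone sketch below mirrors that check and the two package lists, with pkgpath standing in for base.Ctxt.Pkgpath (set by the compiler from the package being built).

package main

import "fmt"

// The lists below mirror base.NoInstrumentPkgs and base.NoRacePkgs from the
// hunk above; compiling mirrors base.Compiling, with pkgpath standing in for
// base.Ctxt.Pkgpath.
var noInstrumentPkgs = []string{
	"runtime/internal/atomic",
	"runtime/internal/sys",
	"runtime/internal/math",
	"runtime",
	"runtime/race",
	"runtime/msan",
	"internal/cpu",
}

var noRacePkgs = []string{"sync", "sync/atomic"}

// compiling reports whether pkgpath names one of pkgs.
func compiling(pkgpath string, pkgs []string) bool {
	if pkgpath != "" {
		for _, p := range pkgs {
			if pkgpath == p {
				return true
			}
		}
	}
	return false
}

func main() {
	for _, pkg := range []string{"runtime", "sync", "net/http"} {
		fmt.Printf("%-12s noInstrument=%-5v noRace=%v\n",
			pkg, compiling(pkg, noInstrumentPkgs), compiling(pkg, noRacePkgs))
	}
}

In the driver, Main clears Flag.Race and Flag.MSan when Compiling(NoInstrumentPkgs) is true (see the main.go hunk further down), while NoRacePkgs only opts a package out of the racefuncenter/racefuncexit insertion.
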
diff --git a/src/cmd/compile/internal/base/flag.go b/src/cmd/compile/internal/base/flag.go
index ce87ff730e..d35b8452f9 100644
--- a/src/cmd/compile/internal/base/flag.go
+++ b/src/cmd/compile/internal/base/flag.go
@@ -130,6 +130,9 @@ type CmdFlags struct {
ImportMap map[string]string // set by -importmap OR -importcfg
PackageFile map[string]string // set by -importcfg; nil means not in use
SpectreIndex bool // set by -spectre=index or -spectre=all
+ // Whether we are adding any sort of code instrumentation, such as
+ // when the race detector is enabled.
+ Instrumenting bool
}
}
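
Editor's illustration, not part of the CL: the flag that used to be the package-level gc.instrumenting variable now lives on base.Flag.Cfg. It is derived once in Main from -race and -msan and consulted by the later passes (order, walk, ssa) in the hunks below. A minimal standalone sketch of that plumbing, using a stand-in struct rather than the real CmdFlags type:

package main

import "fmt"

// cfg is a stand-in for the instrumentation-related parts of base.CmdFlags;
// it is not the real type, only the fields needed for this illustration.
type cfg struct {
	Race          bool // -race
	MSan          bool // -msan
	Instrumenting bool // derived: some form of code instrumentation is on
}

func main() {
	flags := cfg{Race: true}

	// Mirrors the change to gc.Main: enabling either detector sets the
	// single Instrumenting flag that downstream passes check.
	if flags.Race || flags.MSan {
		flags.Instrumenting = true
	}
	fmt.Println("instrumenting:", flags.Instrumenting)
}
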
diff --git a/src/cmd/compile/internal/base/print.go b/src/cmd/compile/internal/base/print.go
index ac7333ca4e..9855dfdad0 100644
--- a/src/cmd/compile/internal/base/print.go
+++ b/src/cmd/compile/internal/base/print.go
@@ -260,3 +260,5 @@ func ExitIfErrors() {
ErrorExit()
}
}
+
+var AutogeneratedPos src.XPos
diff --git a/src/cmd/compile/internal/gc/timings.go b/src/cmd/compile/internal/base/timings.go
index ac12d78d1e..f599f4e05f 100644
--- a/src/cmd/compile/internal/gc/timings.go
+++ b/src/cmd/compile/internal/base/timings.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package gc
+package base
import (
"fmt"
@@ -11,7 +11,7 @@ import (
"time"
)
-var timings Timings
+var Timer Timings
// Timings collects the execution times of labeled phases
// which are added trough a sequence of Start/Stop calls.
diff --git a/src/cmd/compile/internal/gc/alg.go b/src/cmd/compile/internal/gc/alg.go
index 49ce14b026..8733c6198c 100644
--- a/src/cmd/compile/internal/gc/alg.go
+++ b/src/cmd/compile/internal/gc/alg.go
@@ -288,7 +288,7 @@ func genhash(t *types.Type) *obj.LSym {
fmt.Printf("genhash %v %v %v\n", closure, sym, t)
}
- base.Pos = autogeneratedPos // less confusing than end of input
+ base.Pos = base.AutogeneratedPos // less confusing than end of input
dclcontext = ir.PEXTERN
// func sym(p *T, h uintptr) uintptr
@@ -517,7 +517,7 @@ func geneq(t *types.Type) *obj.LSym {
// Autogenerate code for equality of structs and arrays.
- base.Pos = autogeneratedPos // less confusing than end of input
+ base.Pos = base.AutogeneratedPos // less confusing than end of input
dclcontext = ir.PEXTERN
// func sym(p, q *T) bool
diff --git a/src/cmd/compile/internal/gc/align.go b/src/cmd/compile/internal/gc/align.go
index a9cf7fb50a..f2f98bd51f 100644
--- a/src/cmd/compile/internal/gc/align.go
+++ b/src/cmd/compile/internal/gc/align.go
@@ -270,7 +270,7 @@ func reportTypeLoop(t *types.Type) {
func dowidth(t *types.Type) {
// Calling dowidth when typecheck tracing enabled is not safe.
// See issue #33658.
- if enableTrace && skipDowidthForTracing {
+ if base.EnableTrace && skipDowidthForTracing {
return
}
if Widthptr == 0 {
diff --git a/src/cmd/compile/internal/gc/go.go b/src/cmd/compile/internal/gc/go.go
index df91f6f530..46ddda0ba7 100644
--- a/src/cmd/compile/internal/gc/go.go
+++ b/src/cmd/compile/internal/gc/go.go
@@ -10,7 +10,6 @@ import (
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/obj"
- "cmd/internal/src"
"sync"
)
@@ -144,14 +143,8 @@ var Widthreg int
var typecheckok bool
-// Whether we are adding any sort of code instrumentation, such as
-// when the race detector is enabled.
-var instrumenting bool
-
var nodfp *ir.Name
-var autogeneratedPos src.XPos
-
// interface to back end
type Arch struct {
diff --git a/src/cmd/compile/internal/gc/gsubr.go b/src/cmd/compile/internal/gc/gsubr.go
index f4178db477..db55b1035c 100644
--- a/src/cmd/compile/internal/gc/gsubr.go
+++ b/src/cmd/compile/internal/gc/gsubr.go
@@ -199,7 +199,7 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
savedclcontext := dclcontext
savedcurfn := Curfn
- base.Pos = autogeneratedPos
+ base.Pos = base.AutogeneratedPos
dclcontext = ir.PEXTERN
// At the moment we don't support wrapping a method, we'd need machinery
diff --git a/src/cmd/compile/internal/gc/inl.go b/src/cmd/compile/internal/gc/inl.go
index 2fb23f1a3f..49e0bcc470 100644
--- a/src/cmd/compile/internal/gc/inl.go
+++ b/src/cmd/compile/internal/gc/inl.go
@@ -844,7 +844,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
return n
}
- if instrumenting && isRuntimePkg(fn.Sym().Pkg) {
+ if base.Flag.Cfg.Instrumenting && isRuntimePkg(fn.Sym().Pkg) {
// Runtime package must not be instrumented.
// Instrument skips runtime package. However, some runtime code can be
// inlined into other packages and instrumented there. To avoid this,
diff --git a/src/cmd/compile/internal/gc/main.go b/src/cmd/compile/internal/gc/main.go
index c1cc7ed377..feded3f9b2 100644
--- a/src/cmd/compile/internal/gc/main.go
+++ b/src/cmd/compile/internal/gc/main.go
@@ -58,7 +58,7 @@ var Target *ir.Package
// arguments, type-checks the parsed Go package, compiles functions to machine
// code, and finally writes the compiled package definition to disk.
func Main(archInit func(*Arch)) {
- timings.Start("fe", "init")
+ base.Timer.Start("fe", "init")
defer hidePanic()
@@ -123,7 +123,7 @@ func Main(archInit func(*Arch)) {
// changes in the binary.)
recordFlags("B", "N", "l", "msan", "race", "shared", "dynlink", "dwarflocationlists", "dwarfbasentries", "smallframes", "spectre")
- if !enableTrace && base.Flag.LowerT {
+ if !base.EnableTrace && base.Flag.LowerT {
log.Fatalf("compiler not built with support for -t")
}
@@ -159,7 +159,7 @@ func Main(archInit func(*Arch)) {
readSymABIs(base.Flag.SymABIs, base.Ctxt.Pkgpath)
}
- if ispkgin(omit_pkgs) {
+ if base.Compiling(base.NoInstrumentPkgs) {
base.Flag.Race = false
base.Flag.MSan = false
}
@@ -173,7 +173,7 @@ func Main(archInit func(*Arch)) {
msanpkg = types.NewPkg("runtime/msan", "")
}
if base.Flag.Race || base.Flag.MSan {
- instrumenting = true
+ base.Flag.Cfg.Instrumenting = true
}
if base.Flag.Dwarf {
dwarf.EnableLogging(base.Debug.DwarfInl != 0)
@@ -205,7 +205,7 @@ func Main(archInit func(*Arch)) {
NeedITab = func(t, iface *types.Type) { itabname(t, iface) }
NeedRuntimeType = addsignat // TODO(rsc): typenamesym for lock?
- autogeneratedPos = makePos(src.NewFileBase("<autogenerated>", "<autogenerated>"), 1, 0)
+ base.AutogeneratedPos = makePos(src.NewFileBase("<autogenerated>", "<autogenerated>"), 1, 0)
types.TypeLinkSym = func(t *types.Type) *obj.LSym {
return typenamesym(t).Linksym()
@@ -213,11 +213,11 @@ func Main(archInit func(*Arch)) {
TypecheckInit()
// Parse input.
- timings.Start("fe", "parse")
+ base.Timer.Start("fe", "parse")
lines := parseFiles(flag.Args())
cgoSymABIs()
- timings.Stop()
- timings.AddEvent(int64(lines), "lines")
+ base.Timer.Stop()
+ base.Timer.AddEvent(int64(lines), "lines")
recordPackageName()
// Typecheck.
@@ -233,7 +233,7 @@ func Main(archInit func(*Arch)) {
}
// Inlining
- timings.Start("fe", "inlining")
+ base.Timer.Start("fe", "inlining")
if base.Flag.LowerL != 0 {
InlinePackage()
}
@@ -254,7 +254,7 @@ func Main(archInit func(*Arch)) {
// or else the stack copier will not update it.
// Large values are also moved off stack in escape analysis;
// because large values may contain pointers, it must happen early.
- timings.Start("fe", "escapes")
+ base.Timer.Start("fe", "escapes")
escapes(Target.Decls)
// Collect information for go:nowritebarrierrec
@@ -268,7 +268,7 @@ func Main(archInit func(*Arch)) {
// Transform closure bodies to properly reference captured variables.
// This needs to happen before walk, because closures must be transformed
// before walk reaches a call of a closure.
- timings.Start("fe", "xclosures")
+ base.Timer.Start("fe", "xclosures")
for _, n := range Target.Decls {
if n.Op() == ir.ODCLFUNC {
n := n.(*ir.Func)
@@ -292,7 +292,7 @@ func Main(archInit func(*Arch)) {
// Compile top level functions.
// Don't use range--walk can add functions to Target.Decls.
- timings.Start("be", "compilefuncs")
+ base.Timer.Start("be", "compilefuncs")
fcount := int64(0)
for i := 0; i < len(Target.Decls); i++ {
n := Target.Decls[i]
@@ -301,7 +301,7 @@ func Main(archInit func(*Arch)) {
fcount++
}
}
- timings.AddEvent(fcount, "funcs")
+ base.Timer.AddEvent(fcount, "funcs")
compileFunctions()
@@ -320,7 +320,7 @@ func Main(archInit func(*Arch)) {
}
// Write object data to disk.
- timings.Start("be", "dumpobj")
+ base.Timer.Start("be", "dumpobj")
dumpdata()
base.Ctxt.NumberSyms()
dumpobj()
@@ -339,7 +339,7 @@ func Main(archInit func(*Arch)) {
base.ExitIfErrors()
base.FlushErrors()
- timings.Stop()
+ base.Timer.Stop()
if base.Flag.Bench != "" {
if err := writebench(base.Flag.Bench); err != nil {
@@ -397,7 +397,7 @@ func writebench(filename string) error {
fmt.Fprintln(&buf, "commit:", objabi.Version)
fmt.Fprintln(&buf, "goos:", runtime.GOOS)
fmt.Fprintln(&buf, "goarch:", runtime.GOARCH)
- timings.Write(&buf, "BenchmarkCompile:"+base.Ctxt.Pkgpath+":")
+ base.Timer.Write(&buf, "BenchmarkCompile:"+base.Ctxt.Pkgpath+":")
n, err := f.Write(buf.Bytes())
if err != nil {
diff --git a/src/cmd/compile/internal/gc/order.go b/src/cmd/compile/internal/gc/order.go
index 45a2e2a43e..738b403b99 100644
--- a/src/cmd/compile/internal/gc/order.go
+++ b/src/cmd/compile/internal/gc/order.go
@@ -362,7 +362,7 @@ func (o *Order) stmtList(l ir.Nodes) {
// and rewrites it to:
// m = OMAKESLICECOPY([]T, x, s); nil
func orderMakeSliceCopy(s []ir.Node) {
- if base.Flag.N != 0 || instrumenting {
+ if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
return
}
if len(s) < 2 || s[0] == nil || s[0].Op() != ir.OAS || s[1] == nil || s[1].Op() != ir.OCOPY {
@@ -580,7 +580,7 @@ func (o *Order) mapAssign(n ir.Node) {
m.Index = o.copyExpr(m.Index)
}
fallthrough
- case instrumenting && n.Op() == ir.OAS2FUNC && !ir.IsBlank(m):
+ case base.Flag.Cfg.Instrumenting && n.Op() == ir.OAS2FUNC && !ir.IsBlank(m):
t := o.newTemp(m.Type(), false)
n.Lhs[i] = t
a := ir.NewAssignStmt(base.Pos, m, t)
@@ -639,7 +639,7 @@ func (o *Order) stmt(n ir.Node) {
n.X = o.expr(n.X, nil)
n.Y = o.expr(n.Y, nil)
- if instrumenting || n.X.Op() == ir.OINDEXMAP && (n.AsOp == ir.ODIV || n.AsOp == ir.OMOD) {
+ if base.Flag.Cfg.Instrumenting || n.X.Op() == ir.OINDEXMAP && (n.AsOp == ir.ODIV || n.AsOp == ir.OMOD) {
// Rewrite m[k] op= r into m[k] = m[k] op r so
// that we can ensure that if op panics
// because r is zero, the panic happens before
@@ -1008,7 +1008,7 @@ func (o *Order) stmt(n ir.Node) {
t := o.markTemp()
n.Chan = o.expr(n.Chan, nil)
n.Value = o.expr(n.Value, nil)
- if instrumenting {
+ if base.Flag.Cfg.Instrumenting {
// Force copying to the stack so that (chan T)(nil) <- x
// is still instrumented as a read of x.
n.Value = o.copyExpr(n.Value)
@@ -1156,7 +1156,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// conversions. See copyExpr a few lines below.
needCopy = mapKeyReplaceStrConv(n.Index)
- if instrumenting {
+ if base.Flag.Cfg.Instrumenting {
// Race detector needs the copy.
needCopy = true
}
@@ -1194,7 +1194,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// together. See golang.org/issue/15329.
o.init(call)
o.call(call)
- if lhs == nil || lhs.Op() != ir.ONAME || instrumenting {
+ if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
return o.copyExpr(n)
}
} else {
@@ -1267,7 +1267,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
o.call(n)
}
- if lhs == nil || lhs.Op() != ir.ONAME || instrumenting {
+ if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
return o.copyExpr(n)
}
return n
@@ -1332,7 +1332,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
case ir.ODOTTYPE, ir.ODOTTYPE2:
n := n.(*ir.TypeAssertExpr)
n.X = o.expr(n.X, nil)
- if !isdirectiface(n.Type()) || instrumenting {
+ if !isdirectiface(n.Type()) || base.Flag.Cfg.Instrumenting {
return o.copyExprClear(n)
}
return n
diff --git a/src/cmd/compile/internal/gc/racewalk.go b/src/cmd/compile/internal/gc/racewalk.go
index 61a65368af..67802fe917 100644
--- a/src/cmd/compile/internal/gc/racewalk.go
+++ b/src/cmd/compile/internal/gc/racewalk.go
@@ -12,60 +12,12 @@ import (
"cmd/internal/sys"
)
-// The racewalk pass is currently handled in three parts.
-//
-// First, for flag_race, it inserts calls to racefuncenter and
-// racefuncexit at the start and end (respectively) of each
-// function. This is handled below.
-//
-// Second, during buildssa, it inserts appropriate instrumentation
-// calls immediately before each memory load or store. This is handled
-// by the (*state).instrument method in ssa.go, so here we just set
-// the Func.InstrumentBody flag as needed. For background on why this
-// is done during SSA construction rather than a separate SSA pass,
-// see issue #19054.
-//
-// Third we remove calls to racefuncenter and racefuncexit, for leaf
-// functions without instrumented operations. This is done as part of
-// ssa opt pass via special rule.
-
-// TODO(dvyukov): do not instrument initialization as writes:
-// a := make([]int, 10)
-
-// Do not instrument the following packages at all,
-// at best instrumentation would cause infinite recursion.
-var omit_pkgs = []string{
- "runtime/internal/atomic",
- "runtime/internal/sys",
- "runtime/internal/math",
- "runtime",
- "runtime/race",
- "runtime/msan",
- "internal/cpu",
-}
-
-// Don't insert racefuncenterfp/racefuncexit into the following packages.
-// Memory accesses in the packages are either uninteresting or will cause false positives.
-var norace_inst_pkgs = []string{"sync", "sync/atomic"}
-
-func ispkgin(pkgs []string) bool {
- if base.Ctxt.Pkgpath != "" {
- for _, p := range pkgs {
- if base.Ctxt.Pkgpath == p {
- return true
- }
- }
- }
-
- return false
-}
-
func instrument(fn *ir.Func) {
if fn.Pragma&ir.Norace != 0 || (fn.Sym().Linksym() != nil && fn.Sym().Linksym().ABIWrapper()) {
return
}
- if !base.Flag.Race || !ispkgin(norace_inst_pkgs) {
+ if !base.Flag.Race || !base.Compiling(base.NoRacePkgs) {
fn.SetInstrumentBody(true)
}
diff --git a/src/cmd/compile/internal/gc/range.go b/src/cmd/compile/internal/gc/range.go
index 4d2964591b..078f03bc68 100644
--- a/src/cmd/compile/internal/gc/range.go
+++ b/src/cmd/compile/internal/gc/range.go
@@ -460,7 +460,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
//
// where == for keys of map m is reflexive.
func isMapClear(n *ir.RangeStmt) bool {
- if base.Flag.N != 0 || instrumenting {
+ if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
return false
}
@@ -523,7 +523,7 @@ func mapClear(m ir.Node) ir.Node {
//
// Parameters are as in walkrange: "for v1, v2 = range a".
func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
- if base.Flag.N != 0 || instrumenting {
+ if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
return nil
}
diff --git a/src/cmd/compile/internal/gc/ssa.go b/src/cmd/compile/internal/gc/ssa.go
index 6993b4b1c7..0bca2baa17 100644
--- a/src/cmd/compile/internal/gc/ssa.go
+++ b/src/cmd/compile/internal/gc/ssa.go
@@ -2281,7 +2281,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
return nil
}
- if instrumenting {
+ if base.Flag.Cfg.Instrumenting {
// These appear to be fine, but they fail the
// integer constraint below, so okay them here.
// Sample non-integer conversion: map[string]string -> *uint8
@@ -3490,7 +3490,7 @@ func initSSATables() {
}
/******** runtime ********/
- if !instrumenting {
+ if !base.Flag.Cfg.Instrumenting {
add("runtime", "slicebytetostringtmp",
func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
// Compiler frontend optimizations emit OBYTES2STRTMP nodes
diff --git a/src/cmd/compile/internal/gc/subr.go b/src/cmd/compile/internal/gc/subr.go
index 59763824fb..6e130d4889 100644
--- a/src/cmd/compile/internal/gc/subr.go
+++ b/src/cmd/compile/internal/gc/subr.go
@@ -613,7 +613,7 @@ func calcHasCall(n ir.Node) bool {
case ir.OANDAND, ir.OOROR:
// hard with instrumented code
n := n.(*ir.LogicalExpr)
- if instrumenting {
+ if base.Flag.Cfg.Instrumenting {
return true
}
return n.X.HasCall() || n.Y.HasCall()
@@ -1209,7 +1209,7 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
return
}
- base.Pos = autogeneratedPos
+ base.Pos = base.AutogeneratedPos
dclcontext = ir.PEXTERN
tfn := ir.NewFuncType(base.Pos,
@@ -1243,7 +1243,7 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
// the TOC to the appropriate value for that module. But if it returns
// directly to the wrapper's caller, nothing will reset it to the correct
// value for that function.
- if !instrumenting && rcvr.IsPtr() && methodrcvr.IsPtr() && method.Embedded != 0 && !isifacemethod(method.Type) && !(thearch.LinkArch.Name == "ppc64le" && base.Ctxt.Flag_dynlink) {
+ if !base.Flag.Cfg.Instrumenting && rcvr.IsPtr() && methodrcvr.IsPtr() && method.Embedded != 0 && !isifacemethod(method.Type) && !(thearch.LinkArch.Name == "ppc64le" && base.Ctxt.Flag_dynlink) {
// generate tail call: adjust pointer receiver and jump to embedded method.
left := dot.X // skip final .M
if !left.Type().IsPtr() {
diff --git a/src/cmd/compile/internal/gc/typecheck.go b/src/cmd/compile/internal/gc/typecheck.go
index f2e5728d80..4f1fe240ec 100644
--- a/src/cmd/compile/internal/gc/typecheck.go
+++ b/src/cmd/compile/internal/gc/typecheck.go
@@ -25,7 +25,7 @@ func TypecheckInit() {
types.Dowidth = dowidth
initUniverse()
dclcontext = ir.PEXTERN
- timings.Start("fe", "loadsys")
+ base.Timer.Start("fe", "loadsys")
loadsys()
}
@@ -45,7 +45,7 @@ func TypecheckPackage() {
// TODO(gri) Remove this again once we have a fix for #25838.
// Don't use range--typecheck can add closures to Target.Decls.
- timings.Start("fe", "typecheck", "top1")
+ base.Timer.Start("fe", "typecheck", "top1")
for i := 0; i < len(Target.Decls); i++ {
n := Target.Decls[i]
if op := n.Op(); op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.(*ir.Decl).X.Name().Alias()) {
@@ -57,7 +57,7 @@ func TypecheckPackage() {
// To check interface assignments, depends on phase 1.
// Don't use range--typecheck can add closures to Target.Decls.
- timings.Start("fe", "typecheck", "top2")
+ base.Timer.Start("fe", "typecheck", "top2")
for i := 0; i < len(Target.Decls); i++ {
n := Target.Decls[i]
if op := n.Op(); op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.(*ir.Decl).X.Name().Alias() {
@@ -67,7 +67,7 @@ func TypecheckPackage() {
// Phase 3: Type check function bodies.
// Don't use range--typecheck can add closures to Target.Decls.
- timings.Start("fe", "typecheck", "func")
+ base.Timer.Start("fe", "typecheck", "func")
var fcount int64
for i := 0; i < len(Target.Decls); i++ {
n := Target.Decls[i]
@@ -80,7 +80,7 @@ func TypecheckPackage() {
// Phase 4: Check external declarations.
// TODO(mdempsky): This should be handled when type checking their
// corresponding ODCL nodes.
- timings.Start("fe", "typecheck", "externdcls")
+ base.Timer.Start("fe", "typecheck", "externdcls")
for i, n := range Target.Externs {
if n.Op() == ir.ONAME {
Target.Externs[i] = typecheck(Target.Externs[i], ctxExpr)
@@ -93,7 +93,7 @@ func TypecheckPackage() {
// Phase 6: Decide how to capture closed variables.
// This needs to run before escape analysis,
// because variables captured by value do not escape.
- timings.Start("fe", "capturevars")
+ base.Timer.Start("fe", "capturevars")
for _, n := range Target.Decls {
if n.Op() == ir.ODCLFUNC {
n := n.(*ir.Func)
@@ -162,9 +162,6 @@ func TypecheckImports() {
}
}
-// To enable tracing support (-t flag), set enableTrace to true.
-const enableTrace = false
-
var traceIndent []byte
var skipDowidthForTracing bool
@@ -234,7 +231,7 @@ func resolve(n ir.Node) (res ir.Node) {
}
// only trace if there's work to do
- if enableTrace && base.Flag.LowerT {
+ if base.EnableTrace && base.Flag.LowerT {
defer tracePrint("resolve", n)(&res)
}
@@ -379,7 +376,7 @@ func typecheck(n ir.Node, top int) (res ir.Node) {
}
// only trace if there's work to do
- if enableTrace && base.Flag.LowerT {
+ if base.EnableTrace && base.Flag.LowerT {
defer tracePrint("typecheck", n)(&res)
}
@@ -568,7 +565,7 @@ func indexlit(n ir.Node) ir.Node {
// typecheck1 should ONLY be called from typecheck.
func typecheck1(n ir.Node, top int) (res ir.Node) {
- if enableTrace && base.Flag.LowerT {
+ if base.EnableTrace && base.Flag.LowerT {
defer tracePrint("typecheck1", n)(&res)
}
@@ -2552,7 +2549,7 @@ func lookdot1(errnode ir.Node, s *types.Sym, t *types.Type, fs *types.Fields, do
// typecheckMethodExpr checks selector expressions (ODOT) where the
// base expression is a type expression (OTYPE).
func typecheckMethodExpr(n *ir.SelectorExpr) (res ir.Node) {
- if enableTrace && base.Flag.LowerT {
+ if base.EnableTrace && base.Flag.LowerT {
defer tracePrint("typecheckMethodExpr", n)(&res)
}
@@ -2991,7 +2988,7 @@ func pushtype(nn ir.Node, t *types.Type) ir.Node {
// The result of typecheckcomplit MUST be assigned back to n, e.g.
// n.Left = typecheckcomplit(n.Left)
func typecheckcomplit(n *ir.CompLitExpr) (res ir.Node) {
- if enableTrace && base.Flag.LowerT {
+ if base.EnableTrace && base.Flag.LowerT {
defer tracePrint("typecheckcomplit", n)(&res)
}
@@ -3435,7 +3432,7 @@ func samesafeexpr(l ir.Node, r ir.Node) bool {
// if this assignment is the definition of a var on the left side,
// fill in the var's type.
func typecheckas(n *ir.AssignStmt) {
- if enableTrace && base.Flag.LowerT {
+ if base.EnableTrace && base.Flag.LowerT {
defer tracePrint("typecheckas", n)(nil)
}
@@ -3493,7 +3490,7 @@ func checkassignto(src *types.Type, dst ir.Node) {
}
func typecheckas2(n *ir.AssignListStmt) {
- if enableTrace && base.Flag.LowerT {
+ if base.EnableTrace && base.Flag.LowerT {
defer tracePrint("typecheckas2", n)(nil)
}
@@ -3627,7 +3624,7 @@ out:
// To be called by typecheck, not directly.
// (Call typecheckFunc instead.)
func typecheckfunc(n *ir.Func) {
- if enableTrace && base.Flag.LowerT {
+ if base.EnableTrace && base.Flag.LowerT {
defer tracePrint("typecheckfunc", n)(nil)
}
@@ -3691,7 +3688,7 @@ func checkMapKeys() {
}
func typecheckdeftype(n *ir.Name) {
- if enableTrace && base.Flag.LowerT {
+ if base.EnableTrace && base.Flag.LowerT {
defer tracePrint("typecheckdeftype", n)(nil)
}
@@ -3723,7 +3720,7 @@ func typecheckdeftype(n *ir.Name) {
}
func typecheckdef(n ir.Node) {
- if enableTrace && base.Flag.LowerT {
+ if base.EnableTrace && base.Flag.LowerT {
defer tracePrint("typecheckdef", n)(nil)
}
diff --git a/src/cmd/compile/internal/gc/walk.go b/src/cmd/compile/internal/gc/walk.go
index 610c6b6539..57edc43280 100644
--- a/src/cmd/compile/internal/gc/walk.go
+++ b/src/cmd/compile/internal/gc/walk.go
@@ -85,7 +85,7 @@ func walk(fn *ir.Func) {
ir.DumpList(s, Curfn.Enter)
}
- if instrumenting {
+ if base.Flag.Cfg.Instrumenting {
instrument(fn)
}
}
@@ -738,7 +738,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
return as
}
- if !instrumenting && isZero(as.Y) {
+ if !base.Flag.Cfg.Instrumenting && isZero(as.Y) {
return as
}
@@ -1311,7 +1311,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
panic("unreachable")
case ir.OCOPY:
- return copyany(n.(*ir.BinaryExpr), init, instrumenting && !base.Flag.CompilingRuntime)
+ return copyany(n.(*ir.BinaryExpr), init, base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime)
case ir.OCLOSE:
// cannot use chanfn - closechan takes any, not chan any
@@ -1597,7 +1597,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OBYTES2STRTMP:
n := n.(*ir.ConvExpr)
n.X = walkexpr(n.X, init)
- if !instrumenting {
+ if !base.Flag.Cfg.Instrumenting {
// Let the backend handle OBYTES2STRTMP directly
// to avoid a function call to slicebytetostringtmp.
return n
@@ -1975,7 +1975,7 @@ func walkCall(n *ir.CallExpr, init *ir.Nodes) {
} else {
t = params.Field(i).Type
}
- if instrumenting || fncall(arg, t) {
+ if base.Flag.Cfg.Instrumenting || fncall(arg, t) {
// make assignment of fncall to tempAt
tmp := temp(t)
a := convas(ir.NewAssignStmt(base.Pos, tmp, arg), init)
@@ -2873,7 +2873,7 @@ func appendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
ptr1, len1 := backingArrayPtrLen(cheapexpr(slice, &nodes))
ptr2, len2 := backingArrayPtrLen(l2)
ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, typename(elemtype), ptr1, len1, ptr2, len2)
- } else if instrumenting && !base.Flag.CompilingRuntime {
+ } else if base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime {
// rely on runtime to instrument:
// copy(s[len(l1):], l2)
// l2 can be a slice or string.
@@ -2914,7 +2914,7 @@ func appendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
// isAppendOfMake reports whether n is of the form append(x , make([]T, y)...).
// isAppendOfMake assumes n has already been typechecked.
func isAppendOfMake(n ir.Node) bool {
- if base.Flag.N != 0 || instrumenting {
+ if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
return false
}
@@ -3125,7 +3125,7 @@ func walkappend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
// General case, with no function calls left as arguments.
// Leave for gen, except that instrumentation requires old form.
- if !instrumenting || base.Flag.CompilingRuntime {
+ if !base.Flag.Cfg.Instrumenting || base.Flag.CompilingRuntime {
return n
}
@@ -4055,7 +4055,7 @@ func canMergeLoads() bool {
// isRuneCount reports whether n is of the form len([]rune(string)).
// These are optimized into a call to runtime.countrunes.
func isRuneCount(n ir.Node) bool {
- return base.Flag.N == 0 && !instrumenting && n.Op() == ir.OLEN && n.(*ir.UnaryExpr).X.Op() == ir.OSTR2RUNES
+ return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN && n.(*ir.UnaryExpr).X.Op() == ir.OSTR2RUNES
}
func walkCheckPtrAlignment(n *ir.ConvExpr, init *ir.Nodes, count ir.Node) ir.Node {