path: root/src/cmd/compile/internal/ssa/writebarrier.go
author    Matthew Dempsky <mdempsky@google.com>  2018-03-27 13:50:08 -0700
committer Matthew Dempsky <mdempsky@google.com>  2018-04-09 18:40:55 +0000
commit    17df5ed910cab9c68bc781b06d83b8db3fd0f75c (patch)
tree      3cb147db3c93fda7245f8949318309c6538a5302 /src/cmd/compile/internal/ssa/writebarrier.go
parent    31700b83b5d9bdc2ddc474fd72b809a7b585d6da (diff)
cmd/compile: insert instrumentation during SSA building
Insert appropriate race/msan calls before each memory operation during SSA construction.

This is conceptually simple, but subtle because we need to be careful that inserted instrumentation calls don't clobber arguments that are currently being prepared for a user function call.

reorder1 already handles introducing temporary variables for arguments in some cases. This CL changes it to use them for all arguments when instrumenting.

Also, we can't SSA struct types with more than one field while instrumenting. Otherwise, concurrent uses of disjoint fields within an SSA-able struct can introduce false races.

This is both somewhat better and somewhat worse than the old racewalk instrumentation pass. We're now able to easily recognize that cases like constructing non-escaping closures on the stack or accessing closure variables don't need instrumentation calls. On the other hand, spilling escaping parameters to the heap now results in an instrumentation call.

Overall, this CL results in a small net reduction in the number of instrumentation calls, but a small net increase in binary size for instrumented executables. cmd/go ends up with 5.6% fewer calls, but a 2.4% larger binary.

Fixes #19054.

Change-Id: I70d1dd32ad6340e6fdb691e6d5a01452f58e97f3
Reviewed-on: https://go-review.googlesource.com/102817
Reviewed-by: Cherry Zhang <cherryyz@google.com>
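To make the struct-SSA restriction above concrete, here is a minimal, hypothetical program (not part of this CL, names are illustrative): two goroutines write disjoint fields of one small struct, which is race-free. If the compiler kept the whole struct as a single SSA value while instrumenting, the inserted race calls could cover the entire struct and report a race the program does not actually have.

// False-race sketch: disjoint field writes are not a data race,
// but whole-struct instrumentation could make them look like one.
package main

import "sync"

type pair struct {
	a int // written only by the first goroutine
	b int // written only by the second goroutine
}

func main() {
	var p pair
	var wg sync.WaitGroup
	wg.Add(2)
	go func() { defer wg.Done(); p.a = 1 }() // touches p.a only
	go func() { defer wg.Done(); p.b = 2 }() // touches p.b only
	wg.Wait()
}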
Diffstat (limited to 'src/cmd/compile/internal/ssa/writebarrier.go')
-rw-r--r--  src/cmd/compile/internal/ssa/writebarrier.go  34
1 file changed, 34 insertions, 0 deletions
diff --git a/src/cmd/compile/internal/ssa/writebarrier.go b/src/cmd/compile/internal/ssa/writebarrier.go
index b11b87de23..f72299be5e 100644
--- a/src/cmd/compile/internal/ssa/writebarrier.go
+++ b/src/cmd/compile/internal/ssa/writebarrier.go
@@ -8,6 +8,7 @@ import (
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
+ "strings"
)
// needwb returns whether we need write barrier for store op v.
@@ -348,6 +349,39 @@ func IsStackAddr(v *Value) bool {
return false
}
+// IsSanitizerSafeAddr reports whether v is known to be an address
+// that doesn't need instrumentation.
+func IsSanitizerSafeAddr(v *Value) bool {
+ for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
+ v = v.Args[0]
+ }
+ switch v.Op {
+ case OpSP:
+ // Stack addresses are always safe.
+ return true
+ case OpITab, OpStringPtr, OpGetClosurePtr:
+ // Itabs, string data, and closure fields are
+ // read-only once initialized.
+ return true
+ case OpAddr:
+ switch v.Args[0].Op {
+ case OpSP:
+ return true
+ case OpSB:
+ sym := v.Aux.(*obj.LSym)
+ // TODO(mdempsky): Find a cleaner way to
+ // detect this. It would be nice if we could
+ // test sym.Type==objabi.SRODATA, but we don't
+ // initialize sym.Type until after function
+ // compilation.
+ if strings.HasPrefix(sym.Name, `"".statictmp_`) {
+ return true
+ }
+ }
+ }
+ return false
+}
+
// isVolatile returns whether v is a pointer to argument region on stack which
// will be clobbered by a function call.
func isVolatile(v *Value) bool {
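As a rough illustration of what the new IsSanitizerSafeAddr check covers, here is a small, hypothetical program (not from this CL): reads of string data (OpStringPtr) and of closure fields (OpGetClosurePtr) are immutable once initialized, so these are the kinds of loads the compiler can now skip instrumenting under race/msan.

// Sanitizer-safe access sketch: the closure field and the string data
// read below are read-only after construction.
package main

import "fmt"

func greeter(name string) func() string {
	// name is only read inside the closure, so it is captured by value
	// into the closure struct; reading it back is a load from a closure
	// field, which is read-only once the closure is built.
	return func() string { return "hello, " + name }
}

func main() {
	hi := greeter("gopher") // the string data of "gopher" is immutable
	fmt.Println(hi())
}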