author    Cherry Zhang <cherryyz@google.com>  2017-02-05 23:43:31 -0500
committer Cherry Zhang <cherryyz@google.com>  2017-03-16 14:24:21 +0000
commit    9ebf3d5100a52b2c0ebcbf9754c02d1edf7a035f (patch)
tree      48f33362d4d5c052850fb891595207f94ab984de /src/cmd/compile/internal/ssa
parent    211c8c9f1a1d11a6f5c42e85ec78cd06a69fbe0c (diff)
download  go-9ebf3d5100a52b2c0ebcbf9754c02d1edf7a035f.tar.gz
          go-9ebf3d5100a52b2c0ebcbf9754c02d1edf7a035f.zip
cmd/compile: move write barrier insertion to SSA
When the compiler inserts write barriers, the frontend makes conservative decisions at an early stage. This sometimes has false positives because of the lack of information, for example, for writes on the stack. SSA's writebarrier pass identifies writes on the stack and eliminates write barriers for them.

This CL moves write barrier insertion into SSA. The frontend no longer makes decisions about write barriers; it simply does normal assignments and emits normal Store ops when building SSA. The SSA writebarrier pass inserts write barriers for Stores when needed. At that point it has better information about the store, because Phi and Copy propagation have already been done.

This CL only changes StoreWB to Store in gc/ssa.go. A followup CL simplifies the SSA building code.

Updates #17583.

Change-Id: I4592d9bc0067503befc169c50b4e6f4765673bec
Reviewed-on: https://go-review.googlesource.com/36839
Run-TryBot: Cherry Zhang <cherryyz@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Keith Randall <khr@golang.org>
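As a rough illustration (not part of this CL), the distinction the pass draws is between pointer stores that may target the heap and those that only touch the stack. In the hypothetical snippet below, the store in heapStore needs a write barrier, while the store in stackStore does not, because the struct never leaves the stack:

    package main

    type T struct{ p *int }

    var global *T

    // heapStore writes a pointer into memory that may be on the heap,
    // so the compiler must insert a write barrier for t.p = x.
    func heapStore(t *T, x *int) {
        t.p = x
    }

    // stackStore writes a pointer into a struct that never escapes,
    // so the store targets the stack and needs no write barrier.
    func stackStore(x *int) int {
        var t T
        t.p = x
        return *t.p
    }

    func main() {
        v := 42
        global = &T{}
        heapStore(global, &v)
        _ = stackStore(&v)
    }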
Diffstat (limited to 'src/cmd/compile/internal/ssa')
-rw-r--r--  src/cmd/compile/internal/ssa/config.go              3
-rw-r--r--  src/cmd/compile/internal/ssa/export_test.go         3
-rw-r--r--  src/cmd/compile/internal/ssa/loop_test.go           2
-rw-r--r--  src/cmd/compile/internal/ssa/shift_test.go          4
-rw-r--r--  src/cmd/compile/internal/ssa/writebarrier.go       53
-rw-r--r--  src/cmd/compile/internal/ssa/writebarrier_test.go   4
6 files changed, 45 insertions(+), 24 deletions(-)
diff --git a/src/cmd/compile/internal/ssa/config.go b/src/cmd/compile/internal/ssa/config.go
index 978c0d6fa8..c447fc89c0 100644
--- a/src/cmd/compile/internal/ssa/config.go
+++ b/src/cmd/compile/internal/ssa/config.go
@@ -139,6 +139,9 @@ type Frontend interface {
// Syslook returns a symbol of the runtime function/variable with the
// given name.
Syslook(string) *obj.LSym
+
+ // UseWriteBarrier returns whether write barrier is enabled
+ UseWriteBarrier() bool
}
// interface used to hold *gc.Node. We'd use *gc.Node directly but
diff --git a/src/cmd/compile/internal/ssa/export_test.go b/src/cmd/compile/internal/ssa/export_test.go
index b687076a28..699e0efc20 100644
--- a/src/cmd/compile/internal/ssa/export_test.go
+++ b/src/cmd/compile/internal/ssa/export_test.go
@@ -88,6 +88,9 @@ func (DummyFrontend) AllocFrame(f *Func) {
func (DummyFrontend) Syslook(s string) *obj.LSym {
return obj.Linklookup(TestCtxt, s, 0)
}
+func (DummyFrontend) UseWriteBarrier() bool {
+ return true // only writebarrier_test cares
+}
func (d DummyFrontend) Logf(msg string, args ...interface{}) { d.t.Logf(msg, args...) }
func (d DummyFrontend) Log() bool { return true }
diff --git a/src/cmd/compile/internal/ssa/loop_test.go b/src/cmd/compile/internal/ssa/loop_test.go
index 0901263432..40c9aee4ad 100644
--- a/src/cmd/compile/internal/ssa/loop_test.go
+++ b/src/cmd/compile/internal/ssa/loop_test.go
@@ -66,7 +66,7 @@ func TestLoopConditionS390X(t *testing.T) {
Goto("b1")),
Bloc("b3",
Valu("retdef", OpVarDef, TypeMem, 0, nil, "mem"),
- Valu("store", OpStore, TypeMem, 8, nil, "ret", "phisum", "retdef"),
+ Valu("store", OpStore, TypeMem, 8, TypeInt64, "ret", "phisum", "retdef"),
Exit("store")))
CheckFunc(fun.f)
Compile(fun.f)
diff --git a/src/cmd/compile/internal/ssa/shift_test.go b/src/cmd/compile/internal/ssa/shift_test.go
index 488b7faf29..e6a5f9b5db 100644
--- a/src/cmd/compile/internal/ssa/shift_test.go
+++ b/src/cmd/compile/internal/ssa/shift_test.go
@@ -41,7 +41,7 @@ func makeConstShiftFunc(c *Config, amount int64, op Op, typ Type) fun {
Valu("load", OpLoad, typ, 0, nil, "argptr", "mem"),
Valu("c", OpConst64, TypeUInt64, amount, nil),
Valu("shift", op, typ, 0, nil, "load", "c"),
- Valu("store", OpStore, TypeMem, 8, nil, "resptr", "shift", "mem"),
+ Valu("store", OpStore, TypeMem, 8, TypeUInt64, "resptr", "shift", "mem"),
Exit("store")))
Compile(fun.f)
return fun
@@ -101,7 +101,7 @@ func makeShiftExtensionFunc(c *Config, amount int64, lshift, rshift Op, typ Type
Valu("c", OpConst64, TypeUInt64, amount, nil),
Valu("lshift", lshift, typ, 0, nil, "load", "c"),
Valu("rshift", rshift, typ, 0, nil, "lshift", "c"),
- Valu("store", OpStore, TypeMem, 8, nil, "resptr", "rshift", "mem"),
+ Valu("store", OpStore, TypeMem, 8, TypeUInt64, "resptr", "rshift", "mem"),
Exit("store")))
Compile(fun.f)
return fun
diff --git a/src/cmd/compile/internal/ssa/writebarrier.go b/src/cmd/compile/internal/ssa/writebarrier.go
index 961cf2b2a8..ad53089d80 100644
--- a/src/cmd/compile/internal/ssa/writebarrier.go
+++ b/src/cmd/compile/internal/ssa/writebarrier.go
@@ -9,8 +9,25 @@ import (
"cmd/internal/src"
)
-// writebarrier expands write barrier ops (StoreWB, MoveWB, etc.) into
-// branches and runtime calls, like
+// needwb returns whether we need write barrier for store op v.
+// v must be Store/Move/Zero.
+func needwb(v *Value) bool {
+ t, ok := v.Aux.(Type)
+ if !ok {
+ v.Fatalf("store aux is not a type: %s", v.LongString())
+ }
+ if !t.HasPointer() {
+ return false
+ }
+ if IsStackAddr(v.Args[0]) {
+ return false // write on stack doesn't need write barrier
+ }
+ return true
+}
+
+// writebarrier pass inserts write barriers for store ops (Store, Move, Zero)
+// when necessary (the condition above). It rewrites store ops to branches
+// and runtime calls, like
//
// if writeBarrier.enabled {
// writebarrierptr(ptr, val)
@@ -18,11 +35,13 @@ import (
// *ptr = val
// }
//
-// If ptr is an address of a stack slot, write barrier will be removed
-// and a normal store will be used.
// A sequence of WB stores for many pointer fields of a single type will
// be emitted together, with a single branch.
func writebarrier(f *Func) {
+ if !f.Config.fe.UseWriteBarrier() {
+ return
+ }
+
var sb, sp, wbaddr, const0 *Value
var writebarrierptr, typedmemmove, typedmemclr *obj.LSym
var stores, after []*Value
@@ -30,27 +49,23 @@ func writebarrier(f *Func) {
var storeNumber []int32
for _, b := range f.Blocks { // range loop is safe since the blocks we added contain no stores to expand
- // rewrite write barrier for stack writes to ordinary Store/Move/Zero,
- // record presence of non-stack WB ops.
+ // first, identify all the stores that need to insert a write barrier.
+ // mark them with WB ops temporarily. record presence of WB ops.
hasStore := false
for _, v := range b.Values {
switch v.Op {
- case OpStoreWB, OpMoveWB, OpZeroWB:
- if IsStackAddr(v.Args[0]) {
+ case OpStore, OpMove, OpZero:
+ if needwb(v) {
switch v.Op {
- case OpStoreWB:
- v.Op = OpStore
- case OpMoveWB:
- v.Op = OpMove
- v.Aux = nil
- case OpZeroWB:
- v.Op = OpZero
- v.Aux = nil
+ case OpStore:
+ v.Op = OpStoreWB
+ case OpMove:
+ v.Op = OpMoveWB
+ case OpZero:
+ v.Op = OpZeroWB
}
- continue
+ hasStore = true
}
- hasStore = true
- break
}
}
if !hasStore {
diff --git a/src/cmd/compile/internal/ssa/writebarrier_test.go b/src/cmd/compile/internal/ssa/writebarrier_test.go
index c2ba695971..aaaa35a064 100644
--- a/src/cmd/compile/internal/ssa/writebarrier_test.go
+++ b/src/cmd/compile/internal/ssa/writebarrier_test.go
@@ -17,8 +17,8 @@ func TestWriteBarrierStoreOrder(t *testing.T) {
Valu("sp", OpSP, TypeInvalid, 0, nil),
Valu("v", OpConstNil, ptrType, 0, nil),
Valu("addr1", OpAddr, ptrType, 0, nil, "sb"),
- Valu("wb2", OpStoreWB, TypeMem, 8, nil, "addr1", "v", "wb1"),
- Valu("wb1", OpStoreWB, TypeMem, 8, nil, "addr1", "v", "start"), // wb1 and wb2 are out of order
+ Valu("wb2", OpStore, TypeMem, 8, ptrType, "addr1", "v", "wb1"),
+ Valu("wb1", OpStore, TypeMem, 8, ptrType, "addr1", "v", "start"), // wb1 and wb2 are out of order
Goto("exit")),
Bloc("exit",
Exit("wb2")))