path: root/src/cmd/compile/internal/ssa/rewritegeneric.go
author    Keith Randall <khr@golang.org> 2021-01-07 14:57:53 -0800
committer Keith Randall <khr@golang.org> 2021-01-08 05:00:06 +0000
commit    304f769ffc68e64244266b3aadbf91e6738c0064 (patch)
tree      fe443868201a208fac12c485149f7922416d4c57 /src/cmd/compile/internal/ssa/rewritegeneric.go
parent    ae9771713383c1ee01a544cd50cfdbc22841380a (diff)
cmd/compile: don't short-circuit copies whose source is volatile
Current optimization: When we copy a->b and then b->c, we might as well
copy a->c instead of b->c (then b might be dead and go away).

*Except* if a is a volatile location (might be clobbered by a call). In
that case, we really do want to copy a immediately, because there might
be a call before we can do the a->c copy. User calls can't happen in
between, because the rule matches up the memory states. But calls
inserted for memory barriers, particularly runtime.typedmemmove, can.

(I guess we could introduce a register-calling-convention version of
runtime.typedmemmove, but that seems a bigger change than this one.)

Fixes #43570

Change-Id: Ifa518bb1a6f3a8dd46c352d4fd54ea9713b3eb1a
Reviewed-on: https://go-review.googlesource.com/c/go/+/282492
Trust: Keith Randall <khr@golang.org>
Trust: Josh Bleecher Snyder <josharian@gmail.com>
Run-TryBot: Keith Randall <khr@golang.org>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Josh Bleecher Snyder <josharian@gmail.com>
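To make the hazard concrete, here is a sketch (an illustration written for this note, not the regression test from the CL) of the kind of Go code where the short-circuit could go wrong:

    package main

    // T has a pointer field, so storing a T into a global needs a write
    // barrier, which the compiler may lower to a call to
    // runtime.typedmemmove.
    type T struct {
        p *int
        a [16]int // bulk payload so the assignment lowers to an SSA Move
    }

    //go:noinline
    func get() T { return T{} }

    var sink T // global: stores into it take the write-barrier path

    func main() {
        // get()'s result arrives in a stack slot that later calls may
        // reuse -- a "volatile" location in the SSA rules' sense. The
        // assignment copies it to a temporary (a->b) and then to sink
        // (b->c). Short-circuiting to a->c is unsafe here: the b->c copy
        // may itself become a runtime.typedmemmove call whose argument
        // area overlaps slot a, clobbering it before it is read.
        sink = get()
    }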
Diffstat (limited to 'src/cmd/compile/internal/ssa/rewritegeneric.go')
-rw-r--r--  src/cmd/compile/internal/ssa/rewritegeneric.go | 8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index 4cb9a8f328..958e24d29f 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -13637,7 +13637,7 @@ func rewriteValuegeneric_OpMove(v *Value) bool {
return true
}
// match: (Move {t1} [s] dst tmp1 midmem:(Move {t2} [s] tmp2 src _))
- // cond: t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
+ // cond: t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
// result: (Move {t1} [s] dst src midmem)
for {
s := auxIntToInt64(v.AuxInt)
@@ -13651,7 +13651,7 @@ func rewriteValuegeneric_OpMove(v *Value) bool {
t2 := auxToType(midmem.Aux)
src := midmem.Args[1]
tmp2 := midmem.Args[0]
- if !(t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))) {
+ if !(t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))) {
break
}
v.reset(OpMove)
@@ -13661,7 +13661,7 @@ func rewriteValuegeneric_OpMove(v *Value) bool {
return true
}
// match: (Move {t1} [s] dst tmp1 midmem:(VarDef (Move {t2} [s] tmp2 src _)))
- // cond: t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
+ // cond: t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
// result: (Move {t1} [s] dst src midmem)
for {
s := auxIntToInt64(v.AuxInt)
@@ -13679,7 +13679,7 @@ func rewriteValuegeneric_OpMove(v *Value) bool {
t2 := auxToType(midmem_0.Aux)
src := midmem_0.Args[1]
tmp2 := midmem_0.Args[0]
- if !(t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))) {
+ if !(t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))) {
break
}
v.reset(OpMove)
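For reference, rewritegeneric.go is machine-generated; the hand-written
source of these rules lives in gen/generic.rules (path as of this
release; an assumption worth checking against the full CL). Reconstructed
from the match/cond/result comments above, the updated first rule would
read roughly:

    (Move {t1} [s] dst tmp1 midmem:(Move {t2} [s] tmp2 src _))
        && t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2)
        && isStackPtr(src) && !isVolatile(src)
        && disjoint(src, s, tmp2, s)
        && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
        => (Move {t1} [s] dst src midmem)

The VarDef variant in the second hunk adds the same !isVolatile(src)
conjunct to its condition.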