author    Keith Randall <khr@golang.org>    2022-09-17 18:52:35 -0400
committer Cherry Mui <cherryyz@google.com>  2022-09-21 20:24:17 +0000
commit    e0a364b061060aa0e0ed14df5cdce284e878d605 (patch)
tree      e78b95a661f6cc308b2a2daf0b733aa76939c00a
parent    d5a5db3b41e45a1c5ec39a1a8f91d863e9e7770d (diff)
[release-branch.go1.18] cmd/compile: avoid using destination pointer base type in memmove optimization
The type of the source and destination of a memmove call isn't always
accurate. It will always be a pointer (or an unsafe.Pointer), but the
base type might not be accurate.

This comes about because multiple copies of a pointer with different
base types are coalesced into a single value. In the failing example,
the IData selector of the input argument is a *[32]byte in one branch
of the type switch, and a *[]byte in the other branch. During the
expand_calls pass both IDatas become just copies of the input register.
Those copies are deduped, and an arbitrary one wins (in this case,
*[]byte is the unfortunate winner).

Generally, an op v can rely on v.Type during rewrite rules. But relying
on v.Args[i].Type is discouraged.

Fixes #55151

Change-Id: I348fd9accf2058a87cd191eec01d39cda612f120
Reviewed-on: https://go-review.googlesource.com/c/go/+/431496
TryBot-Result: Gopher Robot <gobot@golang.org>
Reviewed-by: Cherry Mui <cherryyz@google.com>
Run-TryBot: Keith Randall <khr@golang.org>
Reviewed-by: Cuong Manh Le <cuong.manhle.vn@gmail.com>
Reviewed-by: Keith Randall <khr@google.com>
(cherry picked from commit e283473ebbebf4a80db166e7e852d03c5cff1a61)
Reviewed-on: https://go-review.googlesource.com/c/go/+/431918
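To make the failure concrete, here is a condensed sketch of
test/fixedbugs/issue55122.go (added below); it is an illustration, not
part of the patch. The real test loops many times and allocates garbage
between iterations.

    package main

    func main() {
    	var x [32]byte
    	for i := range x {
    		x[i] = 99
    	}
    	g(&x)
    	if x != ([32]byte{}) {
    		panic("the *[32]byte case failed to zero all 32 bytes")
    	}
    }

    //go:noinline
    func g(x interface{}) {
    	// The IData selector of x is typed *[32]byte in one case and
    	// *[]byte in the other. During expand_calls both selectors
    	// become copies of the same input register; the copies are
    	// deduped and an arbitrary one wins (here, *[]byte). The old
    	// rules below then typed the inlined Move as dst.Type.Elem(),
    	// i.e. []byte, misdescribing a 32-byte pointer-free copy.
    	switch e := x.(type) {
    	case *[32]byte:
    		var c [32]byte
    		*e = c
    	case *[]byte:
    		*e = nil
    	}
    }

The fix sidesteps the unreliable argument type entirely: the inlined
Move is typed as raw bytes (types.Types[types.TUINT8]), which the new
rule comment notes is safe because only the size matters and the memory
being moved contains no pointers.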
-rw-r--r--  src/cmd/compile/internal/ssa/gen/generic.rules  | 17
-rw-r--r--  src/cmd/compile/internal/ssa/rewritegeneric.go  | 50
-rw-r--r--  test/fixedbugs/issue55122.go                    | 42
-rw-r--r--  test/fixedbugs/issue55122b.go                   | 43
4 files changed, 119 insertions(+), 33 deletions(-)
diff --git a/src/cmd/compile/internal/ssa/gen/generic.rules b/src/cmd/compile/internal/ssa/gen/generic.rules
index 6dbe9b47d0..b78d2aac3c 100644
--- a/src/cmd/compile/internal/ssa/gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/gen/generic.rules
@@ -2082,7 +2082,13 @@

// Inline small or disjoint runtime.memmove calls with constant length.
// See the comment in op Move in genericOps.go for discussion of the type.
-
+//
+// Note that we've lost any knowledge of the type and alignment requirements
+// of the source and destination. We only know the size, and that the type
+// contains no pointers.
+// The type of the move is not necessarily v.Args[0].Type().Elem()!
+// See issue 55122 for details.
+//
// Because expand calls runs after prove, constants useful to this pattern may not appear.
// Both versions need to exist; the memory and register variants.
//
@@ -2090,31 +2096,28 @@
(SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const(64|32) [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
&& sz >= 0
&& isSameCall(sym, "runtime.memmove")
- && t.IsPtr() // avoids TUNSAFEPTR, see issue 30061
&& s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1
&& isInlinableMemmove(dst, src, int64(sz), config)
&& clobber(s1, s2, s3, call)
- => (Move {t.Elem()} [int64(sz)] dst src mem)
+ => (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)

// Match post-expansion calls, register version.
(SelectN [0] call:(StaticCall {sym} dst src (Const(64|32) [sz]) mem))
&& sz >= 0
&& call.Uses == 1 // this will exclude all calls with results
&& isSameCall(sym, "runtime.memmove")
- && dst.Type.IsPtr() // avoids TUNSAFEPTR, see issue 30061
&& isInlinableMemmove(dst, src, int64(sz), config)
&& clobber(call)
- => (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
+ => (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)

// Match pre-expansion calls.
(SelectN [0] call:(StaticLECall {sym} dst src (Const(64|32) [sz]) mem))
&& sz >= 0
&& call.Uses == 1 // this will exclude all calls with results
&& isSameCall(sym, "runtime.memmove")
- && dst.Type.IsPtr() // avoids TUNSAFEPTR, see issue 30061
&& isInlinableMemmove(dst, src, int64(sz), config)
&& clobber(call)
- => (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
+ => (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)

// De-virtualize late-expanded interface calls into late-expanded static calls.
// Note that (ITab (IMake)) doesn't get rewritten until after the first opt pass,
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index fbf227562a..434402b55d 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -21053,8 +21053,8 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
return true
}
// match: (SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const64 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
- // cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
- // result: (Move {t.Elem()} [int64(sz)] dst src mem)
+ // cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
+ // result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
@@ -21084,21 +21084,20 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
if s3.Op != OpStore {
break
}
- t := auxToType(s3.Aux)
mem := s3.Args[2]
dst := s3.Args[1]
- if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
+ if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
break
}
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(int64(sz))
- v.Aux = typeToAux(t.Elem())
+ v.Aux = typeToAux(types.Types[types.TUINT8])
v.AddArg3(dst, src, mem)
return true
}
// match: (SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const32 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
- // cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
- // result: (Move {t.Elem()} [int64(sz)] dst src mem)
+ // cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
+ // result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
@@ -21128,21 +21127,20 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
if s3.Op != OpStore {
break
}
- t := auxToType(s3.Aux)
mem := s3.Args[2]
dst := s3.Args[1]
- if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
+ if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
break
}
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(int64(sz))
- v.Aux = typeToAux(t.Elem())
+ v.Aux = typeToAux(types.Types[types.TUINT8])
v.AddArg3(dst, src, mem)
return true
}
// match: (SelectN [0] call:(StaticCall {sym} dst src (Const64 [sz]) mem))
- // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
- // result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
+ // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
+ // result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
@@ -21160,18 +21158,18 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
break
}
sz := auxIntToInt64(call_2.AuxInt)
- if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
+ if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
break
}
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(int64(sz))
- v.Aux = typeToAux(dst.Type.Elem())
+ v.Aux = typeToAux(types.Types[types.TUINT8])
v.AddArg3(dst, src, mem)
return true
}
// match: (SelectN [0] call:(StaticCall {sym} dst src (Const32 [sz]) mem))
- // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
- // result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
+ // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
+ // result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
@@ -21189,18 +21187,18 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
break
}
sz := auxIntToInt32(call_2.AuxInt)
- if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
+ if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
break
}
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(int64(sz))
- v.Aux = typeToAux(dst.Type.Elem())
+ v.Aux = typeToAux(types.Types[types.TUINT8])
v.AddArg3(dst, src, mem)
return true
}
// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const64 [sz]) mem))
- // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
- // result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
+ // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
+ // result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
@@ -21218,18 +21216,18 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
break
}
sz := auxIntToInt64(call_2.AuxInt)
- if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
+ if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
break
}
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(int64(sz))
- v.Aux = typeToAux(dst.Type.Elem())
+ v.Aux = typeToAux(types.Types[types.TUINT8])
v.AddArg3(dst, src, mem)
return true
}
// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const32 [sz]) mem))
- // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
- // result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
+ // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
+ // result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
@@ -21247,12 +21245,12 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
break
}
sz := auxIntToInt32(call_2.AuxInt)
- if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
+ if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
break
}
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(int64(sz))
- v.Aux = typeToAux(dst.Type.Elem())
+ v.Aux = typeToAux(types.Types[types.TUINT8])
v.AddArg3(dst, src, mem)
return true
}
diff --git a/test/fixedbugs/issue55122.go b/test/fixedbugs/issue55122.go
new file mode 100644
index 0000000000..24da89dcb6
--- /dev/null
+++ b/test/fixedbugs/issue55122.go
@@ -0,0 +1,42 @@
+// run
+
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+func main() {
+ for i := 0; i < 10000; i++ {
+ h(i)
+ sink = make([]byte, 1024) // generate some garbage
+ }
+}
+
+func h(iter int) {
+ var x [32]byte
+ for i := 0; i < 32; i++ {
+ x[i] = 99
+ }
+ g(&x)
+ if x == ([32]byte{}) {
+ return
+ }
+ for i := 0; i < 32; i++ {
+ println(x[i])
+ }
+ panic(iter)
+}
+
+//go:noinline
+func g(x interface{}) {
+ switch e := x.(type) {
+ case *[32]byte:
+ var c [32]byte
+ *e = c
+ case *[]byte:
+ *e = nil
+ }
+}
+
+var sink []byte
diff --git a/test/fixedbugs/issue55122b.go b/test/fixedbugs/issue55122b.go
new file mode 100644
index 0000000000..a911a9f1b6
--- /dev/null
+++ b/test/fixedbugs/issue55122b.go
@@ -0,0 +1,43 @@
+// run
+
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+func main() {
+ for i := 0; i < 10000; i++ {
+ h(i)
+ sink = make([]byte, 1024) // generate some garbage
+ }
+}
+
+func h(iter int) {
+ var x [32]byte
+ for i := 0; i < 32; i++ {
+ x[i] = 99
+ }
+ g(&x)
+ if x == ([32]byte{}) {
+ return
+ }
+ for i := 0; i < 32; i++ {
+ println(x[i])
+ }
+ panic(iter)
+}
+
+//go:noinline
+func g(x interface{}) {
+ switch e := x.(type) {
+ case *[32]byte:
+ var c [32]byte
+ *e = c
+ case *[3]*byte:
+ var c [3]*byte
+ *e = c
+ }
+}
+
+var sink []byte