about summary refs log tree commit diff
path: root/src/cmd/compile/internal/ssa/rewriteARM64.go
diff options
context:
space:
mode:
author    David Chase <drchase@google.com>  2019-11-02 23:57:11 -0400
committer David Chase <drchase@google.com>  2020-04-03 17:24:48 +0000
commit 47ade08141b23cfeafed92943e16012d5dc5eb8b (patch)
tree 0e38e95ad3de867e5151cf61e811c56999401a69 /src/cmd/compile/internal/ssa/rewriteARM64.go
parent 3103495fa9bb166c9d0c56fbf3cd2146f32aef57 (diff)
download go-47ade08141b23cfeafed92943e16012d5dc5eb8b.tar.gz
download go-47ade08141b23cfeafed92943e16012d5dc5eb8b.zip
cmd/compile: add logging for large (>= 128 byte) copies
For 1.15, unless someone really wants it in 1.14. A performance-sensitive user thought this would be useful, though "large" was not well-defined. If 128 is large, there are 139 static instances of "large" copies in the compiler itself. Includes test. Change-Id: I81f20c62da59d37072429f3a22c1809e6fb2946d Reviewed-on: https://go-review.googlesource.com/c/go/+/205066 Run-TryBot: David Chase <drchase@google.com> TryBot-Result: Gobot Gobot <gobot@golang.org> Reviewed-by: Cherry Zhang <cherryyz@google.com>
Diffstat (limited to 'src/cmd/compile/internal/ssa/rewriteARM64.go')
-rw-r--r--  src/cmd/compile/internal/ssa/rewriteARM64.go  12
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/src/cmd/compile/internal/ssa/rewriteARM64.go b/src/cmd/compile/internal/ssa/rewriteARM64.go
index 4d1ed50d9b..f6f77e9bb6 100644
--- a/src/cmd/compile/internal/ssa/rewriteARM64.go
+++ b/src/cmd/compile/internal/ssa/rewriteARM64.go
@@ -23742,14 +23742,14 @@ func rewriteValueARM64_OpMove(v *Value) bool {
return true
}
// match: (Move [s] dst src mem)
- // cond: s > 32 && s <= 16*64 && s%16 == 8 && !config.noDuffDevice
+ // cond: s > 32 && s <= 16*64 && s%16 == 8 && !config.noDuffDevice && logLargeCopy(v, s)
// result: (MOVDstore [s-8] dst (MOVDload [s-8] src mem) (DUFFCOPY <types.TypeMem> [8*(64-(s-8)/16)] dst src mem))
for {
s := v.AuxInt
dst := v_0
src := v_1
mem := v_2
- if !(s > 32 && s <= 16*64 && s%16 == 8 && !config.noDuffDevice) {
+ if !(s > 32 && s <= 16*64 && s%16 == 8 && !config.noDuffDevice && logLargeCopy(v, s)) {
break
}
v.reset(OpARM64MOVDstore)
@@ -23764,14 +23764,14 @@ func rewriteValueARM64_OpMove(v *Value) bool {
return true
}
// match: (Move [s] dst src mem)
- // cond: s > 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice
+ // cond: s > 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
// result: (DUFFCOPY [8 * (64 - s/16)] dst src mem)
for {
s := v.AuxInt
dst := v_0
src := v_1
mem := v_2
- if !(s > 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice) {
+ if !(s > 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
break
}
v.reset(OpARM64DUFFCOPY)
@@ -23780,14 +23780,14 @@ func rewriteValueARM64_OpMove(v *Value) bool {
return true
}
// match: (Move [s] dst src mem)
- // cond: s > 24 && s%8 == 0
+ // cond: s > 24 && s%8 == 0 && logLargeCopy(v, s)
// result: (LoweredMove dst src (ADDconst <src.Type> src [s-8]) mem)
for {
s := v.AuxInt
dst := v_0
src := v_1
mem := v_2
- if !(s > 24 && s%8 == 0) {
+ if !(s > 24 && s%8 == 0 && logLargeCopy(v, s)) {
break
}
v.reset(OpARM64LoweredMove)