aboutsummaryrefslogtreecommitdiff
path: root/src/cmd/compile/internal/ssa/rewriteMIPS64.go
diff options
context:
space:
mode:
authorDavid Chase <drchase@google.com>2019-11-02 23:57:11 -0400
committerDavid Chase <drchase@google.com>2020-04-03 17:24:48 +0000
commit47ade08141b23cfeafed92943e16012d5dc5eb8b (patch)
tree0e38e95ad3de867e5151cf61e811c56999401a69 /src/cmd/compile/internal/ssa/rewriteMIPS64.go
parent3103495fa9bb166c9d0c56fbf3cd2146f32aef57 (diff)
downloadgo-47ade08141b23cfeafed92943e16012d5dc5eb8b.tar.gz
go-47ade08141b23cfeafed92943e16012d5dc5eb8b.zip
cmd/compile: add logging for large (>= 128 byte) copies
For 1.15, unless someone really wants it in 1.14. A performance-sensitive user thought this would be useful, though "large" was not well-defined. If 128 is large, there are 139 static instances of "large" copies in the compiler itself. Includes test. Change-Id: I81f20c62da59d37072429f3a22c1809e6fb2946d Reviewed-on: https://go-review.googlesource.com/c/go/+/205066 Run-TryBot: David Chase <drchase@google.com> TryBot-Result: Gobot Gobot <gobot@golang.org> Reviewed-by: Cherry Zhang <cherryyz@google.com>
Diffstat (limited to 'src/cmd/compile/internal/ssa/rewriteMIPS64.go')
-rw-r--r--src/cmd/compile/internal/ssa/rewriteMIPS64.go8
1 file changed, 4 insertions, 4 deletions
diff --git a/src/cmd/compile/internal/ssa/rewriteMIPS64.go b/src/cmd/compile/internal/ssa/rewriteMIPS64.go
index 125c33d002..360fdebe85 100644
--- a/src/cmd/compile/internal/ssa/rewriteMIPS64.go
+++ b/src/cmd/compile/internal/ssa/rewriteMIPS64.go
@@ -5533,7 +5533,7 @@ func rewriteValueMIPS64_OpMove(v *Value) bool {
return true
}
// match: (Move [s] {t} dst src mem)
- // cond: s%8 == 0 && s >= 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice
+ // cond: s%8 == 0 && s >= 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
for {
s := v.AuxInt
@@ -5541,7 +5541,7 @@ func rewriteValueMIPS64_OpMove(v *Value) bool {
dst := v_0
src := v_1
mem := v_2
- if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice) {
+ if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
break
}
v.reset(OpMIPS64DUFFCOPY)
@@ -5550,7 +5550,7 @@ func rewriteValueMIPS64_OpMove(v *Value) bool {
return true
}
// match: (Move [s] {t} dst src mem)
- // cond: s > 24 || t.(*types.Type).Alignment()%8 != 0
+ // cond: s > 24 && logLargeCopy(v, s) || t.(*types.Type).Alignment()%8 != 0
// result: (LoweredMove [t.(*types.Type).Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
for {
s := v.AuxInt
@@ -5558,7 +5558,7 @@ func rewriteValueMIPS64_OpMove(v *Value) bool {
dst := v_0
src := v_1
mem := v_2
- if !(s > 24 || t.(*types.Type).Alignment()%8 != 0) {
+ if !(s > 24 && logLargeCopy(v, s) || t.(*types.Type).Alignment()%8 != 0) {
break
}
v.reset(OpMIPS64LoweredMove)