author    Cuong Manh Le <cuong.manhle.vn@gmail.com>  2020-04-21 03:47:02 +0700
committer Cuong Manh Le <cuong.manhle.vn@gmail.com>  2020-04-21 03:45:26 +0000
commit    7f8fda3c0bfcc832934aa75b4580b4c780e1cb52 (patch)
tree      2dc5b6cbc36e8a0a1ac0e2ffb1e523e1695d754a /src/cmd/compile/internal/ssa/rewritegeneric.go
parent    0f14c2a0423ed2416db38ca24edfcb47f46c4e4c (diff)
download  go-7f8fda3c0bfcc832934aa75b4580b4c780e1cb52.tar.gz
          go-7f8fda3c0bfcc832934aa75b4580b4c780e1cb52.zip
cmd/compile: use proper magnitude for (x>>c) & uppermask = 0
This is a followup to CL 228860, which rewrote the shift rules to use typed aux. That CL introduced the nlz* functions to refactor the left shift rules. While doing that, we realized there was a bug in the old rules for both the right and left shift variants, but fixed it only for the left shift rules. This CL fixes the bug for the right shift rules.

Passes toolstash-check.

Change-Id: Id8f2158b1b66c9e87f3fdeaa7ae3e35dc0666f8b
Reviewed-on: https://go-review.googlesource.com/c/go/+/229137
Run-TryBot: Cuong Manh Le <cuong.manhle.vn@gmail.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Emmanuel Odeke <emm.odeke@gmail.com>
Reviewed-by: Keith Randall <khr@golang.org>
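For reference, the rule being fixed folds (x>>c) & m to a constant zero once the unsigned right shift by c discards every bit position that the mask m can still see, so the bound must use the width of the type (16, 32, or 8), not 64. Below is a minimal standalone sketch, not part of this CL, of the corrected 16-bit condition; a local ntz16 built on math/bits.TrailingZeros16 stands in for the compiler's helper of the same name.

package main

import (
	"fmt"
	"math/bits"
)

// ntz16 stands in for the compiler helper: number of trailing zero bits
// in a 16-bit mask.
func ntz16(m int16) int {
	return bits.TrailingZeros16(uint16(m))
}

func main() {
	const m = int16(-256) // upper mask 0xff00, so ntz16(m) == 8
	for c := uint64(0); c < 16; c++ {
		// x = 0xffff is the worst case: it keeps the most bits after the shift.
		worstCase := int16(uint16(0xffff)>>c) & m
		// Corrected condition from this CL: c >= 16 - ntz16(m).
		folds := c >= uint64(16-ntz16(m))
		fmt.Printf("c=%2d  (x>>c)&m == 0 for all x: %v  condition says: %v\n",
			c, worstCase == 0, folds)
	}
}

With the old bound of 64-ntz16(m) the condition could never hold for a sensible 16-bit shift count, so the rewrite simply never fired; the fix is a missed-optimization repair, which is why it only changes when the rule applies, not what it produces.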
Diffstat (limited to 'src/cmd/compile/internal/ssa/rewritegeneric.go')
-rw-r--r--  src/cmd/compile/internal/ssa/rewritegeneric.go  |  12
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index 214de6448d..6d42f3a36c 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -1571,7 +1571,7 @@ func rewriteValuegeneric_OpAnd16(v *Value) bool {
break
}
// match: (And16 (Const16 [m]) (Rsh16Ux64 _ (Const64 [c])))
- // cond: c >= int64(64-ntz16(m))
+ // cond: c >= int64(16-ntz16(m))
// result: (Const16 [0])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
@@ -1588,7 +1588,7 @@ func rewriteValuegeneric_OpAnd16(v *Value) bool {
continue
}
c := auxIntToInt64(v_1_1.AuxInt)
- if !(c >= int64(64-ntz16(m))) {
+ if !(c >= int64(16-ntz16(m))) {
continue
}
v.reset(OpConst16)
@@ -1769,7 +1769,7 @@ func rewriteValuegeneric_OpAnd32(v *Value) bool {
break
}
// match: (And32 (Const32 [m]) (Rsh32Ux64 _ (Const64 [c])))
- // cond: c >= int64(64-ntz32(m))
+ // cond: c >= int64(32-ntz32(m))
// result: (Const32 [0])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
@@ -1786,7 +1786,7 @@ func rewriteValuegeneric_OpAnd32(v *Value) bool {
continue
}
c := auxIntToInt64(v_1_1.AuxInt)
- if !(c >= int64(64-ntz32(m))) {
+ if !(c >= int64(32-ntz32(m))) {
continue
}
v.reset(OpConst32)
@@ -2165,7 +2165,7 @@ func rewriteValuegeneric_OpAnd8(v *Value) bool {
break
}
// match: (And8 (Const8 [m]) (Rsh8Ux64 _ (Const64 [c])))
- // cond: c >= int64(64-ntz8(m))
+ // cond: c >= int64(8-ntz8(m))
// result: (Const8 [0])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
@@ -2182,7 +2182,7 @@ func rewriteValuegeneric_OpAnd8(v *Value) bool {
continue
}
c := auxIntToInt64(v_1_1.AuxInt)
- if !(c >= int64(64-ntz8(m))) {
+ if !(c >= int64(8-ntz8(m))) {
continue
}
v.reset(OpConst8)
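As a quick sanity check, again not part of the commit, a brute-force sketch over all 8-bit values confirms that the corrected bound c >= 8-ntz8(m) is exactly the point at which the And8/Rsh8Ux64 pattern above folds to zero; bits.TrailingZeros8 stands in for the compiler's ntz8.

package main

import (
	"fmt"
	"math/bits"
)

// isZeroForAll reports whether (x>>c)&m == 0 for every possible uint8 x.
func isZeroForAll(m uint8, c uint) bool {
	for x := 0; x <= 0xff; x++ {
		if (uint8(x)>>c)&m != 0 {
			return false
		}
	}
	return true
}

func main() {
	// Check that the corrected bound c >= 8-ntz8(m) predicts exactly when
	// the And8/Rsh8Ux64 pattern folds to zero, for every nonzero 8-bit mask.
	for m := 1; m <= 0xff; m++ {
		ntz := bits.TrailingZeros8(uint8(m))
		for c := uint(0); c < 8; c++ {
			predicted := int(c) >= 8-ntz
			if predicted != isZeroForAll(uint8(m), c) {
				fmt.Printf("mismatch: m=%#x c=%d\n", m, c)
				return
			}
		}
	}
	fmt.Println("corrected bound matches brute force for all 8-bit masks")
}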