author     Keith Randall <khr@golang.org>    2021-09-19 13:51:37 -0700
committer  Keith Randall <khr@golang.org>    2021-09-20 16:25:36 +0000
commit     6f35430faa622bad63a527c97e87b9291e480688 (patch)
tree       a022e734eb9c30da548910da8960d10efbbc21fa
parent     2d9b4864201d76bd30b55a7d74d73a24cc3ae165 (diff)
cmd/compile: allow rotates to be merged with logical ops on arm64
Fixes #48002

Change-Id: Ie3a157d55b291f5ac2ef4845e6ce4fefd84fc642
Reviewed-on: https://go-review.googlesource.com/c/go/+/350912
Trust: Keith Randall <khr@golang.org>
Run-TryBot: Keith Randall <khr@golang.org>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Cherry Mui <cherryyz@google.com>
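For context, here is a standalone sketch (illustrative only, not part of the CL; the function names are made up) of the source patterns that this change lets the compiler lower to a single logical instruction with a rotated register operand on arm64. The new test/codegen cases at the bottom of this diff check for exactly these instruction forms (AND, MVN, BIC and friends with a @> operand).

    package main

    import (
        "fmt"
        "math/bits"
    )

    // a & (b rotated left by 13): with this CL, a single AND with a ROR-shifted operand.
    func andRot(a, b uint64) uint64 { return a & bits.RotateLeft64(b, 13) }

    // ^(rotate): a single MVN with a ROR-shifted operand.
    func notRot(a uint64) uint64 { return ^bits.RotateLeft64(a, 13) }

    // a &^ (rotate): a single BIC with a ROR-shifted operand.
    func bicRot(a, b uint64) uint64 { return a &^ bits.RotateLeft64(b, 13) }

    func main() {
        fmt.Printf("%#x %#x %#x\n", andRot(0xff, 1), notRot(1), bicRot(0xff, 1))
    }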
-rw-r--r--  src/cmd/compile/internal/arm64/ssa.go        |  11
-rw-r--r--  src/cmd/compile/internal/ssa/gen/ARM64.rules |  26
-rw-r--r--  src/cmd/compile/internal/ssa/gen/ARM64Ops.go |   8
-rw-r--r--  src/cmd/compile/internal/ssa/opGen.go        | 124
-rw-r--r--  src/cmd/compile/internal/ssa/rewrite.go      |   4
-rw-r--r--  src/cmd/compile/internal/ssa/rewriteARM64.go | 502
-rw-r--r--  src/cmd/internal/obj/arm64/a.out.go          |   7
-rw-r--r--  test/codegen/rotate.go                       |  23
8 files changed, 702 insertions(+), 3 deletions(-)
diff --git a/src/cmd/compile/internal/arm64/ssa.go b/src/cmd/compile/internal/arm64/ssa.go
index c5e7a08914..4770a0c488 100644
--- a/src/cmd/compile/internal/arm64/ssa.go
+++ b/src/cmd/compile/internal/arm64/ssa.go
@@ -315,6 +315,8 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
genshift(s, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_LR, v.AuxInt)
case ssa.OpARM64MVNshiftRA, ssa.OpARM64NEGshiftRA:
genshift(s, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
+ case ssa.OpARM64MVNshiftRO:
+ genshift(s, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_ROR, v.AuxInt)
case ssa.OpARM64ADDshiftLL,
ssa.OpARM64SUBshiftLL,
ssa.OpARM64ANDshiftLL,
@@ -342,6 +344,13 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
ssa.OpARM64ORNshiftRA,
ssa.OpARM64BICshiftRA:
genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
+ case ssa.OpARM64ANDshiftRO,
+ ssa.OpARM64ORshiftRO,
+ ssa.OpARM64XORshiftRO,
+ ssa.OpARM64EONshiftRO,
+ ssa.OpARM64ORNshiftRO,
+ ssa.OpARM64BICshiftRO:
+ genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_ROR, v.AuxInt)
case ssa.OpARM64MOVDconst:
p := s.Prog(v.Op.Asm())
p.From.Type = obj.TYPE_CONST
@@ -389,6 +398,8 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_LR, v.AuxInt)
case ssa.OpARM64CMPshiftRA, ssa.OpARM64CMNshiftRA, ssa.OpARM64TSTshiftRA:
genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_AR, v.AuxInt)
+ case ssa.OpARM64TSTshiftRO:
+ genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_ROR, v.AuxInt)
case ssa.OpARM64MOVDaddr:
p := s.Prog(arm64.AMOVD)
p.From.Type = obj.TYPE_ADDR
diff --git a/src/cmd/compile/internal/ssa/gen/ARM64.rules b/src/cmd/compile/internal/ssa/gen/ARM64.rules
index 23ae3b1286..b722d1cf1c 100644
--- a/src/cmd/compile/internal/ssa/gen/ARM64.rules
+++ b/src/cmd/compile/internal/ssa/gen/ARM64.rules
@@ -1601,6 +1601,7 @@
(MVN x:(SLLconst [c] y)) && clobberIfDead(x) => (MVNshiftLL [c] y)
(MVN x:(SRLconst [c] y)) && clobberIfDead(x) => (MVNshiftRL [c] y)
(MVN x:(SRAconst [c] y)) && clobberIfDead(x) => (MVNshiftRA [c] y)
+(MVN x:(RORconst [c] y)) && clobberIfDead(x) => (MVNshiftRO [c] y)
(ADD x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (ADDshiftLL x0 y [c])
(ADD x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (ADDshiftRL x0 y [c])
(ADD x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (ADDshiftRA x0 y [c])
@@ -1610,21 +1611,27 @@
(AND x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (ANDshiftLL x0 y [c])
(AND x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (ANDshiftRL x0 y [c])
(AND x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (ANDshiftRA x0 y [c])
+(AND x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (ANDshiftRO x0 y [c])
(OR x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (ORshiftLL x0 y [c]) // useful for combined load
(OR x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (ORshiftRL x0 y [c])
(OR x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (ORshiftRA x0 y [c])
+(OR x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (ORshiftRO x0 y [c])
(XOR x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (XORshiftLL x0 y [c])
(XOR x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (XORshiftRL x0 y [c])
(XOR x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (XORshiftRA x0 y [c])
+(XOR x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (XORshiftRO x0 y [c])
(BIC x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (BICshiftLL x0 y [c])
(BIC x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (BICshiftRL x0 y [c])
(BIC x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (BICshiftRA x0 y [c])
+(BIC x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (BICshiftRO x0 y [c])
(ORN x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (ORNshiftLL x0 y [c])
(ORN x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (ORNshiftRL x0 y [c])
(ORN x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (ORNshiftRA x0 y [c])
+(ORN x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (ORNshiftRO x0 y [c])
(EON x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (EONshiftLL x0 y [c])
(EON x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (EONshiftRL x0 y [c])
(EON x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (EONshiftRA x0 y [c])
+(EON x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (EONshiftRO x0 y [c])
(CMP x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (CMPshiftLL x0 y [c])
(CMP x0:(SLLconst [c] y) x1) && clobberIfDead(x0) => (InvertFlags (CMPshiftLL x1 y [c]))
(CMP x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (CMPshiftRL x0 y [c])
@@ -1637,6 +1644,7 @@
(TST x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (TSTshiftLL x0 y [c])
(TST x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (TSTshiftRL x0 y [c])
(TST x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (TSTshiftRA x0 y [c])
+(TST x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (TSTshiftRO x0 y [c])
// prefer *const ops to *shift ops
(ADDshiftLL (MOVDconst [c]) x [d]) => (ADDconst [c] (SLLconst <x.Type> x [d]))
@@ -1645,12 +1653,15 @@
(ANDshiftLL (MOVDconst [c]) x [d]) => (ANDconst [c] (SLLconst <x.Type> x [d]))
(ANDshiftRL (MOVDconst [c]) x [d]) => (ANDconst [c] (SRLconst <x.Type> x [d]))
(ANDshiftRA (MOVDconst [c]) x [d]) => (ANDconst [c] (SRAconst <x.Type> x [d]))
+(ANDshiftRO (MOVDconst [c]) x [d]) => (ANDconst [c] (RORconst <x.Type> x [d]))
(ORshiftLL (MOVDconst [c]) x [d]) => (ORconst [c] (SLLconst <x.Type> x [d]))
(ORshiftRL (MOVDconst [c]) x [d]) => (ORconst [c] (SRLconst <x.Type> x [d]))
(ORshiftRA (MOVDconst [c]) x [d]) => (ORconst [c] (SRAconst <x.Type> x [d]))
+(ORshiftRO (MOVDconst [c]) x [d]) => (ORconst [c] (RORconst <x.Type> x [d]))
(XORshiftLL (MOVDconst [c]) x [d]) => (XORconst [c] (SLLconst <x.Type> x [d]))
(XORshiftRL (MOVDconst [c]) x [d]) => (XORconst [c] (SRLconst <x.Type> x [d]))
(XORshiftRA (MOVDconst [c]) x [d]) => (XORconst [c] (SRAconst <x.Type> x [d]))
+(XORshiftRO (MOVDconst [c]) x [d]) => (XORconst [c] (RORconst <x.Type> x [d]))
(CMPshiftLL (MOVDconst [c]) x [d]) => (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d])))
(CMPshiftRL (MOVDconst [c]) x [d]) => (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d])))
(CMPshiftRA (MOVDconst [c]) x [d]) => (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d])))
@@ -1660,11 +1671,13 @@
(TSTshiftLL (MOVDconst [c]) x [d]) => (TSTconst [c] (SLLconst <x.Type> x [d]))
(TSTshiftRL (MOVDconst [c]) x [d]) => (TSTconst [c] (SRLconst <x.Type> x [d]))
(TSTshiftRA (MOVDconst [c]) x [d]) => (TSTconst [c] (SRAconst <x.Type> x [d]))
+(TSTshiftRO (MOVDconst [c]) x [d]) => (TSTconst [c] (RORconst <x.Type> x [d]))
// constant folding in *shift ops
(MVNshiftLL (MOVDconst [c]) [d]) => (MOVDconst [^int64(uint64(c)<<uint64(d))])
(MVNshiftRL (MOVDconst [c]) [d]) => (MOVDconst [^int64(uint64(c)>>uint64(d))])
(MVNshiftRA (MOVDconst [c]) [d]) => (MOVDconst [^(c>>uint64(d))])
+(MVNshiftRO (MOVDconst [c]) [d]) => (MOVDconst [^rotateRight64(c, d)])
(NEGshiftLL (MOVDconst [c]) [d]) => (MOVDconst [-int64(uint64(c)<<uint64(d))])
(NEGshiftRL (MOVDconst [c]) [d]) => (MOVDconst [-int64(uint64(c)>>uint64(d))])
(NEGshiftRA (MOVDconst [c]) [d]) => (MOVDconst [-(c>>uint64(d))])
@@ -1677,21 +1690,27 @@
(ANDshiftLL x (MOVDconst [c]) [d]) => (ANDconst x [int64(uint64(c)<<uint64(d))])
(ANDshiftRL x (MOVDconst [c]) [d]) => (ANDconst x [int64(uint64(c)>>uint64(d))])
(ANDshiftRA x (MOVDconst [c]) [d]) => (ANDconst x [c>>uint64(d)])
+(ANDshiftRO x (MOVDconst [c]) [d]) => (ANDconst x [rotateRight64(c, d)])
(ORshiftLL x (MOVDconst [c]) [d]) => (ORconst x [int64(uint64(c)<<uint64(d))])
(ORshiftRL x (MOVDconst [c]) [d]) => (ORconst x [int64(uint64(c)>>uint64(d))])
(ORshiftRA x (MOVDconst [c]) [d]) => (ORconst x [c>>uint64(d)])
+(ORshiftRO x (MOVDconst [c]) [d]) => (ORconst x [rotateRight64(c, d)])
(XORshiftLL x (MOVDconst [c]) [d]) => (XORconst x [int64(uint64(c)<<uint64(d))])
(XORshiftRL x (MOVDconst [c]) [d]) => (XORconst x [int64(uint64(c)>>uint64(d))])
(XORshiftRA x (MOVDconst [c]) [d]) => (XORconst x [c>>uint64(d)])
+(XORshiftRO x (MOVDconst [c]) [d]) => (XORconst x [rotateRight64(c, d)])
(BICshiftLL x (MOVDconst [c]) [d]) => (ANDconst x [^int64(uint64(c)<<uint64(d))])
(BICshiftRL x (MOVDconst [c]) [d]) => (ANDconst x [^int64(uint64(c)>>uint64(d))])
(BICshiftRA x (MOVDconst [c]) [d]) => (ANDconst x [^(c>>uint64(d))])
+(BICshiftRO x (MOVDconst [c]) [d]) => (ANDconst x [^rotateRight64(c, d)])
(ORNshiftLL x (MOVDconst [c]) [d]) => (ORconst x [^int64(uint64(c)<<uint64(d))])
(ORNshiftRL x (MOVDconst [c]) [d]) => (ORconst x [^int64(uint64(c)>>uint64(d))])
(ORNshiftRA x (MOVDconst [c]) [d]) => (ORconst x [^(c>>uint64(d))])
+(ORNshiftRO x (MOVDconst [c]) [d]) => (ORconst x [^rotateRight64(c, d)])
(EONshiftLL x (MOVDconst [c]) [d]) => (XORconst x [^int64(uint64(c)<<uint64(d))])
(EONshiftRL x (MOVDconst [c]) [d]) => (XORconst x [^int64(uint64(c)>>uint64(d))])
(EONshiftRA x (MOVDconst [c]) [d]) => (XORconst x [^(c>>uint64(d))])
+(EONshiftRO x (MOVDconst [c]) [d]) => (XORconst x [^rotateRight64(c, d)])
(CMPshiftLL x (MOVDconst [c]) [d]) => (CMPconst x [int64(uint64(c)<<uint64(d))])
(CMPshiftRL x (MOVDconst [c]) [d]) => (CMPconst x [int64(uint64(c)>>uint64(d))])
(CMPshiftRA x (MOVDconst [c]) [d]) => (CMPconst x [c>>uint64(d)])
@@ -1701,6 +1720,7 @@
(TSTshiftLL x (MOVDconst [c]) [d]) => (TSTconst x [int64(uint64(c)<<uint64(d))])
(TSTshiftRL x (MOVDconst [c]) [d]) => (TSTconst x [int64(uint64(c)>>uint64(d))])
(TSTshiftRA x (MOVDconst [c]) [d]) => (TSTconst x [c>>uint64(d)])
+(TSTshiftRO x (MOVDconst [c]) [d]) => (TSTconst x [rotateRight64(c, d)])
// simplification with *shift ops
(SUBshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVDconst [0])
@@ -1709,21 +1729,27 @@
(ANDshiftLL x y:(SLLconst x [c]) [d]) && c==d => y
(ANDshiftRL x y:(SRLconst x [c]) [d]) && c==d => y
(ANDshiftRA x y:(SRAconst x [c]) [d]) && c==d => y
+(ANDshiftRO x y:(RORconst x [c]) [d]) && c==d => y
(ORshiftLL x y:(SLLconst x [c]) [d]) && c==d => y
(ORshiftRL x y:(SRLconst x [c]) [d]) && c==d => y
(ORshiftRA x y:(SRAconst x [c]) [d]) && c==d => y
+(ORshiftRO x y:(RORconst x [c]) [d]) && c==d => y
(XORshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVDconst [0])
(XORshiftRL x (SRLconst x [c]) [d]) && c==d => (MOVDconst [0])
(XORshiftRA x (SRAconst x [c]) [d]) && c==d => (MOVDconst [0])
+(XORshiftRO x (RORconst x [c]) [d]) && c==d => (MOVDconst [0])
(BICshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVDconst [0])
(BICshiftRL x (SRLconst x [c]) [d]) && c==d => (MOVDconst [0])
(BICshiftRA x (SRAconst x [c]) [d]) && c==d => (MOVDconst [0])
+(BICshiftRO x (RORconst x [c]) [d]) && c==d => (MOVDconst [0])
(EONshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVDconst [-1])
(EONshiftRL x (SRLconst x [c]) [d]) && c==d => (MOVDconst [-1])
(EONshiftRA x (SRAconst x [c]) [d]) && c==d => (MOVDconst [-1])
+(EONshiftRO x (RORconst x [c]) [d]) && c==d => (MOVDconst [-1])
(ORNshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVDconst [-1])
(ORNshiftRL x (SRLconst x [c]) [d]) && c==d => (MOVDconst [-1])
(ORNshiftRA x (SRAconst x [c]) [d]) && c==d => (MOVDconst [-1])
+(ORNshiftRO x (RORconst x [c]) [d]) && c==d => (MOVDconst [-1])
// Generate rotates with const shift
(ADDshiftLL [c] (SRLconst x [64-c]) x) => (RORconst [64-c] x)
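As a standalone sanity check (illustrative only; ror and the variable names are made up here) of the new constant-folding rules above: rewriting x op (c ROR d) into a *const op whose constant is rotateRight64(c, d), complemented for BIC/ORN, preserves the value. rotateRight64 is a local copy of the helper this CL adds to rewrite.go further down.

    package main

    import (
        "fmt"
        "math/bits"
    )

    // Local copy of the helper added to rewrite.go by this CL.
    func rotateRight64(v, rotate int64) int64 {
        return int64(bits.RotateLeft64(uint64(v), int(-rotate)))
    }

    // ror computes c ROR d straight from the definition, as an independent reference.
    func ror(c int64, d uint) int64 {
        return int64(uint64(c)>>d | uint64(c)<<(64-d))
    }

    func main() {
        x, c, d := int64(0x123456789abcdef0), int64(0x00ff00ff00ff00ff), int64(8)

        // (ANDshiftRO x (MOVDconst [c]) [d]) => (ANDconst x [rotateRight64(c, d)])
        fmt.Println(x&ror(c, uint(d)) == x&rotateRight64(c, d))

        // (BICshiftRO x (MOVDconst [c]) [d]) => (ANDconst x [^rotateRight64(c, d)])
        fmt.Println(x&^ror(c, uint(d)) == x & ^rotateRight64(c, d))

        // (ORNshiftRO x (MOVDconst [c]) [d]) => (ORconst x [^rotateRight64(c, d)])
        fmt.Println(x|^ror(c, uint(d)) == x | ^rotateRight64(c, d))
    }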
diff --git a/src/cmd/compile/internal/ssa/gen/ARM64Ops.go b/src/cmd/compile/internal/ssa/gen/ARM64Ops.go
index 41231fb809..e3ebb6e1af 100644
--- a/src/cmd/compile/internal/ssa/gen/ARM64Ops.go
+++ b/src/cmd/compile/internal/ssa/gen/ARM64Ops.go
@@ -302,6 +302,7 @@ func init() {
{name: "MVNshiftLL", argLength: 1, reg: gp11, asm: "MVN", aux: "Int64"}, // ^(arg0<<auxInt), auxInt should be in the range 0 to 63.
{name: "MVNshiftRL", argLength: 1, reg: gp11, asm: "MVN", aux: "Int64"}, // ^(arg0>>auxInt), unsigned shift, auxInt should be in the range 0 to 63.
{name: "MVNshiftRA", argLength: 1, reg: gp11, asm: "MVN", aux: "Int64"}, // ^(arg0>>auxInt), signed shift, auxInt should be in the range 0 to 63.
+ {name: "MVNshiftRO", argLength: 1, reg: gp11, asm: "MVN", aux: "Int64"}, // ^(arg0 ROR auxInt), signed shift, auxInt should be in the range 0 to 63.
{name: "NEGshiftLL", argLength: 1, reg: gp11, asm: "NEG", aux: "Int64"}, // -(arg0<<auxInt), auxInt should be in the range 0 to 63.
{name: "NEGshiftRL", argLength: 1, reg: gp11, asm: "NEG", aux: "Int64"}, // -(arg0>>auxInt), unsigned shift, auxInt should be in the range 0 to 63.
{name: "NEGshiftRA", argLength: 1, reg: gp11, asm: "NEG", aux: "Int64"}, // -(arg0>>auxInt), signed shift, auxInt should be in the range 0 to 63.
@@ -314,21 +315,27 @@ func init() {
{name: "ANDshiftLL", argLength: 2, reg: gp21, asm: "AND", aux: "Int64"}, // arg0 & (arg1<<auxInt), auxInt should be in the range 0 to 63.
{name: "ANDshiftRL", argLength: 2, reg: gp21, asm: "AND", aux: "Int64"}, // arg0 & (arg1>>auxInt), unsigned shift, auxInt should be in the range 0 to 63.
{name: "ANDshiftRA", argLength: 2, reg: gp21, asm: "AND", aux: "Int64"}, // arg0 & (arg1>>auxInt), signed shift, auxInt should be in the range 0 to 63.
+ {name: "ANDshiftRO", argLength: 2, reg: gp21, asm: "AND", aux: "Int64"}, // arg0 & (arg1 ROR auxInt), signed shift, auxInt should be in the range 0 to 63.
{name: "ORshiftLL", argLength: 2, reg: gp21, asm: "ORR", aux: "Int64"}, // arg0 | arg1<<auxInt, auxInt should be in the range 0 to 63.
{name: "ORshiftRL", argLength: 2, reg: gp21, asm: "ORR", aux: "Int64"}, // arg0 | arg1>>auxInt, unsigned shift, auxInt should be in the range 0 to 63.
{name: "ORshiftRA", argLength: 2, reg: gp21, asm: "ORR", aux: "Int64"}, // arg0 | arg1>>auxInt, signed shift, auxInt should be in the range 0 to 63.
+ {name: "ORshiftRO", argLength: 2, reg: gp21, asm: "ORR", aux: "Int64"}, // arg0 | arg1 ROR auxInt, signed shift, auxInt should be in the range 0 to 63.
{name: "XORshiftLL", argLength: 2, reg: gp21, asm: "EOR", aux: "Int64"}, // arg0 ^ arg1<<auxInt, auxInt should be in the range 0 to 63.
{name: "XORshiftRL", argLength: 2, reg: gp21, asm: "EOR", aux: "Int64"}, // arg0 ^ arg1>>auxInt, unsigned shift, auxInt should be in the range 0 to 63.
{name: "XORshiftRA", argLength: 2, reg: gp21, asm: "EOR", aux: "Int64"}, // arg0 ^ arg1>>auxInt, signed shift, auxInt should be in the range 0 to 63.
+ {name: "XORshiftRO", argLength: 2, reg: gp21, asm: "EOR", aux: "Int64"}, // arg0 ^ arg1 ROR auxInt, signed shift, auxInt should be in the range 0 to 63.
{name: "BICshiftLL", argLength: 2, reg: gp21, asm: "BIC", aux: "Int64"}, // arg0 &^ (arg1<<auxInt), auxInt should be in the range 0 to 63.
{name: "BICshiftRL", argLength: 2, reg: gp21, asm: "BIC", aux: "Int64"}, // arg0 &^ (arg1>>auxInt), unsigned shift, auxInt should be in the range 0 to 63.
{name: "BICshiftRA", argLength: 2, reg: gp21, asm: "BIC", aux: "Int64"}, // arg0 &^ (arg1>>auxInt), signed shift, auxInt should be in the range 0 to 63.
+ {name: "BICshiftRO", argLength: 2, reg: gp21, asm: "BIC", aux: "Int64"}, // arg0 &^ (arg1 ROR auxInt), signed shift, auxInt should be in the range 0 to 63.
{name: "EONshiftLL", argLength: 2, reg: gp21, asm: "EON", aux: "Int64"}, // arg0 ^ ^(arg1<<auxInt), auxInt should be in the range 0 to 63.
{name: "EONshiftRL", argLength: 2, reg: gp21, asm: "EON", aux: "Int64"}, // arg0 ^ ^(arg1>>auxInt), unsigned shift, auxInt should be in the range 0 to 63.
{name: "EONshiftRA", argLength: 2, reg: gp21, asm: "EON", aux: "Int64"}, // arg0 ^ ^(arg1>>auxInt), signed shift, auxInt should be in the range 0 to 63.
+ {name: "EONshiftRO", argLength: 2, reg: gp21, asm: "EON", aux: "Int64"}, // arg0 ^ ^(arg1 ROR auxInt), signed shift, auxInt should be in the range 0 to 63.
{name: "ORNshiftLL", argLength: 2, reg: gp21, asm: "ORN", aux: "Int64"}, // arg0 | ^(arg1<<auxInt), auxInt should be in the range 0 to 63.
{name: "ORNshiftRL", argLength: 2, reg: gp21, asm: "ORN", aux: "Int64"}, // arg0 | ^(arg1>>auxInt), unsigned shift, auxInt should be in the range 0 to 63.
{name: "ORNshiftRA", argLength: 2, reg: gp21, asm: "ORN", aux: "Int64"}, // arg0 | ^(arg1>>auxInt), signed shift, auxInt should be in the range 0 to 63.
+ {name: "ORNshiftRO", argLength: 2, reg: gp21, asm: "ORN", aux: "Int64"}, // arg0 | ^(arg1 ROR auxInt), signed shift, auxInt should be in the range 0 to 63.
{name: "CMPshiftLL", argLength: 2, reg: gp2flags, asm: "CMP", aux: "Int64", typ: "Flags"}, // arg0 compare to arg1<<auxInt, auxInt should be in the range 0 to 63.
{name: "CMPshiftRL", argLength: 2, reg: gp2flags, asm: "CMP", aux: "Int64", typ: "Flags"}, // arg0 compare to arg1>>auxInt, unsigned shift, auxInt should be in the range 0 to 63.
{name: "CMPshiftRA", argLength: 2, reg: gp2flags, asm: "CMP", aux: "Int64", typ: "Flags"}, // arg0 compare to arg1>>auxInt, signed shift, auxInt should be in the range 0 to 63.
@@ -338,6 +345,7 @@ func init() {
{name: "TSTshiftLL", argLength: 2, reg: gp2flags, asm: "TST", aux: "Int64", typ: "Flags"}, // (arg0 & arg1<<auxInt) compare to 0, auxInt should be in the range 0 to 63.
{name: "TSTshiftRL", argLength: 2, reg: gp2flags, asm: "TST", aux: "Int64", typ: "Flags"}, // (arg0 & arg1>>auxInt) compare to 0, unsigned shift, auxInt should be in the range 0 to 63.
{name: "TSTshiftRA", argLength: 2, reg: gp2flags, asm: "TST", aux: "Int64", typ: "Flags"}, // (arg0 & arg1>>auxInt) compare to 0, signed shift, auxInt should be in the range 0 to 63.
+ {name: "TSTshiftRO", argLength: 2, reg: gp2flags, asm: "TST", aux: "Int64", typ: "Flags"}, // (arg0 & arg1 ROR auxInt) compare to 0, signed shift, auxInt should be in the range 0 to 63.
// bitfield ops
// for all bitfield ops lsb is auxInt>>8, width is auxInt&0xff
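The comments above define the semantics of the new ops. The following standalone sketch (not compiler code; ror and the lower-case function names are made up) restates them in plain Go over (arg0, arg1, auxInt).

    package main

    import (
        "fmt"
        "math/bits"
    )

    // ror is a 64-bit rotate right by amt, standing in for "ROR auxInt" above.
    func ror(x uint64, amt int) uint64 { return bits.RotateLeft64(x, -amt) }

    // Reference semantics of the new ops.
    func mvnShiftRO(a0 uint64, aux int) uint64     { return ^ror(a0, aux) }      // MVNshiftRO
    func andShiftRO(a0, a1 uint64, aux int) uint64 { return a0 & ror(a1, aux) }  // ANDshiftRO; TSTshiftRO compares this to 0
    func orShiftRO(a0, a1 uint64, aux int) uint64  { return a0 | ror(a1, aux) }  // ORshiftRO
    func xorShiftRO(a0, a1 uint64, aux int) uint64 { return a0 ^ ror(a1, aux) }  // XORshiftRO
    func bicShiftRO(a0, a1 uint64, aux int) uint64 { return a0 &^ ror(a1, aux) } // BICshiftRO
    func eonShiftRO(a0, a1 uint64, aux int) uint64 { return a0 ^ ^ror(a1, aux) } // EONshiftRO
    func ornShiftRO(a0, a1 uint64, aux int) uint64 { return a0 | ^ror(a1, aux) } // ORNshiftRO

    func main() {
        fmt.Printf("%#x %#x\n", andShiftRO(0xffff, 0x0ff0, 4), mvnShiftRO(1, 1))
    }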
diff --git a/src/cmd/compile/internal/ssa/opGen.go b/src/cmd/compile/internal/ssa/opGen.go
index 4ec13a6629..eb7e4b91bb 100644
--- a/src/cmd/compile/internal/ssa/opGen.go
+++ b/src/cmd/compile/internal/ssa/opGen.go
@@ -1412,6 +1412,7 @@ const (
OpARM64MVNshiftLL
OpARM64MVNshiftRL
OpARM64MVNshiftRA
+ OpARM64MVNshiftRO
OpARM64NEGshiftLL
OpARM64NEGshiftRL
OpARM64NEGshiftRA
@@ -1424,21 +1425,27 @@ const (
OpARM64ANDshiftLL
OpARM64ANDshiftRL
OpARM64ANDshiftRA
+ OpARM64ANDshiftRO
OpARM64ORshiftLL
OpARM64ORshiftRL
OpARM64ORshiftRA
+ OpARM64ORshiftRO
OpARM64XORshiftLL
OpARM64XORshiftRL
OpARM64XORshiftRA
+ OpARM64XORshiftRO
OpARM64BICshiftLL
OpARM64BICshiftRL
OpARM64BICshiftRA
+ OpARM64BICshiftRO
OpARM64EONshiftLL
OpARM64EONshiftRL
OpARM64EONshiftRA
+ OpARM64EONshiftRO
OpARM64ORNshiftLL
OpARM64ORNshiftRL
OpARM64ORNshiftRA
+ OpARM64ORNshiftRO
OpARM64CMPshiftLL
OpARM64CMPshiftRL
OpARM64CMPshiftRA
@@ -1448,6 +1455,7 @@ const (
OpARM64TSTshiftLL
OpARM64TSTshiftRL
OpARM64TSTshiftRA
+ OpARM64TSTshiftRO
OpARM64BFI
OpARM64BFXIL
OpARM64SBFIZ
@@ -18808,6 +18816,20 @@ var opcodeTable = [...]opInfo{
},
},
{
+ name: "MVNshiftRO",
+ auxType: auxInt64,
+ argLen: 1,
+ asm: arm64.AMVN,
+ reg: regInfo{
+ inputs: []inputInfo{
+ {0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ },
+ outputs: []outputInfo{
+ {0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
+ },
+ },
+ },
+ {
name: "NEGshiftLL",
auxType: auxInt64,
argLen: 1,
@@ -18985,6 +19007,21 @@ var opcodeTable = [...]opInfo{
},
},
{
+ name: "ANDshiftRO",
+ auxType: auxInt64,
+ argLen: 2,
+ asm: arm64.AAND,
+ reg: regInfo{
+ inputs: []inputInfo{
+ {0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ {1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ },
+ outputs: []outputInfo{
+ {0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
+ },
+ },
+ },
+ {
name: "ORshiftLL",
auxType: auxInt64,
argLen: 2,
@@ -19030,6 +19067,21 @@ var opcodeTable = [...]opInfo{
},
},
{
+ name: "ORshiftRO",
+ auxType: auxInt64,
+ argLen: 2,
+ asm: arm64.AORR,
+ reg: regInfo{
+ inputs: []inputInfo{
+ {0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ {1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ },
+ outputs: []outputInfo{
+ {0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
+ },
+ },
+ },
+ {
name: "XORshiftLL",
auxType: auxInt64,
argLen: 2,
@@ -19075,6 +19127,21 @@ var opcodeTable = [...]opInfo{
},
},
{
+ name: "XORshiftRO",
+ auxType: auxInt64,
+ argLen: 2,
+ asm: arm64.AEOR,
+ reg: regInfo{
+ inputs: []inputInfo{
+ {0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ {1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ },
+ outputs: []outputInfo{
+ {0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
+ },
+ },
+ },
+ {
name: "BICshiftLL",
auxType: auxInt64,
argLen: 2,
@@ -19120,6 +19187,21 @@ var opcodeTable = [...]opInfo{
},
},
{
+ name: "BICshiftRO",
+ auxType: auxInt64,
+ argLen: 2,
+ asm: arm64.ABIC,
+ reg: regInfo{
+ inputs: []inputInfo{
+ {0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ {1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ },
+ outputs: []outputInfo{
+ {0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
+ },
+ },
+ },
+ {
name: "EONshiftLL",
auxType: auxInt64,
argLen: 2,
@@ -19165,6 +19247,21 @@ var opcodeTable = [...]opInfo{
},
},
{
+ name: "EONshiftRO",
+ auxType: auxInt64,
+ argLen: 2,
+ asm: arm64.AEON,
+ reg: regInfo{
+ inputs: []inputInfo{
+ {0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ {1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ },
+ outputs: []outputInfo{
+ {0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
+ },
+ },
+ },
+ {
name: "ORNshiftLL",
auxType: auxInt64,
argLen: 2,
@@ -19210,6 +19307,21 @@ var opcodeTable = [...]opInfo{
},
},
{
+ name: "ORNshiftRO",
+ auxType: auxInt64,
+ argLen: 2,
+ asm: arm64.AORN,
+ reg: regInfo{
+ inputs: []inputInfo{
+ {0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ {1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ },
+ outputs: []outputInfo{
+ {0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
+ },
+ },
+ },
+ {
name: "CMPshiftLL",
auxType: auxInt64,
argLen: 2,
@@ -19318,6 +19430,18 @@ var opcodeTable = [...]opInfo{
},
},
{
+ name: "TSTshiftRO",
+ auxType: auxInt64,
+ argLen: 2,
+ asm: arm64.ATST,
+ reg: regInfo{
+ inputs: []inputInfo{
+ {0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ {1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
+ },
+ },
+ },
+ {
name: "BFI",
auxType: auxARM64BitField,
argLen: 2,
diff --git a/src/cmd/compile/internal/ssa/rewrite.go b/src/cmd/compile/internal/ssa/rewrite.go
index 162d42773a..79f9efaebf 100644
--- a/src/cmd/compile/internal/ssa/rewrite.go
+++ b/src/cmd/compile/internal/ssa/rewrite.go
@@ -1567,6 +1567,10 @@ func rotateLeft32(v, rotate int64) int64 {
return int64(bits.RotateLeft32(uint32(v), int(rotate)))
}
+func rotateRight64(v, rotate int64) int64 {
+ return int64(bits.RotateLeft64(uint64(v), int(-rotate)))
+}
+
// encodes the lsb and width for arm(64) bitfield ops into the expected auxInt format.
func armBFAuxInt(lsb, width int64) arm64BitField {
if lsb < 0 || lsb > 63 {
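A note on the helper (illustrative check below, not part of the CL): math/bits documents that rotating right by k is RotateLeft64(x, -k), so passing int(-rotate) needs no extra masking of the rotate amount.

    package main

    import (
        "fmt"
        "math/bits"
    )

    func main() {
        x := uint64(0x8000000000000001)
        fmt.Println(bits.RotateLeft64(x, -13) == bits.RotateLeft64(x, 51)) // rotate right 13 == rotate left 64-13
        fmt.Println(bits.RotateLeft64(x, -13) == x>>13|x<<51)              // and matches the shift/or definition
    }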
diff --git a/src/cmd/compile/internal/ssa/rewriteARM64.go b/src/cmd/compile/internal/ssa/rewriteARM64.go
index 661714307a..06cc5a62b2 100644
--- a/src/cmd/compile/internal/ssa/rewriteARM64.go
+++ b/src/cmd/compile/internal/ssa/rewriteARM64.go
@@ -29,6 +29,8 @@ func rewriteValueARM64(v *Value) bool {
return rewriteValueARM64_OpARM64ANDshiftRA(v)
case OpARM64ANDshiftRL:
return rewriteValueARM64_OpARM64ANDshiftRL(v)
+ case OpARM64ANDshiftRO:
+ return rewriteValueARM64_OpARM64ANDshiftRO(v)
case OpARM64BIC:
return rewriteValueARM64_OpARM64BIC(v)
case OpARM64BICshiftLL:
@@ -37,6 +39,8 @@ func rewriteValueARM64(v *Value) bool {
return rewriteValueARM64_OpARM64BICshiftRA(v)
case OpARM64BICshiftRL:
return rewriteValueARM64_OpARM64BICshiftRL(v)
+ case OpARM64BICshiftRO:
+ return rewriteValueARM64_OpARM64BICshiftRO(v)
case OpARM64CMN:
return rewriteValueARM64_OpARM64CMN(v)
case OpARM64CMNW:
@@ -89,6 +93,8 @@ func rewriteValueARM64(v *Value) bool {
return rewriteValueARM64_OpARM64EONshiftRA(v)
case OpARM64EONshiftRL:
return rewriteValueARM64_OpARM64EONshiftRL(v)
+ case OpARM64EONshiftRO:
+ return rewriteValueARM64_OpARM64EONshiftRO(v)
case OpARM64Equal:
return rewriteValueARM64_OpARM64Equal(v)
case OpARM64FADDD:
@@ -295,6 +301,8 @@ func rewriteValueARM64(v *Value) bool {
return rewriteValueARM64_OpARM64MVNshiftRA(v)
case OpARM64MVNshiftRL:
return rewriteValueARM64_OpARM64MVNshiftRL(v)
+ case OpARM64MVNshiftRO:
+ return rewriteValueARM64_OpARM64MVNshiftRO(v)
case OpARM64NEG:
return rewriteValueARM64_OpARM64NEG(v)
case OpARM64NEGshiftLL:
@@ -315,6 +323,8 @@ func rewriteValueARM64(v *Value) bool {
return rewriteValueARM64_OpARM64ORNshiftRA(v)
case OpARM64ORNshiftRL:
return rewriteValueARM64_OpARM64ORNshiftRL(v)
+ case OpARM64ORNshiftRO:
+ return rewriteValueARM64_OpARM64ORNshiftRO(v)
case OpARM64ORconst:
return rewriteValueARM64_OpARM64ORconst(v)
case OpARM64ORshiftLL:
@@ -323,6 +333,8 @@ func rewriteValueARM64(v *Value) bool {
return rewriteValueARM64_OpARM64ORshiftRA(v)
case OpARM64ORshiftRL:
return rewriteValueARM64_OpARM64ORshiftRL(v)
+ case OpARM64ORshiftRO:
+ return rewriteValueARM64_OpARM64ORshiftRO(v)
case OpARM64ROR:
return rewriteValueARM64_OpARM64ROR(v)
case OpARM64RORW:
@@ -371,6 +383,8 @@ func rewriteValueARM64(v *Value) bool {
return rewriteValueARM64_OpARM64TSTshiftRA(v)
case OpARM64TSTshiftRL:
return rewriteValueARM64_OpARM64TSTshiftRL(v)
+ case OpARM64TSTshiftRO:
+ return rewriteValueARM64_OpARM64TSTshiftRO(v)
case OpARM64UBFIZ:
return rewriteValueARM64_OpARM64UBFIZ(v)
case OpARM64UBFX:
@@ -393,6 +407,8 @@ func rewriteValueARM64(v *Value) bool {
return rewriteValueARM64_OpARM64XORshiftRA(v)
case OpARM64XORshiftRL:
return rewriteValueARM64_OpARM64XORshiftRL(v)
+ case OpARM64XORshiftRO:
+ return rewriteValueARM64_OpARM64XORshiftRO(v)
case OpAbs:
v.Op = OpARM64FABSD
return true
@@ -2114,6 +2130,28 @@ func rewriteValueARM64_OpARM64AND(v *Value) bool {
}
break
}
+ // match: (AND x0 x1:(RORconst [c] y))
+ // cond: clobberIfDead(x1)
+ // result: (ANDshiftRO x0 y [c])
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ x0 := v_0
+ x1 := v_1
+ if x1.Op != OpARM64RORconst {
+ continue
+ }
+ c := auxIntToInt64(x1.AuxInt)
+ y := x1.Args[0]
+ if !(clobberIfDead(x1)) {
+ continue
+ }
+ v.reset(OpARM64ANDshiftRO)
+ v.AuxInt = int64ToAuxInt(c)
+ v.AddArg2(x0, y)
+ return true
+ }
+ break
+ }
return false
}
func rewriteValueARM64_OpARM64ANDconst(v *Value) bool {
@@ -2403,6 +2441,60 @@ func rewriteValueARM64_OpARM64ANDshiftRL(v *Value) bool {
}
return false
}
+func rewriteValueARM64_OpARM64ANDshiftRO(v *Value) bool {
+ v_1 := v.Args[1]
+ v_0 := v.Args[0]
+ b := v.Block
+ // match: (ANDshiftRO (MOVDconst [c]) x [d])
+ // result: (ANDconst [c] (RORconst <x.Type> x [d]))
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ if v_0.Op != OpARM64MOVDconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ x := v_1
+ v.reset(OpARM64ANDconst)
+ v.AuxInt = int64ToAuxInt(c)
+ v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
+ v0.AuxInt = int64ToAuxInt(d)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (ANDshiftRO x (MOVDconst [c]) [d])
+ // result: (ANDconst x [rotateRight64(c, d)])
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ x := v_0
+ if v_1.Op != OpARM64MOVDconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ v.reset(OpARM64ANDconst)
+ v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
+ v.AddArg(x)
+ return true
+ }
+ // match: (ANDshiftRO x y:(RORconst x [c]) [d])
+ // cond: c==d
+ // result: y
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ x := v_0
+ y := v_1
+ if y.Op != OpARM64RORconst {
+ break
+ }
+ c := auxIntToInt64(y.AuxInt)
+ if x != y.Args[0] || !(c == d) {
+ break
+ }
+ v.copyOf(y)
+ return true
+ }
+ return false
+}
func rewriteValueARM64_OpARM64BIC(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
@@ -2487,6 +2579,25 @@ func rewriteValueARM64_OpARM64BIC(v *Value) bool {
v.AddArg2(x0, y)
return true
}
+ // match: (BIC x0 x1:(RORconst [c] y))
+ // cond: clobberIfDead(x1)
+ // result: (BICshiftRO x0 y [c])
+ for {
+ x0 := v_0
+ x1 := v_1
+ if x1.Op != OpARM64RORconst {
+ break
+ }
+ c := auxIntToInt64(x1.AuxInt)
+ y := x1.Args[0]
+ if !(clobberIfDead(x1)) {
+ break
+ }
+ v.reset(OpARM64BICshiftRO)
+ v.AuxInt = int64ToAuxInt(c)
+ v.AddArg2(x0, y)
+ return true
+ }
return false
}
func rewriteValueARM64_OpARM64BICshiftLL(v *Value) bool {
@@ -2597,6 +2708,42 @@ func rewriteValueARM64_OpARM64BICshiftRL(v *Value) bool {
}
return false
}
+func rewriteValueARM64_OpARM64BICshiftRO(v *Value) bool {
+ v_1 := v.Args[1]
+ v_0 := v.Args[0]
+ // match: (BICshiftRO x (MOVDconst [c]) [d])
+ // result: (ANDconst x [^rotateRight64(c, d)])
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ x := v_0
+ if v_1.Op != OpARM64MOVDconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ v.reset(OpARM64ANDconst)
+ v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
+ v.AddArg(x)
+ return true
+ }
+ // match: (BICshiftRO x (RORconst x [c]) [d])
+ // cond: c==d
+ // result: (MOVDconst [0])
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ x := v_0
+ if v_1.Op != OpARM64RORconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ if x != v_1.Args[0] || !(c == d) {
+ break
+ }
+ v.reset(OpARM64MOVDconst)
+ v.AuxInt = int64ToAuxInt(0)
+ return true
+ }
+ return false
+}
func rewriteValueARM64_OpARM64CMN(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
@@ -3937,6 +4084,25 @@ func rewriteValueARM64_OpARM64EON(v *Value) bool {
v.AddArg2(x0, y)
return true
}
+ // match: (EON x0 x1:(RORconst [c] y))
+ // cond: clobberIfDead(x1)
+ // result: (EONshiftRO x0 y [c])
+ for {
+ x0 := v_0
+ x1 := v_1
+ if x1.Op != OpARM64RORconst {
+ break
+ }
+ c := auxIntToInt64(x1.AuxInt)
+ y := x1.Args[0]
+ if !(clobberIfDead(x1)) {
+ break
+ }
+ v.reset(OpARM64EONshiftRO)
+ v.AuxInt = int64ToAuxInt(c)
+ v.AddArg2(x0, y)
+ return true
+ }
return false
}
func rewriteValueARM64_OpARM64EONshiftLL(v *Value) bool {
@@ -4047,6 +4213,42 @@ func rewriteValueARM64_OpARM64EONshiftRL(v *Value) bool {
}
return false
}
+func rewriteValueARM64_OpARM64EONshiftRO(v *Value) bool {
+ v_1 := v.Args[1]
+ v_0 := v.Args[0]
+ // match: (EONshiftRO x (MOVDconst [c]) [d])
+ // result: (XORconst x [^rotateRight64(c, d)])
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ x := v_0
+ if v_1.Op != OpARM64MOVDconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ v.reset(OpARM64XORconst)
+ v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
+ v.AddArg(x)
+ return true
+ }
+ // match: (EONshiftRO x (RORconst x [c]) [d])
+ // cond: c==d
+ // result: (MOVDconst [-1])
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ x := v_0
+ if v_1.Op != OpARM64RORconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ if x != v_1.Args[0] || !(c == d) {
+ break
+ }
+ v.reset(OpARM64MOVDconst)
+ v.AuxInt = int64ToAuxInt(-1)
+ return true
+ }
+ return false
+}
func rewriteValueARM64_OpARM64Equal(v *Value) bool {
v_0 := v.Args[0]
// match: (Equal (FlagConstant [fc]))
@@ -15615,6 +15817,24 @@ func rewriteValueARM64_OpARM64MVN(v *Value) bool {
v.AddArg(y)
return true
}
+ // match: (MVN x:(RORconst [c] y))
+ // cond: clobberIfDead(x)
+ // result: (MVNshiftRO [c] y)
+ for {
+ x := v_0
+ if x.Op != OpARM64RORconst {
+ break
+ }
+ c := auxIntToInt64(x.AuxInt)
+ y := x.Args[0]
+ if !(clobberIfDead(x)) {
+ break
+ }
+ v.reset(OpARM64MVNshiftRO)
+ v.AuxInt = int64ToAuxInt(c)
+ v.AddArg(y)
+ return true
+ }
return false
}
func rewriteValueARM64_OpARM64MVNshiftLL(v *Value) bool {
@@ -15665,6 +15885,22 @@ func rewriteValueARM64_OpARM64MVNshiftRL(v *Value) bool {
}
return false
}
+func rewriteValueARM64_OpARM64MVNshiftRO(v *Value) bool {
+ v_0 := v.Args[0]
+ // match: (MVNshiftRO (MOVDconst [c]) [d])
+ // result: (MOVDconst [^rotateRight64(c, d)])
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ if v_0.Op != OpARM64MOVDconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ v.reset(OpARM64MOVDconst)
+ v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
+ return true
+ }
+ return false
+}
func rewriteValueARM64_OpARM64NEG(v *Value) bool {
v_0 := v.Args[0]
// match: (NEG (MUL x y))
@@ -15954,6 +16190,28 @@ func rewriteValueARM64_OpARM64OR(v *Value) bool {
}
break
}
+ // match: (OR x0 x1:(RORconst [c] y))
+ // cond: clobberIfDead(x1)
+ // result: (ORshiftRO x0 y [c])
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ x0 := v_0
+ x1 := v_1
+ if x1.Op != OpARM64RORconst {
+ continue
+ }
+ c := auxIntToInt64(x1.AuxInt)
+ y := x1.Args[0]
+ if !(clobberIfDead(x1)) {
+ continue
+ }
+ v.reset(OpARM64ORshiftRO)
+ v.AuxInt = int64ToAuxInt(c)
+ v.AddArg2(x0, y)
+ return true
+ }
+ break
+ }
// match: (OR (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> [cc] (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))))
// cond: cc == OpARM64LessThanU
// result: (ROR x (NEG <t> y))
@@ -17923,6 +18181,25 @@ func rewriteValueARM64_OpARM64ORN(v *Value) bool {
v.AddArg2(x0, y)
return true
}
+ // match: (ORN x0 x1:(RORconst [c] y))
+ // cond: clobberIfDead(x1)
+ // result: (ORNshiftRO x0 y [c])
+ for {
+ x0 := v_0
+ x1 := v_1
+ if x1.Op != OpARM64RORconst {
+ break
+ }
+ c := auxIntToInt64(x1.AuxInt)
+ y := x1.Args[0]
+ if !(clobberIfDead(x1)) {
+ break
+ }
+ v.reset(OpARM64ORNshiftRO)
+ v.AuxInt = int64ToAuxInt(c)
+ v.AddArg2(x0, y)
+ return true
+ }
return false
}
func rewriteValueARM64_OpARM64ORNshiftLL(v *Value) bool {
@@ -18033,6 +18310,42 @@ func rewriteValueARM64_OpARM64ORNshiftRL(v *Value) bool {
}
return false
}
+func rewriteValueARM64_OpARM64ORNshiftRO(v *Value) bool {
+ v_1 := v.Args[1]
+ v_0 := v.Args[0]
+ // match: (ORNshiftRO x (MOVDconst [c]) [d])
+ // result: (ORconst x [^rotateRight64(c, d)])
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ x := v_0
+ if v_1.Op != OpARM64MOVDconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ v.reset(OpARM64ORconst)
+ v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
+ v.AddArg(x)
+ return true
+ }
+ // match: (ORNshiftRO x (RORconst x [c]) [d])
+ // cond: c==d
+ // result: (MOVDconst [-1])
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ x := v_0
+ if v_1.Op != OpARM64RORconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ if x != v_1.Args[0] || !(c == d) {
+ break
+ }
+ v.reset(OpARM64MOVDconst)
+ v.AuxInt = int64ToAuxInt(-1)
+ return true
+ }
+ return false
+}
func rewriteValueARM64_OpARM64ORconst(v *Value) bool {
v_0 := v.Args[0]
// match: (ORconst [0] x)
@@ -19970,6 +20283,60 @@ func rewriteValueARM64_OpARM64ORshiftRL(v *Value) bool {
}
return false
}
+func rewriteValueARM64_OpARM64ORshiftRO(v *Value) bool {
+ v_1 := v.Args[1]
+ v_0 := v.Args[0]
+ b := v.Block
+ // match: (ORshiftRO (MOVDconst [c]) x [d])
+ // result: (ORconst [c] (RORconst <x.Type> x [d]))
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ if v_0.Op != OpARM64MOVDconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ x := v_1
+ v.reset(OpARM64ORconst)
+ v.AuxInt = int64ToAuxInt(c)
+ v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
+ v0.AuxInt = int64ToAuxInt(d)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (ORshiftRO x (MOVDconst [c]) [d])
+ // result: (ORconst x [rotateRight64(c, d)])
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ x := v_0
+ if v_1.Op != OpARM64MOVDconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ v.reset(OpARM64ORconst)
+ v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
+ v.AddArg(x)
+ return true
+ }
+ // match: (ORshiftRO x y:(RORconst x [c]) [d])
+ // cond: c==d
+ // result: y
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ x := v_0
+ y := v_1
+ if y.Op != OpARM64RORconst {
+ break
+ }
+ c := auxIntToInt64(y.AuxInt)
+ if x != y.Args[0] || !(c == d) {
+ break
+ }
+ v.copyOf(y)
+ return true
+ }
+ return false
+}
func rewriteValueARM64_OpARM64ROR(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
@@ -21198,6 +21565,28 @@ func rewriteValueARM64_OpARM64TST(v *Value) bool {
}
break
}
+ // match: (TST x0 x1:(RORconst [c] y))
+ // cond: clobberIfDead(x1)
+ // result: (TSTshiftRO x0 y [c])
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ x0 := v_0
+ x1 := v_1
+ if x1.Op != OpARM64RORconst {
+ continue
+ }
+ c := auxIntToInt64(x1.AuxInt)
+ y := x1.Args[0]
+ if !(clobberIfDead(x1)) {
+ continue
+ }
+ v.reset(OpARM64TSTshiftRO)
+ v.AuxInt = int64ToAuxInt(c)
+ v.AddArg2(x0, y)
+ return true
+ }
+ break
+ }
return false
}
func rewriteValueARM64_OpARM64TSTW(v *Value) bool {
@@ -21364,6 +21753,43 @@ func rewriteValueARM64_OpARM64TSTshiftRL(v *Value) bool {
}
return false
}
+func rewriteValueARM64_OpARM64TSTshiftRO(v *Value) bool {
+ v_1 := v.Args[1]
+ v_0 := v.Args[0]
+ b := v.Block
+ // match: (TSTshiftRO (MOVDconst [c]) x [d])
+ // result: (TSTconst [c] (RORconst <x.Type> x [d]))
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ if v_0.Op != OpARM64MOVDconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ x := v_1
+ v.reset(OpARM64TSTconst)
+ v.AuxInt = int64ToAuxInt(c)
+ v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
+ v0.AuxInt = int64ToAuxInt(d)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (TSTshiftRO x (MOVDconst [c]) [d])
+ // result: (TSTconst x [rotateRight64(c, d)])
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ x := v_0
+ if v_1.Op != OpARM64MOVDconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ v.reset(OpARM64TSTconst)
+ v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
func rewriteValueARM64_OpARM64UBFIZ(v *Value) bool {
v_0 := v.Args[0]
// match: (UBFIZ [bfc] (SLLconst [sc] x))
@@ -21823,6 +22249,28 @@ func rewriteValueARM64_OpARM64XOR(v *Value) bool {
}
break
}
+ // match: (XOR x0 x1:(RORconst [c] y))
+ // cond: clobberIfDead(x1)
+ // result: (XORshiftRO x0 y [c])
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ x0 := v_0
+ x1 := v_1
+ if x1.Op != OpARM64RORconst {
+ continue
+ }
+ c := auxIntToInt64(x1.AuxInt)
+ y := x1.Args[0]
+ if !(clobberIfDead(x1)) {
+ continue
+ }
+ v.reset(OpARM64XORshiftRO)
+ v.AuxInt = int64ToAuxInt(c)
+ v.AddArg2(x0, y)
+ return true
+ }
+ break
+ }
// match: (XOR (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> [cc] (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))))
// cond: cc == OpARM64LessThanU
// result: (ROR x (NEG <t> y))
@@ -22512,6 +22960,60 @@ func rewriteValueARM64_OpARM64XORshiftRL(v *Value) bool {
}
return false
}
+func rewriteValueARM64_OpARM64XORshiftRO(v *Value) bool {
+ v_1 := v.Args[1]
+ v_0 := v.Args[0]
+ b := v.Block
+ // match: (XORshiftRO (MOVDconst [c]) x [d])
+ // result: (XORconst [c] (RORconst <x.Type> x [d]))
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ if v_0.Op != OpARM64MOVDconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ x := v_1
+ v.reset(OpARM64XORconst)
+ v.AuxInt = int64ToAuxInt(c)
+ v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
+ v0.AuxInt = int64ToAuxInt(d)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (XORshiftRO x (MOVDconst [c]) [d])
+ // result: (XORconst x [rotateRight64(c, d)])
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ x := v_0
+ if v_1.Op != OpARM64MOVDconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ v.reset(OpARM64XORconst)
+ v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
+ v.AddArg(x)
+ return true
+ }
+ // match: (XORshiftRO x (RORconst x [c]) [d])
+ // cond: c==d
+ // result: (MOVDconst [0])
+ for {
+ d := auxIntToInt64(v.AuxInt)
+ x := v_0
+ if v_1.Op != OpARM64RORconst {
+ break
+ }
+ c := auxIntToInt64(v_1.AuxInt)
+ if x != v_1.Args[0] || !(c == d) {
+ break
+ }
+ v.reset(OpARM64MOVDconst)
+ v.AuxInt = int64ToAuxInt(0)
+ return true
+ }
+ return false
+}
func rewriteValueARM64_OpAddr(v *Value) bool {
v_0 := v.Args[0]
// match: (Addr {sym} base)
diff --git a/src/cmd/internal/obj/arm64/a.out.go b/src/cmd/internal/obj/arm64/a.out.go
index bf75bb4a89..aa7c54df9a 100644
--- a/src/cmd/internal/obj/arm64/a.out.go
+++ b/src/cmd/internal/obj/arm64/a.out.go
@@ -1060,9 +1060,10 @@ const (
const (
// shift types
- SHIFT_LL = 0 << 22
- SHIFT_LR = 1 << 22
- SHIFT_AR = 2 << 22
+ SHIFT_LL = 0 << 22
+ SHIFT_LR = 1 << 22
+ SHIFT_AR = 2 << 22
+ SHIFT_ROR = 3 << 22
)
// Arrangement for ARM64 SIMD instructions
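SHIFT_ROR extends the existing shift-kind selector (LSL, LSR, ASR) with a fourth kind, ROR, which the new genshift calls in ssa.go pass through. A minimal standalone sketch with the values copied from above; reading them as a two-bit field at bit 22 is an observation about these values, not something this file states.

    package main

    import "fmt"

    // Values copied from the constants above.
    const (
        SHIFT_LL  = 0 << 22 // LSL
        SHIFT_LR  = 1 << 22 // LSR
        SHIFT_AR  = 2 << 22 // ASR
        SHIFT_ROR = 3 << 22 // ROR (added by this CL)
    )

    func main() {
        for _, typ := range []int64{SHIFT_LL, SHIFT_LR, SHIFT_AR, SHIFT_ROR} {
            fmt.Printf("%#08x -> kind %d\n", typ, typ>>22&3) // the kind occupies bits 22-23
        }
    }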
diff --git a/test/codegen/rotate.go b/test/codegen/rotate.go
index 70489a2adc..204efaeafc 100644
--- a/test/codegen/rotate.go
+++ b/test/codegen/rotate.go
@@ -226,3 +226,26 @@ func checkMaskedRotate32(a []uint32, r int) {
a[i] = bits.RotateLeft32(a[3], 4) & 0xFFF00FFF
i++
}
+
+// combined arithmetic and rotate on arm64
+func checkArithmeticWithRotate(a *[1000]uint64) {
+ // arm64: "AND\tR[0-9]+@>51, R[0-9]+, R[0-9]+"
+ a[2] = a[1] & bits.RotateLeft64(a[0], 13)
+ // arm64: "ORR\tR[0-9]+@>51, R[0-9]+, R[0-9]+"
+ a[5] = a[4] | bits.RotateLeft64(a[3], 13)
+ // arm64: "EOR\tR[0-9]+@>51, R[0-9]+, R[0-9]+"
+ a[8] = a[7] ^ bits.RotateLeft64(a[6], 13)
+ // arm64: "MVN\tR[0-9]+@>51, R[0-9]+"
+ a[10] = ^bits.RotateLeft64(a[9], 13)
+ // arm64: "BIC\tR[0-9]+@>51, R[0-9]+, R[0-9]+"
+ a[13] = a[12] &^ bits.RotateLeft64(a[11], 13)
+ // arm64: "EON\tR[0-9]+@>51, R[0-9]+, R[0-9]+"
+ a[16] = a[15] ^ ^bits.RotateLeft64(a[14], 13)
+ // arm64: "ORN\tR[0-9]+@>51, R[0-9]+, R[0-9]+"
+ a[19] = a[18] | ^bits.RotateLeft64(a[17], 13)
+ // arm64: "TST\tR[0-9]+@>51, R[0-9]+"
+ if a[18]&bits.RotateLeft64(a[19], 13) == 0 {
+ a[20] = 1
+ }
+
+}
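A note on the expected operands in the regexps above: the arm64 shifted-register form only has a rotate right (the Go assembler's @> operand), so bits.RotateLeft64(x, 13) is matched as a rotate right by 64 - 13 = 51, which is why every check looks for @>51 rather than 13.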