Diffstat (limited to 'src/cmd/compile/internal/ssa/gen/PPC64.rules')
-rw-r--r--  src/cmd/compile/internal/ssa/gen/PPC64.rules  166
1 file changed, 140 insertions(+), 26 deletions(-)
diff --git a/src/cmd/compile/internal/ssa/gen/PPC64.rules b/src/cmd/compile/internal/ssa/gen/PPC64.rules
index 8fe6da2eb2..23906fa466 100644
--- a/src/cmd/compile/internal/ssa/gen/PPC64.rules
+++ b/src/cmd/compile/internal/ssa/gen/PPC64.rules
@@ -5,30 +5,54 @@
// Lowering arithmetic
(Add64 x y) -> (ADD x y)
(AddPtr x y) -> (ADD x y)
-(Add32 x y) -> (ADD (SignExt32to64 x) (SignExt32to64 y))
-(Add16 x y) -> (ADD (SignExt16to64 x) (SignExt16to64 y))
-(Add8 x y) -> (ADD (SignExt8to64 x) (SignExt8to64 y))
+(Add32 x y) -> (ADD x y)
+(Add16 x y) -> (ADD x y)
+(Add8 x y) -> (ADD x y)
(Add64F x y) -> (FADD x y)
(Add32F x y) -> (FADDS x y)
(Sub64 x y) -> (SUB x y)
(SubPtr x y) -> (SUB x y)
(Sub32 x y) -> (SUB x y)
-(Sub16 x y) -> (SUB (SignExt16to64 x) (SignExt16to64 y))
-(Sub8 x y) -> (SUB (SignExt8to64 x) (SignExt8to64 y))
+(Sub16 x y) -> (SUB x y)
+(Sub8 x y) -> (SUB x y)
(Sub32F x y) -> (FSUBS x y)
(Sub64F x y) -> (FSUB x y)
(Mul64 x y) -> (MULLD x y)
(Mul32 x y) -> (MULLW x y)
-(Mul16 x y) -> (MULLW (SignExt16to32 x) (SignExt16to32 y))
-(Mul8 x y) -> (MULLW (SignExt8to32 x) (SignExt8to32 y))
+(Mul16 x y) -> (MULLW x y)
+(Mul8 x y) -> (MULLW x y)
+
+(Hmul64 x y) -> (MULHD x y)
+(Hmul64u x y) -> (MULHDU x y)
+(Hmul32 x y) -> (MULHW x y)
+(Hmul32u x y) -> (MULHWU x y)
+(Hmul16 x y) -> (SRAWconst (MULLW <config.fe.TypeInt32()> (SignExt16to32 x) (SignExt16to32 y)) [16])
+(Hmul16u x y) -> (SRWconst (MULLW <config.fe.TypeUInt32()> (ZeroExt16to32 x) (ZeroExt16to32 y)) [16])
+(Hmul8 x y) -> (SRAWconst (MULLW <config.fe.TypeInt16()> (SignExt8to32 x) (SignExt8to32 y)) [8])
+(Hmul8u x y) -> (SRWconst (MULLW <config.fe.TypeUInt16()> (ZeroExt8to32 x) (ZeroExt8to32 y)) [8])
+
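
The Hmul16/Hmul8 lowerings widen to a 32-bit multiply and take the upper half of the product. A minimal Go-level sketch of the 16-bit cases (illustrative only, not part of the patch; function names are invented):

    // hmul16 returns the high 16 bits of the 32-bit product, matching the
    // SRAWconst/MULLW lowering above.
    func hmul16(x, y int16) int16 {
        return int16((int32(x) * int32(y)) >> 16)
    }

    // hmul16u is the unsigned variant, matching SRWconst/MULLW with zero extension.
    func hmul16u(x, y uint16) uint16 {
        return uint16((uint32(x) * uint32(y)) >> 16)
    }

For example, hmul16(-3, 20000) is -1, the top halfword of -60000.
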
(Mul32F x y) -> (FMULS x y)
(Mul64F x y) -> (FMUL x y)
(Div32F x y) -> (FDIVS x y)
(Div64F x y) -> (FDIV x y)
+(Rsh64x64 x y) -> (SRAD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDIforC [-64] y))))
+(Rsh64Ux64 x y) -> (SRD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDIforC [-64] y))))
+(Lsh64x64 x y) -> (SLD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDIforC [-64] y))))
+
+(Rsh32x32 x y) -> (SRAW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry <config.fe.TypeInt64()> (ADDIforC [-32] (ZeroExt32to64 y)))))
+(Rsh32Ux32 x y) -> (SRW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry <config.fe.TypeInt64()> (ADDIforC [-32] (ZeroExt32to64 y)))))
+(Lsh32x32 x y) -> (SLW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry <config.fe.TypeInt64()> (ADDIforC [-32] (ZeroExt32to64 y)))))
+
+// Potentially useful optimizing rewrites.
+// (ADDIforC [k] c), k < 0 && (c < 0 || k+c >= 0) -> CarrySet
+// (ADDIforC [k] c), k < 0 && (c >= 0 && k+c < 0) -> CarryClear
+// (MaskIfNotCarry CarrySet) -> 0
+// (MaskIfNotCarry CarryClear) -> -1
+
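
The shift lowerings have to respect Go's defined behavior for out-of-range variable shifts. A minimal reference model in Go (illustrative only; in the generated code the ADDIforC/MaskIfNotCarry/ORN sequence forces the shift amount to all ones when it is 64 or more, so that sld/srd produce 0 and srad produces all sign bits):

    // lsh64x64 models (Lsh64x64 x y): counts of 64 or more must give 0.
    func lsh64x64(x, y uint64) uint64 {
        if y >= 64 {
            return 0
        }
        return x << y
    }

    // rsh64x64 models (Rsh64x64 x y): counts of 64 or more must give all
    // copies of the sign bit, i.e. the same result as shifting by 63.
    func rsh64x64(x int64, y uint64) int64 {
        if y >= 64 {
            y = 63
        }
        return x >> y
    }
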
// Lowering constants
(Const8 [val]) -> (MOVWconst [val])
(Const16 [val]) -> (MOVWconst [val])
@@ -44,24 +68,24 @@
(OffPtr [off] ptr) -> (ADD (MOVDconst <config.Frontend().TypeInt64()> [off]) ptr)
(And64 x y) -> (AND x y)
-(And32 x y) -> (AND (ZeroExt32to64 x) (ZeroExt32to64 y)) // Or? (AND (ZeroExt32to64 x) (ZeroExt32to64 y))
-(And16 x y) -> (AND (ZeroExt16to64 x) (ZeroExt16to64 y))
-(And8 x y) -> (AND (ZeroExt8to64 x) (ZeroExt8to64 y))
+(And32 x y) -> (AND x y)
+(And16 x y) -> (AND x y)
+(And8 x y) -> (AND x y)
(Or64 x y) -> (OR x y)
-(Or32 x y) -> (OR (ZeroExt32to64 x) (ZeroExt32to64 y))
-(Or16 x y) -> (OR (ZeroExt16to64 x) (ZeroExt16to64 y))
-(Or8 x y) -> (OR (ZeroExt8to64 x) (ZeroExt8to64 y))
+(Or32 x y) -> (OR x y)
+(Or16 x y) -> (OR x y)
+(Or8 x y) -> (OR x y)
(Xor64 x y) -> (XOR x y)
-(Xor32 x y) -> (XOR (ZeroExt32to64 x) (ZeroExt32to64 y))
-(Xor16 x y) -> (XOR (ZeroExt16to64 x) (ZeroExt16to64 y))
-(Xor8 x y) -> (XOR (ZeroExt8to64 x) (ZeroExt8to64 y))
+(Xor32 x y) -> (XOR x y)
+(Xor16 x y) -> (XOR x y)
+(Xor8 x y) -> (XOR x y)
(Neg64 x) -> (NEG x)
-(Neg32 x) -> (NEG (ZeroExt32to64 x))
-(Neg16 x) -> (NEG (ZeroExt16to64 x))
-(Neg8 x) -> (NEG (ZeroExt8to64 x))
+(Neg32 x) -> (NEG x)
+(Neg16 x) -> (NEG x)
+(Neg8 x) -> (NEG x)
// Lowering comparisons
(Eq8 x y) -> (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
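
The pattern in the hunks above is that sign and zero extensions are dropped wherever the low-order result bits do not depend on the high-order input bits: add, subtract, multiply, the bitwise ops, and negate all have that property, so the full 64-bit instructions are safe on narrow values. Comparisons look at all the bits, which is why Eq8 and friends still extend their operands. A small Go check of the claim (illustrative only; it always returns true):

    func narrowOpsAgree(x, y uint8) bool {
        wideSum := uint8(uint64(x) + uint64(y)) // low byte of the 64-bit ADD
        wideAnd := uint8(uint64(x) & uint64(y)) // low byte of the 64-bit AND
        return x+y == wideSum && x&y == wideAnd
    }
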
@@ -132,15 +156,105 @@
(If (GreaterThan cc) yes no) -> (GT cc yes no)
(If (GreaterEqual cc) yes no) -> (GE cc yes no)
-(If cond yes no) -> (NE (CMPconst [0] cond) yes no)
+(If cond yes no) -> (NE (CMPWconst [0] cond) yes no)
// Absorb boolean tests into block
-(NE (CMPconst [0] (Equal cc)) yes no) -> (EQ cc yes no)
-(NE (CMPconst [0] (NotEqual cc)) yes no) -> (NE cc yes no)
-(NE (CMPconst [0] (LessThan cc)) yes no) -> (LT cc yes no)
-(NE (CMPconst [0] (LessEqual cc)) yes no) -> (LE cc yes no)
-(NE (CMPconst [0] (GreaterThan cc)) yes no) -> (GT cc yes no)
-(NE (CMPconst [0] (GreaterEqual cc)) yes no) -> (GE cc yes no)
+(NE (CMPWconst [0] (Equal cc)) yes no) -> (EQ cc yes no)
+(NE (CMPWconst [0] (NotEqual cc)) yes no) -> (NE cc yes no)
+(NE (CMPWconst [0] (LessThan cc)) yes no) -> (LT cc yes no)
+(NE (CMPWconst [0] (LessEqual cc)) yes no) -> (LE cc yes no)
+(NE (CMPWconst [0] (GreaterThan cc)) yes no) -> (GT cc yes no)
+(NE (CMPWconst [0] (GreaterEqual cc)) yes no) -> (GE cc yes no)
+
+// absorb flag constants into branches
+(EQ (FlagEQ) yes no) -> (First nil yes no)
+(EQ (FlagLT) yes no) -> (First nil no yes)
+(EQ (FlagGT) yes no) -> (First nil no yes)
+
+(NE (FlagEQ) yes no) -> (First nil no yes)
+(NE (FlagLT) yes no) -> (First nil yes no)
+(NE (FlagGT) yes no) -> (First nil yes no)
+
+(LT (FlagEQ) yes no) -> (First nil no yes)
+(LT (FlagLT) yes no) -> (First nil yes no)
+(LT (FlagGT) yes no) -> (First nil no yes)
+
+(LE (FlagEQ) yes no) -> (First nil yes no)
+(LE (FlagLT) yes no) -> (First nil yes no)
+(LE (FlagGT) yes no) -> (First nil no yes)
+
+(GT (FlagEQ) yes no) -> (First nil no yes)
+(GT (FlagLT) yes no) -> (First nil no yes)
+(GT (FlagGT) yes no) -> (First nil yes no)
+
+(GE (FlagEQ) yes no) -> (First nil yes no)
+(GE (FlagLT) yes no) -> (First nil no yes)
+(GE (FlagGT) yes no) -> (First nil yes no)
+
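
When the flags input is already one of the constant flag values, the conditional block collapses to an unconditional First block, with the successor order encoding which edge is taken. The table reduces to a small truth function; a sketch in Go (names are illustrative, not the compiler's API). The same table, with 1 and 0 in place of the yes and no edges, drives the MOVWconst rules for boolean values further down.

    // condHolds reports whether the yes edge is taken for a given block
    // kind and constant flag result, mirroring the rules above.
    func condHolds(kind, flag string) bool {
        switch kind {
        case "EQ":
            return flag == "FlagEQ"
        case "NE":
            return flag != "FlagEQ"
        case "LT":
            return flag == "FlagLT"
        case "LE":
            return flag != "FlagGT"
        case "GT":
            return flag == "FlagGT"
        case "GE":
            return flag != "FlagLT"
        }
        return false
    }
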
+// absorb InvertFlags into branches
+(LT (InvertFlags cmp) yes no) -> (GT cmp yes no)
+(GT (InvertFlags cmp) yes no) -> (LT cmp yes no)
+(LE (InvertFlags cmp) yes no) -> (GE cmp yes no)
+(GE (InvertFlags cmp) yes no) -> (LE cmp yes no)
+(EQ (InvertFlags cmp) yes no) -> (EQ cmp yes no)
+(NE (InvertFlags cmp) yes no) -> (NE cmp yes no)
+
+// constant comparisons
+(CMPWconst (MOVWconst [x]) [y]) && int32(x)==int32(y) -> (FlagEQ)
+(CMPWconst (MOVWconst [x]) [y]) && int32(x)<int32(y) -> (FlagLT)
+(CMPWconst (MOVWconst [x]) [y]) && int32(x)>int32(y) -> (FlagGT)
+
+(CMPconst (MOVDconst [x]) [y]) && int64(x)==int64(y) -> (FlagEQ)
+(CMPconst (MOVDconst [x]) [y]) && int64(x)<int64(y) -> (FlagLT)
+(CMPconst (MOVDconst [x]) [y]) && int64(x)>int64(y) -> (FlagGT)
+
+(CMPWUconst (MOVWconst [x]) [y]) && int32(x)==int32(y) -> (FlagEQ)
+(CMPWUconst (MOVWconst [x]) [y]) && uint32(x)<uint32(y) -> (FlagLT)
+(CMPWUconst (MOVWconst [x]) [y]) && uint32(x)>uint32(y) -> (FlagGT)
+
+(CMPUconst (MOVDconst [x]) [y]) && int64(x)==int64(y) -> (FlagEQ)
+(CMPUconst (MOVDconst [x]) [y]) && uint64(x)<uint64(y) -> (FlagLT)
+(CMPUconst (MOVDconst [x]) [y]) && uint64(x)>uint64(y) -> (FlagGT)
+
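
Comparing a constant operand against the aux constant folds directly to one of the three flag values; the unsigned variants differ only in comparing as uint32/uint64. A rough Go model of the signed 32-bit case (illustrative only; the strings stand in for the flag ops):

    func foldCMPWconst(x, y int32) string {
        switch {
        case x == y:
            return "FlagEQ"
        case x < y:
            return "FlagLT"
        default:
            return "FlagGT"
        }
    }
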
+// other known comparisons
+//(CMPconst (MOVBUreg _) [c]) && 0xff < c -> (FlagLT)
+//(CMPconst (MOVHUreg _) [c]) && 0xffff < c -> (FlagLT)
+//(CMPconst (ANDconst _ [m]) [n]) && 0 <= int32(m) && int32(m) < int32(n) -> (FlagLT)
+//(CMPconst (SRLconst _ [c]) [n]) && 0 <= n && 0 < c && c <= 32 && (1<<uint32(32-c)) <= uint32(n) -> (FlagLT)
+
+// absorb flag constants into boolean values
+(Equal (FlagEQ)) -> (MOVWconst [1])
+(Equal (FlagLT)) -> (MOVWconst [0])
+(Equal (FlagGT)) -> (MOVWconst [0])
+
+(NotEqual (FlagEQ)) -> (MOVWconst [0])
+(NotEqual (FlagLT)) -> (MOVWconst [1])
+(NotEqual (FlagGT)) -> (MOVWconst [1])
+
+(LessThan (FlagEQ)) -> (MOVWconst [0])
+(LessThan (FlagLT)) -> (MOVWconst [1])
+(LessThan (FlagGT)) -> (MOVWconst [0])
+
+(LessEqual (FlagEQ)) -> (MOVWconst [1])
+(LessEqual (FlagLT)) -> (MOVWconst [1])
+(LessEqual (FlagGT)) -> (MOVWconst [0])
+
+(GreaterThan (FlagEQ)) -> (MOVWconst [0])
+(GreaterThan (FlagLT)) -> (MOVWconst [0])
+(GreaterThan (FlagGT)) -> (MOVWconst [1])
+
+(GreaterEqual (FlagEQ)) -> (MOVWconst [1])
+(GreaterEqual (FlagLT)) -> (MOVWconst [0])
+(GreaterEqual (FlagGT)) -> (MOVWconst [1])
+
+// absorb InvertFlags into boolean values
+(Equal (InvertFlags x)) -> (Equal x)
+(NotEqual (InvertFlags x)) -> (NotEqual x)
+(LessThan (InvertFlags x)) -> (GreaterThan x)
+(GreaterThan (InvertFlags x)) -> (LessThan x)
+(LessEqual (InvertFlags x)) -> (GreaterEqual x)
+(GreaterEqual (InvertFlags x)) -> (LessEqual x)
+
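
InvertFlags models a comparison whose operands were swapped, so absorbing it mirrors each condition: less-than becomes greater-than and so on, while Equal and NotEqual are symmetric and the flags pass through unchanged. A compact Go sketch (illustrative names only):

    func invertCond(cond string) string {
        switch cond {
        case "LessThan":
            return "GreaterThan"
        case "GreaterThan":
            return "LessThan"
        case "LessEqual":
            return "GreaterEqual"
        case "GreaterEqual":
            return "LessEqual"
        default: // Equal, NotEqual
            return cond
        }
    }
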
// Lowering loads
(Load <t> ptr mem) && (is64BitInt(t) || isPtr(t)) -> (MOVDload ptr mem)