// Rotate generation with non-const shift
// these match patterns from math/bits/RotateLeft[32|64], but there could be others
-(ADD (SLD x (ANDconst [63] y)) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))) => (ROTL x y)
-(ADD (SLD x (ANDconst [63] y)) (SRD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y)))) => (ROTL x y)
-( OR (SLD x (ANDconst [63] y)) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))) => (ROTL x y)
-( OR (SLD x (ANDconst [63] y)) (SRD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y)))) => (ROTL x y)
-(XOR (SLD x (ANDconst [63] y)) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))) => (ROTL x y)
-(XOR (SLD x (ANDconst [63] y)) (SRD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y)))) => (ROTL x y)
+(ADD (SLD x (Select0 (ANDCCconst [63] y))) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))) => (ROTL x y)
+(ADD (SLD x (Select0 (ANDCCconst [63] y))) (SRD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))) => (ROTL x y)
+( OR (SLD x (Select0 (ANDCCconst [63] y))) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y)))))=> (ROTL x y)
+( OR (SLD x (Select0 (ANDCCconst [63] y))) (SRD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))) => (ROTL x y)
+(XOR (SLD x (Select0 (ANDCCconst [63] y))) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))) => (ROTL x y)
+(XOR (SLD x (Select0 (ANDCCconst [63] y))) (SRD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))) => (ROTL x y)
-(ADD (SLW x (ANDconst [31] y)) (SRW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y)))) => (ROTLW x y)
-(ADD (SLW x (ANDconst [31] y)) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))) => (ROTLW x y)
-( OR (SLW x (ANDconst [31] y)) (SRW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y)))) => (ROTLW x y)
-( OR (SLW x (ANDconst [31] y)) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))) => (ROTLW x y)
-(XOR (SLW x (ANDconst [31] y)) (SRW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y)))) => (ROTLW x y)
-(XOR (SLW x (ANDconst [31] y)) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))) => (ROTLW x y)
+(ADD (SLW x (Select0 (ANDCCconst [31] y))) (SRW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))) => (ROTLW x y)
+(ADD (SLW x (Select0 (ANDCCconst [31] y))) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))) => (ROTLW x y)
+( OR (SLW x (Select0 (ANDCCconst [31] y))) (SRW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))) => (ROTLW x y)
+( OR (SLW x (Select0 (ANDCCconst [31] y))) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))) => (ROTLW x y)
+(XOR (SLW x (Select0 (ANDCCconst [31] y))) (SRW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))) => (ROTLW x y)
+(XOR (SLW x (Select0 (ANDCCconst [31] y))) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))) => (ROTLW x y)
// Lowering rotates
(ROTL x (MOVDconst [c])) => (ROTLconst x [c&63])
// Combine rotate and mask operations
-(ANDconst [m] (ROTLWconst [r] x)) && isPPC64WordRotateMask(m) => (RLWINM [encodePPC64RotateMask(r,m,32)] x)
+(Select0 (ANDCCconst [m] (ROTLWconst [r] x))) && isPPC64WordRotateMask(m) => (RLWINM [encodePPC64RotateMask(r,m,32)] x)
(AND (MOVDconst [m]) (ROTLWconst [r] x)) && isPPC64WordRotateMask(m) => (RLWINM [encodePPC64RotateMask(r,m,32)] x)
-(ANDconst [m] (ROTLW x r)) && isPPC64WordRotateMask(m) => (RLWNM [encodePPC64RotateMask(0,m,32)] x r)
+(Select0 (ANDCCconst [m] (ROTLW x r))) && isPPC64WordRotateMask(m) => (RLWNM [encodePPC64RotateMask(0,m,32)] x r)
(AND (MOVDconst [m]) (ROTLW x r)) && isPPC64WordRotateMask(m) => (RLWNM [encodePPC64RotateMask(0,m,32)] x r)
// Note, any rotated word bitmask is still a valid word bitmask.
(ROTLWconst [r] (AND (MOVDconst [m]) x)) && isPPC64WordRotateMask(m) => (RLWINM [encodePPC64RotateMask(r,rotateLeft32(m,r),32)] x)
-(ROTLWconst [r] (ANDconst [m] x)) && isPPC64WordRotateMask(m) => (RLWINM [encodePPC64RotateMask(r,rotateLeft32(m,r),32)] x)
+(ROTLWconst [r] (Select0 (ANDCCconst [m] x))) && isPPC64WordRotateMask(m) => (RLWINM [encodePPC64RotateMask(r,rotateLeft32(m,r),32)] x)
-(ANDconst [m] (SRWconst x [s])) && mergePPC64RShiftMask(m,s,32) == 0 => (MOVDconst [0])
-(ANDconst [m] (SRWconst x [s])) && mergePPC64AndSrwi(m,s) != 0 => (RLWINM [mergePPC64AndSrwi(m,s)] x)
+(Select0 (ANDCCconst [m] (SRWconst x [s]))) && mergePPC64RShiftMask(m,s,32) == 0 => (MOVDconst [0])
+(Select0 (ANDCCconst [m] (SRWconst x [s]))) && mergePPC64AndSrwi(m,s) != 0 => (RLWINM [mergePPC64AndSrwi(m,s)] x)
(AND (MOVDconst [m]) (SRWconst x [s])) && mergePPC64RShiftMask(m,s,32) == 0 => (MOVDconst [0])
(AND (MOVDconst [m]) (SRWconst x [s])) && mergePPC64AndSrwi(m,s) != 0 => (RLWINM [mergePPC64AndSrwi(m,s)] x)
-(SRWconst (ANDconst [m] x) [s]) && mergePPC64RShiftMask(m>>uint(s),s,32) == 0 => (MOVDconst [0])
-(SRWconst (ANDconst [m] x) [s]) && mergePPC64AndSrwi(m>>uint(s),s) != 0 => (RLWINM [mergePPC64AndSrwi(m>>uint(s),s)] x)
+(SRWconst (Select0 (ANDCCconst [m] x)) [s]) && mergePPC64RShiftMask(m>>uint(s),s,32) == 0 => (MOVDconst [0])
+(SRWconst (Select0 (ANDCCconst [m] x)) [s]) && mergePPC64AndSrwi(m>>uint(s),s) != 0 => (RLWINM [mergePPC64AndSrwi(m>>uint(s),s)] x)
(SRWconst (AND (MOVDconst [m]) x) [s]) && mergePPC64RShiftMask(m>>uint(s),s,32) == 0 => (MOVDconst [0])
(SRWconst (AND (MOVDconst [m]) x) [s]) && mergePPC64AndSrwi(m>>uint(s),s) != 0 => (RLWINM [mergePPC64AndSrwi(m>>uint(s),s)] x)
// These are subexpressions found in statements that can become rotates
// In these cases the shift count is known to be < 64, so the more complicated expressions
// with Mask & Carry are not needed.
-(Lsh64x64 x (AND y (MOVDconst [63]))) => (SLD x (ANDconst <typ.Int64> [63] y))
-(Lsh64x64 x (ANDconst <typ.Int64> [63] y)) => (SLD x (ANDconst <typ.Int64> [63] y))
-(Rsh64Ux64 x (AND y (MOVDconst [63]))) => (SRD x (ANDconst <typ.Int64> [63] y))
-(Rsh64Ux64 x (ANDconst <typ.UInt> [63] y)) => (SRD x (ANDconst <typ.UInt> [63] y))
-(Rsh64Ux64 x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y))) => (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
-(Rsh64Ux64 x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y))) => (SRD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y)))
-(Rsh64Ux64 x (SUB <typ.UInt> (MOVDconst [64]) (AND <typ.UInt> y (MOVDconst [63])))) => (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
-(Rsh64Ux64 x (SUBFCconst <typ.UInt> [64] (AND <typ.UInt> y (MOVDconst [63])))) => (SRD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y)))
-(Rsh64x64 x (AND y (MOVDconst [63]))) => (SRAD x (ANDconst <typ.Int64> [63] y))
-(Rsh64x64 x (ANDconst <typ.UInt> [63] y)) => (SRAD x (ANDconst <typ.UInt> [63] y))
-(Rsh64x64 x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y))) => (SRAD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
-(Rsh64x64 x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y))) => (SRAD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y)))
-(Rsh64x64 x (SUB <typ.UInt> (MOVDconst [64]) (AND <typ.UInt> y (MOVDconst [63])))) => (SRAD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
-(Rsh64x64 x (SUBFCconst <typ.UInt> [64] (AND <typ.UInt> y (MOVDconst [63])))) => (SRAD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y)))
+(Lsh64x64 x (AND y (MOVDconst [63]))) => (SLD x (Select0 <typ.Int64> (ANDCCconst [63] y)))
+(Lsh64x64 x (Select0 (ANDCCconst <typ.Int64> [63] y))) => (SLD x (Select0 <typ.Int64> (ANDCCconst [63] y)))
+(Rsh64Ux64 x (AND y (MOVDconst [63]))) => (SRD x (Select0 <typ.Int64> (ANDCCconst [63] y)))
+(Rsh64Ux64 x (Select0 (ANDCCconst <typ.UInt> [63] y))) => (SRD x (Select0 <typ.UInt> (ANDCCconst [63] y)))
+(Rsh64Ux64 x (SUB <typ.UInt> (MOVDconst [64]) (Select0 (ANDCCconst <typ.UInt> [63] y)))) => (SRD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))
+(Rsh64Ux64 x (SUBFCconst <typ.UInt> [64] (Select0 (ANDCCconst <typ.UInt> [63] y)))) => (SRD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))
+(Rsh64Ux64 x (SUB <typ.UInt> (MOVDconst [64]) (AND <typ.UInt> y (MOVDconst [63])))) => (SRD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))
+(Rsh64Ux64 x (SUBFCconst <typ.UInt> [64] (AND <typ.UInt> y (MOVDconst [63])))) => (SRD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))
+(Rsh64x64 x (AND y (MOVDconst [63]))) => (SRAD x (Select0 <typ.Int64> (ANDCCconst [63] y)))
+(Rsh64x64 x (Select0 (ANDCCconst <typ.UInt> [63] y))) => (SRAD x (Select0 <typ.UInt> (ANDCCconst [63] y)))
+(Rsh64x64 x (SUB <typ.UInt> (MOVDconst [64]) (Select0 (ANDCCconst <typ.UInt> [63] y)))) => (SRAD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))
+(Rsh64x64 x (SUBFCconst <typ.UInt> [64] (Select0 (ANDCCconst <typ.UInt> [63] y)))) => (SRAD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))
+(Rsh64x64 x (SUB <typ.UInt> (MOVDconst [64]) (AND <typ.UInt> y (MOVDconst [63])))) => (SRAD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))
+(Rsh64x64 x (SUBFCconst <typ.UInt> [64] (AND <typ.UInt> y (MOVDconst [63])))) => (SRAD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))
(Lsh64x64 x y) => (SLD x (ISEL [0] y (MOVDconst [-1]) (CMPU y (MOVDconst [64]))))
(Rsh64x64 x y) => (SRAD x (ISEL [0] y (MOVDconst [-1]) (CMPU y (MOVDconst [64]))))
(Rsh64Ux64 x y) => (SRD x (ISEL [0] y (MOVDconst [-1]) (CMPU y (MOVDconst [64]))))
-(Lsh32x64 x (AND y (MOVDconst [31]))) => (SLW x (ANDconst <typ.Int32> [31] y))
-(Lsh32x64 x (ANDconst <typ.Int32> [31] y)) => (SLW x (ANDconst <typ.Int32> [31] y))
+(Lsh32x64 x (AND y (MOVDconst [31]))) => (SLW x (Select0 <typ.Int32> (ANDCCconst [31] y)))
+(Lsh32x64 x (Select0 <typ.Int32> (ANDCCconst [31] y))) => (SLW x (Select0 <typ.Int32> (ANDCCconst [31] y)))
-(Rsh32Ux64 x (AND y (MOVDconst [31]))) => (SRW x (ANDconst <typ.Int32> [31] y))
-(Rsh32Ux64 x (ANDconst <typ.UInt> [31] y)) => (SRW x (ANDconst <typ.UInt> [31] y))
-(Rsh32Ux64 x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y))) => (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
-(Rsh32Ux64 x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y))) => (SRW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y)))
-(Rsh32Ux64 x (SUB <typ.UInt> (MOVDconst [32]) (AND <typ.UInt> y (MOVDconst [31])))) => (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
-(Rsh32Ux64 x (SUBFCconst <typ.UInt> [32] (AND <typ.UInt> y (MOVDconst [31])))) => (SRW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y)))
+(Rsh32Ux64 x (AND y (MOVDconst [31]))) => (SRW x (Select0 <typ.Int32> (ANDCCconst [31] y)))
+(Rsh32Ux64 x (Select0 (ANDCCconst <typ.UInt> [31] y))) => (SRW x (Select0 <typ.UInt> (ANDCCconst [31] y)))
+(Rsh32Ux64 x (SUB <typ.UInt> (MOVDconst [32]) (Select0 (ANDCCconst <typ.UInt> [31] y)))) => (SRW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))
+(Rsh32Ux64 x (SUBFCconst <typ.UInt> [32] (Select0 (ANDCCconst <typ.UInt> [31] y)))) => (SRW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))
+(Rsh32Ux64 x (SUB <typ.UInt> (MOVDconst [32]) (AND <typ.UInt> y (MOVDconst [31])))) => (SRW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))
+(Rsh32Ux64 x (SUBFCconst <typ.UInt> [32] (AND <typ.UInt> y (MOVDconst [31])))) => (SRW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))
-(Rsh32x64 x (AND y (MOVDconst [31]))) => (SRAW x (ANDconst <typ.Int32> [31] y))
-(Rsh32x64 x (ANDconst <typ.UInt> [31] y)) => (SRAW x (ANDconst <typ.UInt> [31] y))
-(Rsh32x64 x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y))) => (SRAW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
-(Rsh32x64 x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y))) => (SRAW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y)))
-(Rsh32x64 x (SUB <typ.UInt> (MOVDconst [32]) (AND <typ.UInt> y (MOVDconst [31])))) => (SRAW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
-(Rsh32x64 x (SUBFCconst <typ.UInt> [32] (AND <typ.UInt> y (MOVDconst [31])))) => (SRAW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y)))
+(Rsh32x64 x (AND y (MOVDconst [31]))) => (SRAW x (Select0 <typ.Int32> (ANDCCconst [31] y)))
+(Rsh32x64 x (Select0 (ANDCCconst <typ.UInt> [31] y))) => (SRAW x (Select0 <typ.UInt> (ANDCCconst [31] y)))
+(Rsh32x64 x (SUB <typ.UInt> (MOVDconst [32]) (Select0 (ANDCCconst <typ.UInt> [31] y)))) => (SRAW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))
+(Rsh32x64 x (SUBFCconst <typ.UInt> [32] (Select0 (ANDCCconst <typ.UInt> [31] y)))) => (SRAW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))
+(Rsh32x64 x (SUB <typ.UInt> (MOVDconst [32]) (AND <typ.UInt> y (MOVDconst [31])))) => (SRAW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))
+(Rsh32x64 x (SUBFCconst <typ.UInt> [32] (AND <typ.UInt> y (MOVDconst [31])))) => (SRAW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))
(Rsh32x64 x y) => (SRAW x (ISEL [0] y (MOVDconst [-1]) (CMPU y (MOVDconst [32]))))
(Rsh32Ux64 x y) => (SRW x (ISEL [0] y (MOVDconst [-1]) (CMPU y (MOVDconst [32]))))
(Lsh8x8 x y) => (SLW x (ISEL [0] y (MOVDconst [-1]) (CMPU (ZeroExt8to64 y) (MOVDconst [8]))))
// Cleaning up shift ops
-(ISEL [0] (ANDconst [d] y) (MOVDconst [-1]) (CMPU (ANDconst [d] y) (MOVDconst [c]))) && c >= d => (ANDconst [d] y)
-(ISEL [0] (ANDconst [d] y) (MOVDconst [-1]) (CMPUconst [c] (ANDconst [d] y))) && c >= d => (ANDconst [d] y)
+(ISEL [0] (Select0 (ANDCCconst [d] y)) (MOVDconst [-1]) (CMPU (Select0 (ANDCCconst [d] y)) (MOVDconst [c]))) && c >= d => (Select0 (ANDCCconst [d] y))
+(ISEL [0] (Select0 (ANDCCconst [d] y)) (MOVDconst [-1]) (CMPUconst [c] (Select0 (ANDCCconst [d] y)))) && c >= d => (Select0 (ANDCCconst [d] y))
(ORN x (MOVDconst [-1])) => x
(S(RAD|RD|LD) x (MOVDconst [c])) => (S(RAD|RD|LD)const [c&63 | (c>>6&1*63)] x)
(OR x (NOR y y)) => (ORN x y)
// Lowering comparisons
-(EqB x y) => (ANDconst [1] (EQV x y))
+(EqB x y) => (Select0 <typ.Int> (ANDCCconst [1] (EQV x y)))
// Sign extension dependence on operand sign sets up for sign/zero-extension elision later
(Eq8 x y) && isSigned(x.Type) && isSigned(y.Type) => (Equal (CMPW (SignExt8to32 x) (SignExt8to32 y)))
(Eq16 x y) && isSigned(x.Type) && isSigned(y.Type) => (Equal (CMPW (SignExt16to32 x) (SignExt16to32 y)))
(If (FGreaterThan cc) yes no) => (FGT cc yes no)
(If (FGreaterEqual cc) yes no) => (FGE cc yes no)
-(If cond yes no) => (NE (CMPWconst [0] (ANDconst <typ.UInt32> [1] cond)) yes no)
+(If cond yes no) => (NE (CMPWconst [0] (Select0 <typ.UInt32> (ANDCCconst [1] cond))) yes no)
// Absorb boolean tests into block
-(NE (CMPWconst [0] (ANDconst [1] (Equal cc))) yes no) => (EQ cc yes no)
-(NE (CMPWconst [0] (ANDconst [1] (NotEqual cc))) yes no) => (NE cc yes no)
-(NE (CMPWconst [0] (ANDconst [1] (LessThan cc))) yes no) => (LT cc yes no)
-(NE (CMPWconst [0] (ANDconst [1] (LessEqual cc))) yes no) => (LE cc yes no)
-(NE (CMPWconst [0] (ANDconst [1] (GreaterThan cc))) yes no) => (GT cc yes no)
-(NE (CMPWconst [0] (ANDconst [1] (GreaterEqual cc))) yes no) => (GE cc yes no)
-(NE (CMPWconst [0] (ANDconst [1] (FLessThan cc))) yes no) => (FLT cc yes no)
-(NE (CMPWconst [0] (ANDconst [1] (FLessEqual cc))) yes no) => (FLE cc yes no)
-(NE (CMPWconst [0] (ANDconst [1] (FGreaterThan cc))) yes no) => (FGT cc yes no)
-(NE (CMPWconst [0] (ANDconst [1] (FGreaterEqual cc))) yes no) => (FGE cc yes no)
-
-// Elide compares of bit tests // TODO need to make both CC and result of ANDCC available.
-(EQ (CMPconst [0] (ANDconst [c] x)) yes no) => (EQ (ANDCCconst [c] x) yes no)
-(NE (CMPconst [0] (ANDconst [c] x)) yes no) => (NE (ANDCCconst [c] x) yes no)
-(EQ (CMPWconst [0] (ANDconst [c] x)) yes no) => (EQ (ANDCCconst [c] x) yes no)
-(NE (CMPWconst [0] (ANDconst [c] x)) yes no) => (NE (ANDCCconst [c] x) yes no)
+(NE (CMPWconst [0] (Select0 (ANDCCconst [1] (Equal cc)))) yes no) => (EQ cc yes no)
+(NE (CMPWconst [0] (Select0 (ANDCCconst [1] (NotEqual cc)))) yes no) => (NE cc yes no)
+(NE (CMPWconst [0] (Select0 (ANDCCconst [1] (LessThan cc)))) yes no) => (LT cc yes no)
+(NE (CMPWconst [0] (Select0 (ANDCCconst [1] (LessEqual cc)))) yes no) => (LE cc yes no)
+(NE (CMPWconst [0] (Select0 (ANDCCconst [1] (GreaterThan cc)))) yes no) => (GT cc yes no)
+(NE (CMPWconst [0] (Select0 (ANDCCconst [1] (GreaterEqual cc)))) yes no) => (GE cc yes no)
+(NE (CMPWconst [0] (Select0 (ANDCCconst [1] (FLessThan cc)))) yes no) => (FLT cc yes no)
+(NE (CMPWconst [0] (Select0 (ANDCCconst [1] (FLessEqual cc)))) yes no) => (FLE cc yes no)
+(NE (CMPWconst [0] (Select0 (ANDCCconst [1] (FGreaterThan cc)))) yes no) => (FGT cc yes no)
+(NE (CMPWconst [0] (Select0 (ANDCCconst [1] (FGreaterEqual cc)))) yes no) => (FGE cc yes no)
+
+// Elide compares of bit tests
+((EQ|NE) (CMPconst [0] (Select0 (ANDCCconst [c] x))) yes no) => ((EQ|NE) (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
+((EQ|NE) (CMPWconst [0] (Select0 (ANDCCconst [c] x))) yes no) => ((EQ|NE) (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
// absorb flag constants into branches
(EQ (FlagEQ) yes no) => (First yes no)
(LessEqual (InvertFlags x)) => (GreaterEqual x)
(GreaterEqual (InvertFlags x)) => (LessEqual x)
-// Elide compares of bit tests // TODO need to make both CC and result of ANDCC available.
-((EQ|NE|LT|LE|GT|GE) (CMPconst [0] (ANDconst [c] x)) yes no) => ((EQ|NE|LT|LE|GT|GE) (ANDCCconst [c] x) yes no)
-((EQ|NE|LT|LE|GT|GE) (CMPWconst [0] (ANDconst [c] x)) yes no) => ((EQ|NE|LT|LE|GT|GE) (ANDCCconst [c] x) yes no)
+// Elide compares of bit tests
+((EQ|NE|LT|LE|GT|GE) (CMPconst [0] (Select0 (ANDCCconst [c] x))) yes no) => ((EQ|NE|LT|LE|GT|GE) (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
+((EQ|NE|LT|LE|GT|GE) (CMPWconst [0] (Select0 (ANDCCconst [c] x))) yes no) => ((EQ|NE|LT|LE|GT|GE) (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
((EQ|NE|LT|LE|GT|GE) (CMPconst [0] z:(AND x y)) yes no) && z.Uses == 1 => ((EQ|NE|LT|LE|GT|GE) (ANDCC x y) yes no)
((EQ|NE|LT|LE|GT|GE) (CMPconst [0] z:(OR x y)) yes no) && z.Uses == 1 => ((EQ|NE|LT|LE|GT|GE) (ORCC x y) yes no)
((EQ|NE|LT|LE|GT|GE) (CMPconst [0] z:(XOR x y)) yes no) && z.Uses == 1 => ((EQ|NE|LT|LE|GT|GE) (XORCC x y) yes no)
(NOR (MOVDconst [c]) (MOVDconst [d])) => (MOVDconst [^(c|d)])
// Discover consts
-(AND x (MOVDconst [c])) && isU16Bit(c) => (ANDconst [c] x)
+(AND x (MOVDconst [c])) && isU16Bit(c) => (Select0 (ANDCCconst [c] x))
(XOR x (MOVDconst [c])) && isU32Bit(c) => (XORconst [c] x)
(OR x (MOVDconst [c])) && isU32Bit(c) => (ORconst [c] x)
// Simplify consts
-(ANDconst [c] (ANDconst [d] x)) => (ANDconst [c&d] x)
+(Select0 (ANDCCconst [c] (Select0 (ANDCCconst [d] x)))) => (Select0 (ANDCCconst [c&d] x))
(ORconst [c] (ORconst [d] x)) => (ORconst [c|d] x)
(XORconst [c] (XORconst [d] x)) => (XORconst [c^d] x)
-(ANDconst [-1] x) => x
-(ANDconst [0] _) => (MOVDconst [0])
+(Select0 (ANDCCconst [-1] x)) => x
+(Select0 (ANDCCconst [0] _)) => (MOVDconst [0])
(XORconst [0] x) => x
(ORconst [-1] _) => (MOVDconst [-1])
(ORconst [0] x) => x
// zero-extend of small and => small and
-(MOVBZreg y:(ANDconst [c] _)) && uint64(c) <= 0xFF => y
-(MOVHZreg y:(ANDconst [c] _)) && uint64(c) <= 0xFFFF => y
-(MOVWZreg y:(ANDconst [c] _)) && uint64(c) <= 0xFFFFFFFF => y
+(MOVBZreg y:(Select0 (ANDCCconst [c] _))) && uint64(c) <= 0xFF => y
+(MOVHZreg y:(Select0 (ANDCCconst [c] _))) && uint64(c) <= 0xFFFF => y
+(MOVWZreg y:(Select0 (ANDCCconst [c] _))) && uint64(c) <= 0xFFFFFFFF => y
(MOVWZreg y:(AND (MOVDconst [c]) _)) && uint64(c) <= 0xFFFFFFFF => y
// sign extend of small-positive and => small-positive-and
-(MOVBreg y:(ANDconst [c] _)) && uint64(c) <= 0x7F => y
-(MOVHreg y:(ANDconst [c] _)) && uint64(c) <= 0x7FFF => y
-(MOVWreg y:(ANDconst [c] _)) && uint64(c) <= 0xFFFF => y // 0xFFFF is largest immediate constant, when regarded as 32-bit is > 0
+(MOVBreg y:(Select0 (ANDCCconst [c] _))) && uint64(c) <= 0x7F => y
+(MOVHreg y:(Select0 (ANDCCconst [c] _))) && uint64(c) <= 0x7FFF => y
+(MOVWreg y:(Select0 (ANDCCconst [c] _))) && uint64(c) <= 0xFFFF => y // 0xFFFF is the largest immediate constant; when regarded as 32-bit it is > 0
(MOVWreg y:(AND (MOVDconst [c]) _)) && uint64(c) <= 0x7FFFFFFF => y
// small and of zero-extend => either zero-extend or small and
-(ANDconst [c] y:(MOVBZreg _)) && c&0xFF == 0xFF => y
-(ANDconst [0xFF] y:(MOVBreg _)) => y
-(ANDconst [c] y:(MOVHZreg _)) && c&0xFFFF == 0xFFFF => y
-(ANDconst [0xFFFF] y:(MOVHreg _)) => y
+(Select0 (ANDCCconst [c] y:(MOVBZreg _))) && c&0xFF == 0xFF => y
+(Select0 (ANDCCconst [0xFF] y:(MOVBreg _))) => y
+(Select0 (ANDCCconst [c] y:(MOVHZreg _))) && c&0xFFFF == 0xFFFF => y
+(Select0 (ANDCCconst [0xFFFF] y:(MOVHreg _))) => y
(AND (MOVDconst [c]) y:(MOVWZreg _)) && c&0xFFFFFFFF == 0xFFFFFFFF => y
(AND (MOVDconst [0xFFFFFFFF]) y:(MOVWreg x)) => (MOVWZreg x)
// normal case
-(ANDconst [c] (MOV(B|BZ)reg x)) => (ANDconst [c&0xFF] x)
-(ANDconst [c] (MOV(H|HZ)reg x)) => (ANDconst [c&0xFFFF] x)
-(ANDconst [c] (MOV(W|WZ)reg x)) => (ANDconst [c&0xFFFFFFFF] x)
+(Select0 (ANDCCconst [c] (MOV(B|BZ)reg x))) => (Select0 (ANDCCconst [c&0xFF] x))
+(Select0 (ANDCCconst [c] (MOV(H|HZ)reg x))) => (Select0 (ANDCCconst [c&0xFFFF] x))
+(Select0 (ANDCCconst [c] (MOV(W|WZ)reg x))) => (Select0 (ANDCCconst [c&0xFFFFFFFF] x))
// Eliminate unnecessary sign/zero extend following right shift
(MOV(B|H|W)Zreg (SRWconst [c] (MOVBZreg x))) => (SRWconst [c] (MOVBZreg x))
(MOVBZreg ((OR|XOR|AND) <t> x (MOVHZreg y))) => (MOVBZreg ((OR|XOR|AND) <t> x y))
(MOVBZreg ((OR|XOR|AND) <t> x (MOVBZreg y))) => (MOVBZreg ((OR|XOR|AND) <t> x y))
-(MOV(B|H|W)Zreg z:(ANDconst [c] (MOVBZload ptr x))) => z
+(MOV(B|H|W)Zreg z:(Select0 (ANDCCconst [c] (MOVBZload ptr x)))) => z
(MOVBZreg z:(AND y (MOVBZload ptr x))) => z
-(MOV(H|W)Zreg z:(ANDconst [c] (MOVHZload ptr x))) => z
+(MOV(H|W)Zreg z:(Select0 (ANDCCconst [c] (MOVHZload ptr x)))) => z
(MOVHZreg z:(AND y (MOVHZload ptr x))) => z
-(MOVWZreg z:(ANDconst [c] (MOVWZload ptr x))) => z
+(MOVWZreg z:(Select0 (ANDCCconst [c] (MOVWZload ptr x)))) => z
(MOVWZreg z:(AND y (MOVWZload ptr x))) => z
// Arithmetic constant ops
(SLDconst [c] z:(MOVHZreg x)) && c < 16 && z.Uses == 1 => (CLRLSLDI [newPPC64ShiftAuxInt(c,48,63,64)] x)
(SLDconst [c] z:(MOVWZreg x)) && c < 32 && z.Uses == 1 => (CLRLSLDI [newPPC64ShiftAuxInt(c,32,63,64)] x)
-(SLDconst [c] z:(ANDconst [d] x)) && z.Uses == 1 && isPPC64ValidShiftMask(d) && c <= (64-getPPC64ShiftMaskLength(d)) => (CLRLSLDI [newPPC64ShiftAuxInt(c,64-getPPC64ShiftMaskLength(d),63,64)] x)
+(SLDconst [c] z:(Select0 (ANDCCconst [d] x))) && z.Uses == 1 && isPPC64ValidShiftMask(d) && c <= (64-getPPC64ShiftMaskLength(d)) => (CLRLSLDI [newPPC64ShiftAuxInt(c,64-getPPC64ShiftMaskLength(d),63,64)] x)
(SLDconst [c] z:(AND (MOVDconst [d]) x)) && z.Uses == 1 && isPPC64ValidShiftMask(d) && c<=(64-getPPC64ShiftMaskLength(d)) => (CLRLSLDI [newPPC64ShiftAuxInt(c,64-getPPC64ShiftMaskLength(d),63,64)] x)
(SLWconst [c] z:(MOVBZreg x)) && z.Uses == 1 && c < 8 => (CLRLSLWI [newPPC64ShiftAuxInt(c,24,31,32)] x)
(SLWconst [c] z:(MOVHZreg x)) && z.Uses == 1 && c < 16 => (CLRLSLWI [newPPC64ShiftAuxInt(c,16,31,32)] x)
-(SLWconst [c] z:(ANDconst [d] x)) && z.Uses == 1 && isPPC64ValidShiftMask(d) && c<=(32-getPPC64ShiftMaskLength(d)) => (CLRLSLWI [newPPC64ShiftAuxInt(c,32-getPPC64ShiftMaskLength(d),31,32)] x)
+(SLWconst [c] z:(Select0 (ANDCCconst [d] x))) && z.Uses == 1 && isPPC64ValidShiftMask(d) && c<=(32-getPPC64ShiftMaskLength(d)) => (CLRLSLWI [newPPC64ShiftAuxInt(c,32-getPPC64ShiftMaskLength(d),31,32)] x)
(SLWconst [c] z:(AND (MOVDconst [d]) x)) && z.Uses == 1 && isPPC64ValidShiftMask(d) && c<=(32-getPPC64ShiftMaskLength(d)) => (CLRLSLWI [newPPC64ShiftAuxInt(c,32-getPPC64ShiftMaskLength(d),31,32)] x)
// special case for power9
(SL(W|D)const [c] z:(MOVWreg x)) && c < 32 && buildcfg.GOPPC64 >= 9 => (EXTSWSLconst [c] x)
(ISEL [4] x _ (Flag(EQ|GT))) => x
(ISEL [4] _ y (FlagLT)) => y
+(ISEL [2] x y (CMPconst [0] (Select0 (ANDCCconst [1] z)))) => (ISEL [2] x y (Select1 <types.TypeFlags> (ANDCCconst [1] z )))
+(ISEL [6] x y (CMPconst [0] (Select0 (ANDCCconst [1] z)))) => (ISEL [6] x y (Select1 <types.TypeFlags> (ANDCCconst [1] z )))
+(ISELB [2] x (CMPconst [0] (Select0 (ANDCCconst [1] z)))) => (XORconst [1] (Select0 <typ.UInt64> (ANDCCconst [1] z )))
+(ISELB [6] x (CMPconst [0] (Select0 (ANDCCconst [1] z)))) => (Select0 <typ.UInt64> (ANDCCconst [1] z ))
+
+(ISEL [2] x y (CMPWconst [0] (Select0 (ANDCCconst [1] z)))) => (ISEL [2] x y (Select1 <types.TypeFlags> (ANDCCconst [1] z )))
+(ISEL [6] x y (CMPWconst [0] (Select0 (ANDCCconst [1] z)))) => (ISEL [6] x y (Select1 <types.TypeFlags> (ANDCCconst [1] z )))
+(ISELB [2] x (CMPWconst [0] (Select0 (ANDCCconst [1] z)))) => (XORconst [1] (Select0 <typ.UInt64> (ANDCCconst [1] z )))
+(ISELB [6] x (CMPWconst [0] (Select0 (ANDCCconst [1] z)))) => (Select0 <typ.UInt64> (ANDCCconst [1] z ))
+
(ISELB [n] (MOVDconst [1]) (InvertFlags bool)) && n%4 == 0 => (ISELB [n+1] (MOVDconst [1]) bool)
(ISELB [n] (MOVDconst [1]) (InvertFlags bool)) && n%4 == 1 => (ISELB [n-1] (MOVDconst [1]) bool)
(ISELB [n] (MOVDconst [1]) (InvertFlags bool)) && n%4 == 2 => (ISELB [n] (MOVDconst [1]) bool)
(XORconst [1] (ISELB [4] (MOVDconst [1]) cmp)) => (ISELB [0] (MOVDconst [1]) cmp)
// A particular pattern seen in cgo code:
-(AND (MOVDconst [c]) x:(MOVBZload _ _)) => (ANDconst [c&0xFF] x)
+(AND (MOVDconst [c]) x:(MOVBZload _ _)) => (Select0 (ANDCCconst [c&0xFF] x))
// floating point negative abs
(FNEG (FABS x)) => (FNABS x)
return rewriteValuePPC64_OpPPC64AND(v)
case OpPPC64ANDN:
return rewriteValuePPC64_OpPPC64ANDN(v)
- case OpPPC64ANDconst:
- return rewriteValuePPC64_OpPPC64ANDconst(v)
case OpPPC64CLRLSLDI:
return rewriteValuePPC64_OpPPC64CLRLSLDI(v)
case OpPPC64CMP:
b := v.Block
typ := &b.Func.Config.Types
// match: (EqB x y)
- // result: (ANDconst [1] (EQV x y))
+ // result: (Select0 <typ.Int> (ANDCCconst [1] (EQV x y)))
for {
x := v_0
y := v_1
- v.reset(OpPPC64ANDconst)
- v.AuxInt = int64ToAuxInt(1)
- v0 := b.NewValue0(v.Pos, OpPPC64EQV, typ.Int64)
- v0.AddArg2(x, y)
+ v.reset(OpSelect0)
+ v.Type = typ.Int
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(1)
+ v1 := b.NewValue0(v.Pos, OpPPC64EQV, typ.Int64)
+ v1.AddArg2(x, y)
+ v0.AddArg(v1)
v.AddArg(v0)
return true
}
return true
}
// match: (Lsh32x64 x (AND y (MOVDconst [31])))
- // result: (SLW x (ANDconst <typ.Int32> [31] y))
+ // result: (SLW x (Select0 <typ.Int32> (ANDCCconst [31] y)))
for {
x := v_0
if v_1.Op != OpPPC64AND {
continue
}
v.reset(OpPPC64SLW)
- v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int32)
- v0.AuxInt = int64ToAuxInt(31)
- v0.AddArg(y)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int32)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(31)
+ v1.AddArg(y)
+ v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
break
}
- // match: (Lsh32x64 x (ANDconst <typ.Int32> [31] y))
- // result: (SLW x (ANDconst <typ.Int32> [31] y))
+ // match: (Lsh32x64 x (Select0 <typ.Int32> (ANDCCconst [31] y)))
+ // result: (SLW x (Select0 <typ.Int32> (ANDCCconst [31] y)))
for {
x := v_0
- if v_1.Op != OpPPC64ANDconst || v_1.Type != typ.Int32 || auxIntToInt64(v_1.AuxInt) != 31 {
+ if v_1.Op != OpSelect0 || v_1.Type != typ.Int32 {
break
}
- y := v_1.Args[0]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_0.AuxInt) != 31 {
+ break
+ }
+ y := v_1_0.Args[0]
v.reset(OpPPC64SLW)
- v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int32)
- v0.AuxInt = int64ToAuxInt(31)
- v0.AddArg(y)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int32)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(31)
+ v1.AddArg(y)
+ v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
return true
}
// match: (Lsh64x64 x (AND y (MOVDconst [63])))
- // result: (SLD x (ANDconst <typ.Int64> [63] y))
+ // result: (SLD x (Select0 <typ.Int64> (ANDCCconst [63] y)))
for {
x := v_0
if v_1.Op != OpPPC64AND {
continue
}
v.reset(OpPPC64SLD)
- v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int64)
- v0.AuxInt = int64ToAuxInt(63)
- v0.AddArg(y)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int64)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(63)
+ v1.AddArg(y)
+ v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
break
}
- // match: (Lsh64x64 x (ANDconst <typ.Int64> [63] y))
- // result: (SLD x (ANDconst <typ.Int64> [63] y))
+ // match: (Lsh64x64 x (Select0 (ANDCCconst <typ.Int64> [63] y)))
+ // result: (SLD x (Select0 <typ.Int64> (ANDCCconst [63] y)))
for {
x := v_0
- if v_1.Op != OpPPC64ANDconst || v_1.Type != typ.Int64 || auxIntToInt64(v_1.AuxInt) != 63 {
+ if v_1.Op != OpSelect0 {
break
}
- y := v_1.Args[0]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64ANDCCconst || v_1_0.Type != typ.Int64 || auxIntToInt64(v_1_0.AuxInt) != 63 {
+ break
+ }
+ y := v_1_0.Args[0]
v.reset(OpPPC64SLD)
- v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int64)
- v0.AuxInt = int64ToAuxInt(63)
- v0.AddArg(y)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int64)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(63)
+ v1.AddArg(y)
+ v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
}
break
}
- // match: (ADD (SLD x (ANDconst [63] y)) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y))))
+ // match: (ADD (SLD x (Select0 (ANDCCconst [63] y))) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y)))))
// result: (ROTL x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpPPC64ANDconst || auxIntToInt64(v_0_1.AuxInt) != 63 {
+ if v_0_1.Op != OpSelect0 {
continue
}
- y := v_0_1.Args[0]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_1_0.AuxInt) != 63 {
+ continue
+ }
+ y := v_0_1_0.Args[0]
if v_1.Op != OpPPC64SRD {
continue
}
continue
}
v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpPPC64ANDconst || v_1_1_1.Type != typ.UInt || auxIntToInt64(v_1_1_1.AuxInt) != 63 || y != v_1_1_1.Args[0] {
+ if v_1_1_1.Op != OpSelect0 || v_1_1_1.Type != typ.UInt {
+ continue
+ }
+ v_1_1_1_0 := v_1_1_1.Args[0]
+ if v_1_1_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_1_1_0.AuxInt) != 63 || y != v_1_1_1_0.Args[0] {
continue
}
v.reset(OpPPC64ROTL)
}
break
}
- // match: (ADD (SLD x (ANDconst [63] y)) (SRD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y))))
+ // match: (ADD (SLD x (Select0 (ANDCCconst [63] y))) (SRD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y)))))
// result: (ROTL x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpPPC64ANDconst || auxIntToInt64(v_0_1.AuxInt) != 63 {
+ if v_0_1.Op != OpSelect0 {
continue
}
- y := v_0_1.Args[0]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_1_0.AuxInt) != 63 {
+ continue
+ }
+ y := v_0_1_0.Args[0]
if v_1.Op != OpPPC64SRD {
continue
}
continue
}
v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpPPC64ANDconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 63 || y != v_1_1_0.Args[0] {
+ if v_1_1_0.Op != OpSelect0 || v_1_1_0.Type != typ.UInt {
+ continue
+ }
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 63 || y != v_1_1_0_0.Args[0] {
continue
}
v.reset(OpPPC64ROTL)
}
break
}
- // match: (ADD (SLW x (ANDconst [31] y)) (SRW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y))))
+ // match: (ADD (SLW x (Select0 (ANDCCconst [31] y))) (SRW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y)))))
// result: (ROTLW x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpPPC64ANDconst || auxIntToInt64(v_0_1.AuxInt) != 31 {
+ if v_0_1.Op != OpSelect0 {
continue
}
- y := v_0_1.Args[0]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_1_0.AuxInt) != 31 {
+ continue
+ }
+ y := v_0_1_0.Args[0]
if v_1.Op != OpPPC64SRW {
continue
}
continue
}
v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpPPC64ANDconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 31 || y != v_1_1_0.Args[0] {
+ if v_1_1_0.Op != OpSelect0 || v_1_1_0.Type != typ.UInt {
+ continue
+ }
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 31 || y != v_1_1_0_0.Args[0] {
continue
}
v.reset(OpPPC64ROTLW)
}
break
}
- // match: (ADD (SLW x (ANDconst [31] y)) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y))))
+ // match: (ADD (SLW x (Select0 (ANDCCconst [31] y))) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y)))))
// result: (ROTLW x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpPPC64ANDconst || auxIntToInt64(v_0_1.AuxInt) != 31 {
+ if v_0_1.Op != OpSelect0 {
continue
}
- y := v_0_1.Args[0]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_1_0.AuxInt) != 31 {
+ continue
+ }
+ y := v_0_1_0.Args[0]
if v_1.Op != OpPPC64SRW {
continue
}
continue
}
v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpPPC64ANDconst || v_1_1_1.Type != typ.UInt || auxIntToInt64(v_1_1_1.AuxInt) != 31 || y != v_1_1_1.Args[0] {
+ if v_1_1_1.Op != OpSelect0 || v_1_1_1.Type != typ.UInt {
+ continue
+ }
+ v_1_1_1_0 := v_1_1_1.Args[0]
+ if v_1_1_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_1_1_0.AuxInt) != 31 || y != v_1_1_1_0.Args[0] {
continue
}
v.reset(OpPPC64ROTLW)
func rewriteValuePPC64_OpPPC64AND(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
+ b := v.Block
+ typ := &b.Func.Config.Types
// match: (AND (MOVDconst [m]) (ROTLWconst [r] x))
// cond: isPPC64WordRotateMask(m)
// result: (RLWINM [encodePPC64RotateMask(r,m,32)] x)
}
// match: (AND x (MOVDconst [c]))
// cond: isU16Bit(c)
- // result: (ANDconst [c] x)
+ // result: (Select0 (ANDCCconst [c] x))
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x := v_0
if !(isU16Bit(c)) {
continue
}
- v.reset(OpPPC64ANDconst)
- v.AuxInt = int64ToAuxInt(c)
- v.AddArg(x)
+ v.reset(OpSelect0)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(c)
+ v0.AddArg(x)
+ v.AddArg(v0)
return true
}
break
break
}
// match: (AND (MOVDconst [c]) x:(MOVBZload _ _))
- // result: (ANDconst [c&0xFF] x)
+ // result: (Select0 (ANDCCconst [c&0xFF] x))
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpPPC64MOVDconst {
if x.Op != OpPPC64MOVBZload {
continue
}
- v.reset(OpPPC64ANDconst)
- v.AuxInt = int64ToAuxInt(c & 0xFF)
- v.AddArg(x)
+ v.reset(OpSelect0)
+ v0 := b.NewValue0(x.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(c & 0xFF)
+ v0.AddArg(x)
+ v.AddArg(v0)
return true
}
break
}
return false
}
-func rewriteValuePPC64_OpPPC64ANDconst(v *Value) bool {
- v_0 := v.Args[0]
- // match: (ANDconst [m] (ROTLWconst [r] x))
- // cond: isPPC64WordRotateMask(m)
- // result: (RLWINM [encodePPC64RotateMask(r,m,32)] x)
- for {
- m := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64ROTLWconst {
- break
- }
- r := auxIntToInt64(v_0.AuxInt)
- x := v_0.Args[0]
- if !(isPPC64WordRotateMask(m)) {
- break
- }
- v.reset(OpPPC64RLWINM)
- v.AuxInt = int64ToAuxInt(encodePPC64RotateMask(r, m, 32))
- v.AddArg(x)
- return true
- }
- // match: (ANDconst [m] (ROTLW x r))
- // cond: isPPC64WordRotateMask(m)
- // result: (RLWNM [encodePPC64RotateMask(0,m,32)] x r)
- for {
- m := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64ROTLW {
- break
- }
- r := v_0.Args[1]
- x := v_0.Args[0]
- if !(isPPC64WordRotateMask(m)) {
- break
- }
- v.reset(OpPPC64RLWNM)
- v.AuxInt = int64ToAuxInt(encodePPC64RotateMask(0, m, 32))
- v.AddArg2(x, r)
- return true
- }
- // match: (ANDconst [m] (SRWconst x [s]))
- // cond: mergePPC64RShiftMask(m,s,32) == 0
- // result: (MOVDconst [0])
- for {
- m := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64SRWconst {
- break
- }
- s := auxIntToInt64(v_0.AuxInt)
- if !(mergePPC64RShiftMask(m, s, 32) == 0) {
- break
- }
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = int64ToAuxInt(0)
- return true
- }
- // match: (ANDconst [m] (SRWconst x [s]))
- // cond: mergePPC64AndSrwi(m,s) != 0
- // result: (RLWINM [mergePPC64AndSrwi(m,s)] x)
- for {
- m := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64SRWconst {
- break
- }
- s := auxIntToInt64(v_0.AuxInt)
- x := v_0.Args[0]
- if !(mergePPC64AndSrwi(m, s) != 0) {
- break
- }
- v.reset(OpPPC64RLWINM)
- v.AuxInt = int64ToAuxInt(mergePPC64AndSrwi(m, s))
- v.AddArg(x)
- return true
- }
- // match: (ANDconst [c] (ANDconst [d] x))
- // result: (ANDconst [c&d] x)
- for {
- c := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64ANDconst {
- break
- }
- d := auxIntToInt64(v_0.AuxInt)
- x := v_0.Args[0]
- v.reset(OpPPC64ANDconst)
- v.AuxInt = int64ToAuxInt(c & d)
- v.AddArg(x)
- return true
- }
- // match: (ANDconst [-1] x)
- // result: x
- for {
- if auxIntToInt64(v.AuxInt) != -1 {
- break
- }
- x := v_0
- v.copyOf(x)
- return true
- }
- // match: (ANDconst [0] _)
- // result: (MOVDconst [0])
- for {
- if auxIntToInt64(v.AuxInt) != 0 {
- break
- }
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = int64ToAuxInt(0)
- return true
- }
- // match: (ANDconst [c] y:(MOVBZreg _))
- // cond: c&0xFF == 0xFF
- // result: y
- for {
- c := auxIntToInt64(v.AuxInt)
- y := v_0
- if y.Op != OpPPC64MOVBZreg || !(c&0xFF == 0xFF) {
- break
- }
- v.copyOf(y)
- return true
- }
- // match: (ANDconst [0xFF] y:(MOVBreg _))
- // result: y
- for {
- if auxIntToInt64(v.AuxInt) != 0xFF {
- break
- }
- y := v_0
- if y.Op != OpPPC64MOVBreg {
- break
- }
- v.copyOf(y)
- return true
- }
- // match: (ANDconst [c] y:(MOVHZreg _))
- // cond: c&0xFFFF == 0xFFFF
- // result: y
- for {
- c := auxIntToInt64(v.AuxInt)
- y := v_0
- if y.Op != OpPPC64MOVHZreg || !(c&0xFFFF == 0xFFFF) {
- break
- }
- v.copyOf(y)
- return true
- }
- // match: (ANDconst [0xFFFF] y:(MOVHreg _))
- // result: y
- for {
- if auxIntToInt64(v.AuxInt) != 0xFFFF {
- break
- }
- y := v_0
- if y.Op != OpPPC64MOVHreg {
- break
- }
- v.copyOf(y)
- return true
- }
- // match: (ANDconst [c] (MOVBreg x))
- // result: (ANDconst [c&0xFF] x)
- for {
- c := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64MOVBreg {
- break
- }
- x := v_0.Args[0]
- v.reset(OpPPC64ANDconst)
- v.AuxInt = int64ToAuxInt(c & 0xFF)
- v.AddArg(x)
- return true
- }
- // match: (ANDconst [c] (MOVBZreg x))
- // result: (ANDconst [c&0xFF] x)
- for {
- c := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64MOVBZreg {
- break
- }
- x := v_0.Args[0]
- v.reset(OpPPC64ANDconst)
- v.AuxInt = int64ToAuxInt(c & 0xFF)
- v.AddArg(x)
- return true
- }
- // match: (ANDconst [c] (MOVHreg x))
- // result: (ANDconst [c&0xFFFF] x)
- for {
- c := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64MOVHreg {
- break
- }
- x := v_0.Args[0]
- v.reset(OpPPC64ANDconst)
- v.AuxInt = int64ToAuxInt(c & 0xFFFF)
- v.AddArg(x)
- return true
- }
- // match: (ANDconst [c] (MOVHZreg x))
- // result: (ANDconst [c&0xFFFF] x)
- for {
- c := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64MOVHZreg {
- break
- }
- x := v_0.Args[0]
- v.reset(OpPPC64ANDconst)
- v.AuxInt = int64ToAuxInt(c & 0xFFFF)
- v.AddArg(x)
- return true
- }
- // match: (ANDconst [c] (MOVWreg x))
- // result: (ANDconst [c&0xFFFFFFFF] x)
- for {
- c := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64MOVWreg {
- break
- }
- x := v_0.Args[0]
- v.reset(OpPPC64ANDconst)
- v.AuxInt = int64ToAuxInt(c & 0xFFFFFFFF)
- v.AddArg(x)
- return true
- }
- // match: (ANDconst [c] (MOVWZreg x))
- // result: (ANDconst [c&0xFFFFFFFF] x)
- for {
- c := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64MOVWZreg {
- break
- }
- x := v_0.Args[0]
- v.reset(OpPPC64ANDconst)
- v.AuxInt = int64ToAuxInt(c & 0xFFFFFFFF)
- v.AddArg(x)
- return true
- }
- return false
-}
func rewriteValuePPC64_OpPPC64CLRLSLDI(v *Value) bool {
v_0 := v.Args[0]
// match: (CLRLSLDI [c] (SRWconst [s] x))
v_2 := v.Args[2]
v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (ISEL [0] (ANDconst [d] y) (MOVDconst [-1]) (CMPU (ANDconst [d] y) (MOVDconst [c])))
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (ISEL [0] (Select0 (ANDCCconst [d] y)) (MOVDconst [-1]) (CMPU (Select0 (ANDCCconst [d] y)) (MOVDconst [c])))
// cond: c >= d
- // result: (ANDconst [d] y)
+ // result: (Select0 (ANDCCconst [d] y))
for {
- if auxIntToInt32(v.AuxInt) != 0 || v_0.Op != OpPPC64ANDconst {
+ if auxIntToInt32(v.AuxInt) != 0 || v_0.Op != OpSelect0 {
break
}
- d := auxIntToInt64(v_0.AuxInt)
- y := v_0.Args[0]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ d := auxIntToInt64(v_0_0.AuxInt)
+ y := v_0_0.Args[0]
if v_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 || v_2.Op != OpPPC64CMPU {
break
}
_ = v_2.Args[1]
v_2_0 := v_2.Args[0]
- if v_2_0.Op != OpPPC64ANDconst || auxIntToInt64(v_2_0.AuxInt) != d || y != v_2_0.Args[0] {
+ if v_2_0.Op != OpSelect0 {
+ break
+ }
+ v_2_0_0 := v_2_0.Args[0]
+ if v_2_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_2_0_0.AuxInt) != d || y != v_2_0_0.Args[0] {
break
}
v_2_1 := v_2.Args[1]
if !(c >= d) {
break
}
- v.reset(OpPPC64ANDconst)
- v.AuxInt = int64ToAuxInt(d)
- v.AddArg(y)
- return true
+ v.reset(OpSelect0)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(d)
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
}
- // match: (ISEL [0] (ANDconst [d] y) (MOVDconst [-1]) (CMPUconst [c] (ANDconst [d] y)))
+ // match: (ISEL [0] (Select0 (ANDCCconst [d] y)) (MOVDconst [-1]) (CMPUconst [c] (Select0 (ANDCCconst [d] y))))
// cond: c >= d
- // result: (ANDconst [d] y)
+ // result: (Select0 (ANDCCconst [d] y))
for {
- if auxIntToInt32(v.AuxInt) != 0 || v_0.Op != OpPPC64ANDconst {
+ if auxIntToInt32(v.AuxInt) != 0 || v_0.Op != OpSelect0 {
break
}
- d := auxIntToInt64(v_0.AuxInt)
- y := v_0.Args[0]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ d := auxIntToInt64(v_0_0.AuxInt)
+ y := v_0_0.Args[0]
if v_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 || v_2.Op != OpPPC64CMPUconst {
break
}
c := auxIntToInt64(v_2.AuxInt)
v_2_0 := v_2.Args[0]
- if v_2_0.Op != OpPPC64ANDconst || auxIntToInt64(v_2_0.AuxInt) != d || y != v_2_0.Args[0] || !(c >= d) {
+ if v_2_0.Op != OpSelect0 {
+ break
+ }
+ v_2_0_0 := v_2_0.Args[0]
+ if v_2_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_2_0_0.AuxInt) != d || y != v_2_0_0.Args[0] || !(c >= d) {
break
}
- v.reset(OpPPC64ANDconst)
- v.AuxInt = int64ToAuxInt(d)
- v.AddArg(y)
+ v.reset(OpSelect0)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(d)
+ v0.AddArg(y)
+ v.AddArg(v0)
return true
}
// match: (ISEL [6] x y (CMPWconst [0] (ISELB [c] one cmp)))
v.copyOf(y)
return true
}
+ // match: (ISEL [2] x y (CMPconst [0] (Select0 (ANDCCconst [1] z))))
+ // result: (ISEL [2] x y (Select1 <types.TypeFlags> (ANDCCconst [1] z )))
+ for {
+ if auxIntToInt32(v.AuxInt) != 2 {
+ break
+ }
+ x := v_0
+ y := v_1
+ if v_2.Op != OpPPC64CMPconst || auxIntToInt64(v_2.AuxInt) != 0 {
+ break
+ }
+ v_2_0 := v_2.Args[0]
+ if v_2_0.Op != OpSelect0 {
+ break
+ }
+ v_2_0_0 := v_2_0.Args[0]
+ if v_2_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_2_0_0.AuxInt) != 1 {
+ break
+ }
+ z := v_2_0_0.Args[0]
+ v.reset(OpPPC64ISEL)
+ v.AuxInt = int32ToAuxInt(2)
+ v0 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(1)
+ v1.AddArg(z)
+ v0.AddArg(v1)
+ v.AddArg3(x, y, v0)
+ return true
+ }
+ // match: (ISEL [6] x y (CMPconst [0] (Select0 (ANDCCconst [1] z))))
+ // result: (ISEL [6] x y (Select1 <types.TypeFlags> (ANDCCconst [1] z )))
+ for {
+ if auxIntToInt32(v.AuxInt) != 6 {
+ break
+ }
+ x := v_0
+ y := v_1
+ if v_2.Op != OpPPC64CMPconst || auxIntToInt64(v_2.AuxInt) != 0 {
+ break
+ }
+ v_2_0 := v_2.Args[0]
+ if v_2_0.Op != OpSelect0 {
+ break
+ }
+ v_2_0_0 := v_2_0.Args[0]
+ if v_2_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_2_0_0.AuxInt) != 1 {
+ break
+ }
+ z := v_2_0_0.Args[0]
+ v.reset(OpPPC64ISEL)
+ v.AuxInt = int32ToAuxInt(6)
+ v0 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(1)
+ v1.AddArg(z)
+ v0.AddArg(v1)
+ v.AddArg3(x, y, v0)
+ return true
+ }
+ // match: (ISEL [2] x y (CMPWconst [0] (Select0 (ANDCCconst [1] z))))
+ // result: (ISEL [2] x y (Select1 <types.TypeFlags> (ANDCCconst [1] z )))
+ for {
+ if auxIntToInt32(v.AuxInt) != 2 {
+ break
+ }
+ x := v_0
+ y := v_1
+ if v_2.Op != OpPPC64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 {
+ break
+ }
+ v_2_0 := v_2.Args[0]
+ if v_2_0.Op != OpSelect0 {
+ break
+ }
+ v_2_0_0 := v_2_0.Args[0]
+ if v_2_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_2_0_0.AuxInt) != 1 {
+ break
+ }
+ z := v_2_0_0.Args[0]
+ v.reset(OpPPC64ISEL)
+ v.AuxInt = int32ToAuxInt(2)
+ v0 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(1)
+ v1.AddArg(z)
+ v0.AddArg(v1)
+ v.AddArg3(x, y, v0)
+ return true
+ }
+ // match: (ISEL [6] x y (CMPWconst [0] (Select0 (ANDCCconst [1] z))))
+ // result: (ISEL [6] x y (Select1 <types.TypeFlags> (ANDCCconst [1] z )))
+ for {
+ if auxIntToInt32(v.AuxInt) != 6 {
+ break
+ }
+ x := v_0
+ y := v_1
+ if v_2.Op != OpPPC64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 {
+ break
+ }
+ v_2_0 := v_2.Args[0]
+ if v_2_0.Op != OpSelect0 {
+ break
+ }
+ v_2_0_0 := v_2_0.Args[0]
+ if v_2_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_2_0_0.AuxInt) != 1 {
+ break
+ }
+ z := v_2_0_0.Args[0]
+ v.reset(OpPPC64ISEL)
+ v.AuxInt = int32ToAuxInt(6)
+ v0 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(1)
+ v1.AddArg(z)
+ v0.AddArg(v1)
+ v.AddArg3(x, y, v0)
+ return true
+ }
// match: (ISEL [n] x y (InvertFlags bool))
// cond: n%4 == 0
// result: (ISEL [n+1] x y bool)
v.AuxInt = int64ToAuxInt(1)
return true
}
+ // match: (ISELB [2] x (CMPconst [0] (Select0 (ANDCCconst [1] z))))
+ // result: (XORconst [1] (Select0 <typ.UInt64> (ANDCCconst [1] z )))
+ for {
+ if auxIntToInt32(v.AuxInt) != 2 {
+ break
+ }
+ if v_1.Op != OpPPC64CMPconst || auxIntToInt64(v_1.AuxInt) != 0 {
+ break
+ }
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSelect0 {
+ break
+ }
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_0_0.AuxInt) != 1 {
+ break
+ }
+ z := v_1_0_0.Args[0]
+ v.reset(OpPPC64XORconst)
+ v.AuxInt = int64ToAuxInt(1)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(1)
+ v1.AddArg(z)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (ISELB [6] x (CMPconst [0] (Select0 (ANDCCconst [1] z))))
+ // result: (Select0 <typ.UInt64> (ANDCCconst [1] z ))
+ for {
+ if auxIntToInt32(v.AuxInt) != 6 {
+ break
+ }
+ if v_1.Op != OpPPC64CMPconst || auxIntToInt64(v_1.AuxInt) != 0 {
+ break
+ }
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSelect0 {
+ break
+ }
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_0_0.AuxInt) != 1 {
+ break
+ }
+ z := v_1_0_0.Args[0]
+ v.reset(OpSelect0)
+ v.Type = typ.UInt64
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(1)
+ v0.AddArg(z)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (ISELB [2] x (CMPWconst [0] (Select0 (ANDCCconst [1] z))))
+ // result: (XORconst [1] (Select0 <typ.UInt64> (ANDCCconst [1] z )))
+ for {
+ if auxIntToInt32(v.AuxInt) != 2 {
+ break
+ }
+ if v_1.Op != OpPPC64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 {
+ break
+ }
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSelect0 {
+ break
+ }
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_0_0.AuxInt) != 1 {
+ break
+ }
+ z := v_1_0_0.Args[0]
+ v.reset(OpPPC64XORconst)
+ v.AuxInt = int64ToAuxInt(1)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(1)
+ v1.AddArg(z)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (ISELB [6] x (CMPWconst [0] (Select0 (ANDCCconst [1] z))))
+ // result: (Select0 <typ.UInt64> (ANDCCconst [1] z ))
+ for {
+ if auxIntToInt32(v.AuxInt) != 6 {
+ break
+ }
+ if v_1.Op != OpPPC64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 {
+ break
+ }
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSelect0 {
+ break
+ }
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_0_0.AuxInt) != 1 {
+ break
+ }
+ z := v_1_0_0.Args[0]
+ v.reset(OpSelect0)
+ v.Type = typ.UInt64
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(1)
+ v0.AddArg(z)
+ v.AddArg(v0)
+ return true
+ }
// match: (ISELB [n] (MOVDconst [1]) (InvertFlags bool))
// cond: n%4 == 0
// result: (ISELB [n+1] (MOVDconst [1]) bool)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (MOVBZreg y:(ANDconst [c] _))
+ // match: (MOVBZreg y:(Select0 (ANDCCconst [c] _)))
// cond: uint64(c) <= 0xFF
// result: y
for {
y := v_0
- if y.Op != OpPPC64ANDconst {
+ if y.Op != OpSelect0 {
+ break
+ }
+ y_0 := y.Args[0]
+ if y_0.Op != OpPPC64ANDCCconst {
break
}
- c := auxIntToInt64(y.AuxInt)
+ c := auxIntToInt64(y_0.AuxInt)
if !(uint64(c) <= 0xFF) {
break
}
}
break
}
- // match: (MOVBZreg z:(ANDconst [c] (MOVBZload ptr x)))
+ // match: (MOVBZreg z:(Select0 (ANDCCconst [c] (MOVBZload ptr x))))
// result: z
for {
z := v_0
- if z.Op != OpPPC64ANDconst {
+ if z.Op != OpSelect0 {
break
}
z_0 := z.Args[0]
- if z_0.Op != OpPPC64MOVBZload {
+ if z_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ z_0_0 := z_0.Args[0]
+ if z_0_0.Op != OpPPC64MOVBZload {
break
}
v.copyOf(z)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (MOVBreg y:(ANDconst [c] _))
+ // match: (MOVBreg y:(Select0 (ANDCCconst [c] _)))
// cond: uint64(c) <= 0x7F
// result: y
for {
y := v_0
- if y.Op != OpPPC64ANDconst {
+ if y.Op != OpSelect0 {
+ break
+ }
+ y_0 := y.Args[0]
+ if y_0.Op != OpPPC64ANDCCconst {
break
}
- c := auxIntToInt64(y.AuxInt)
+ c := auxIntToInt64(y_0.AuxInt)
if !(uint64(c) <= 0x7F) {
break
}
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (MOVHZreg y:(ANDconst [c] _))
+ // match: (MOVHZreg y:(Select0 (ANDCCconst [c] _)))
// cond: uint64(c) <= 0xFFFF
// result: y
for {
y := v_0
- if y.Op != OpPPC64ANDconst {
+ if y.Op != OpSelect0 {
+ break
+ }
+ y_0 := y.Args[0]
+ if y_0.Op != OpPPC64ANDCCconst {
break
}
- c := auxIntToInt64(y.AuxInt)
+ c := auxIntToInt64(y_0.AuxInt)
if !(uint64(c) <= 0xFFFF) {
break
}
}
break
}
- // match: (MOVHZreg z:(ANDconst [c] (MOVBZload ptr x)))
+ // match: (MOVHZreg z:(Select0 (ANDCCconst [c] (MOVBZload ptr x))))
// result: z
for {
z := v_0
- if z.Op != OpPPC64ANDconst {
+ if z.Op != OpSelect0 {
break
}
z_0 := z.Args[0]
- if z_0.Op != OpPPC64MOVBZload {
+ if z_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ z_0_0 := z_0.Args[0]
+ if z_0_0.Op != OpPPC64MOVBZload {
break
}
v.copyOf(z)
return true
}
- // match: (MOVHZreg z:(ANDconst [c] (MOVHZload ptr x)))
+ // match: (MOVHZreg z:(Select0 (ANDCCconst [c] (MOVHZload ptr x))))
// result: z
for {
z := v_0
- if z.Op != OpPPC64ANDconst {
+ if z.Op != OpSelect0 {
break
}
z_0 := z.Args[0]
- if z_0.Op != OpPPC64MOVHZload {
+ if z_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ z_0_0 := z_0.Args[0]
+ if z_0_0.Op != OpPPC64MOVHZload {
break
}
v.copyOf(z)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (MOVHreg y:(ANDconst [c] _))
+ // match: (MOVHreg y:(Select0 (ANDCCconst [c] _)))
// cond: uint64(c) <= 0x7FFF
// result: y
for {
y := v_0
- if y.Op != OpPPC64ANDconst {
+ if y.Op != OpSelect0 {
break
}
- c := auxIntToInt64(y.AuxInt)
+ y_0 := y.Args[0]
+ if y_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(y_0.AuxInt)
if !(uint64(c) <= 0x7FFF) {
break
}
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (MOVWZreg y:(ANDconst [c] _))
+ // match: (MOVWZreg y:(Select0 (ANDCCconst [c] _)))
// cond: uint64(c) <= 0xFFFFFFFF
// result: y
for {
y := v_0
- if y.Op != OpPPC64ANDconst {
+ if y.Op != OpSelect0 {
+ break
+ }
+ y_0 := y.Args[0]
+ if y_0.Op != OpPPC64ANDCCconst {
break
}
- c := auxIntToInt64(y.AuxInt)
+ c := auxIntToInt64(y_0.AuxInt)
if !(uint64(c) <= 0xFFFFFFFF) {
break
}
}
break
}
- // match: (MOVWZreg z:(ANDconst [c] (MOVBZload ptr x)))
+ // match: (MOVWZreg z:(Select0 (ANDCCconst [c] (MOVBZload ptr x))))
// result: z
for {
z := v_0
- if z.Op != OpPPC64ANDconst {
+ if z.Op != OpSelect0 {
break
}
z_0 := z.Args[0]
- if z_0.Op != OpPPC64MOVBZload {
+ if z_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ z_0_0 := z_0.Args[0]
+ if z_0_0.Op != OpPPC64MOVBZload {
break
}
v.copyOf(z)
return true
}
- // match: (MOVWZreg z:(ANDconst [c] (MOVHZload ptr x)))
+ // match: (MOVWZreg z:(Select0 (ANDCCconst [c] (MOVHZload ptr x))))
// result: z
for {
z := v_0
- if z.Op != OpPPC64ANDconst {
+ if z.Op != OpSelect0 {
break
}
z_0 := z.Args[0]
- if z_0.Op != OpPPC64MOVHZload {
+ if z_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ z_0_0 := z_0.Args[0]
+ if z_0_0.Op != OpPPC64MOVHZload {
break
}
v.copyOf(z)
return true
}
- // match: (MOVWZreg z:(ANDconst [c] (MOVWZload ptr x)))
+ // match: (MOVWZreg z:(Select0 (ANDCCconst [c] (MOVWZload ptr x))))
// result: z
for {
z := v_0
- if z.Op != OpPPC64ANDconst {
+ if z.Op != OpSelect0 {
break
}
z_0 := z.Args[0]
- if z_0.Op != OpPPC64MOVWZload {
+ if z_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ z_0_0 := z_0.Args[0]
+ if z_0_0.Op != OpPPC64MOVWZload {
break
}
v.copyOf(z)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (MOVWreg y:(ANDconst [c] _))
+ // match: (MOVWreg y:(Select0 (ANDCCconst [c] _)))
// cond: uint64(c) <= 0xFFFF
// result: y
for {
y := v_0
- if y.Op != OpPPC64ANDconst {
+ if y.Op != OpSelect0 {
break
}
- c := auxIntToInt64(y.AuxInt)
+ y_0 := y.Args[0]
+ if y_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(y_0.AuxInt)
if !(uint64(c) <= 0xFFFF) {
break
}
}
break
}
- // match: ( OR (SLD x (ANDconst [63] y)) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y))))
+ // match: ( OR (SLD x (Select0 (ANDCCconst [63] y))) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y)))))
// result: (ROTL x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpPPC64ANDconst || auxIntToInt64(v_0_1.AuxInt) != 63 {
+ if v_0_1.Op != OpSelect0 {
continue
}
- y := v_0_1.Args[0]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_1_0.AuxInt) != 63 {
+ continue
+ }
+ y := v_0_1_0.Args[0]
if v_1.Op != OpPPC64SRD {
continue
}
continue
}
v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpPPC64ANDconst || v_1_1_1.Type != typ.UInt || auxIntToInt64(v_1_1_1.AuxInt) != 63 || y != v_1_1_1.Args[0] {
+ if v_1_1_1.Op != OpSelect0 || v_1_1_1.Type != typ.UInt {
+ continue
+ }
+ v_1_1_1_0 := v_1_1_1.Args[0]
+ if v_1_1_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_1_1_0.AuxInt) != 63 || y != v_1_1_1_0.Args[0] {
continue
}
v.reset(OpPPC64ROTL)
}
break
}
- // match: ( OR (SLD x (ANDconst [63] y)) (SRD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y))))
+ // match: ( OR (SLD x (Select0 (ANDCCconst [63] y))) (SRD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y)))))
// result: (ROTL x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpPPC64ANDconst || auxIntToInt64(v_0_1.AuxInt) != 63 {
+ if v_0_1.Op != OpSelect0 {
continue
}
- y := v_0_1.Args[0]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_1_0.AuxInt) != 63 {
+ continue
+ }
+ y := v_0_1_0.Args[0]
if v_1.Op != OpPPC64SRD {
continue
}
continue
}
v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpPPC64ANDconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 63 || y != v_1_1_0.Args[0] {
+ if v_1_1_0.Op != OpSelect0 || v_1_1_0.Type != typ.UInt {
+ continue
+ }
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 63 || y != v_1_1_0_0.Args[0] {
continue
}
v.reset(OpPPC64ROTL)
}
break
}
- // match: ( OR (SLW x (ANDconst [31] y)) (SRW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y))))
+ // match: ( OR (SLW x (Select0 (ANDCCconst [31] y))) (SRW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y)))))
// result: (ROTLW x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpPPC64ANDconst || auxIntToInt64(v_0_1.AuxInt) != 31 {
+ if v_0_1.Op != OpSelect0 {
continue
}
- y := v_0_1.Args[0]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_1_0.AuxInt) != 31 {
+ continue
+ }
+ y := v_0_1_0.Args[0]
if v_1.Op != OpPPC64SRW {
continue
}
continue
}
v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpPPC64ANDconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 31 || y != v_1_1_0.Args[0] {
+ if v_1_1_0.Op != OpSelect0 || v_1_1_0.Type != typ.UInt {
+ continue
+ }
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 31 || y != v_1_1_0_0.Args[0] {
continue
}
v.reset(OpPPC64ROTLW)
}
break
}
- // match: ( OR (SLW x (ANDconst [31] y)) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y))))
+ // match: ( OR (SLW x (Select0 (ANDCCconst [31] y))) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y)))))
// result: (ROTLW x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpPPC64ANDconst || auxIntToInt64(v_0_1.AuxInt) != 31 {
+ if v_0_1.Op != OpSelect0 {
continue
}
- y := v_0_1.Args[0]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_1_0.AuxInt) != 31 {
+ continue
+ }
+ y := v_0_1_0.Args[0]
if v_1.Op != OpPPC64SRW {
continue
}
continue
}
v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpPPC64ANDconst || v_1_1_1.Type != typ.UInt || auxIntToInt64(v_1_1_1.AuxInt) != 31 || y != v_1_1_1.Args[0] {
+ if v_1_1_1.Op != OpSelect0 || v_1_1_1.Type != typ.UInt {
+ continue
+ }
+ v_1_1_1_0 := v_1_1_1.Args[0]
+ if v_1_1_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_1_1_0.AuxInt) != 31 || y != v_1_1_1_0.Args[0] {
continue
}
v.reset(OpPPC64ROTLW)
}
break
}
- // match: (ROTLWconst [r] (ANDconst [m] x))
+ // match: (ROTLWconst [r] (Select0 (ANDCCconst [m] x)))
// cond: isPPC64WordRotateMask(m)
// result: (RLWINM [encodePPC64RotateMask(r,rotateLeft32(m,r),32)] x)
for {
r := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64ANDconst {
+ if v_0.Op != OpSelect0 {
break
}
- m := auxIntToInt64(v_0.AuxInt)
- x := v_0.Args[0]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ m := auxIntToInt64(v_0_0.AuxInt)
+ x := v_0_0.Args[0]
if !(isPPC64WordRotateMask(m)) {
break
}
v.AddArg(x)
return true
}
- // match: (SLDconst [c] z:(ANDconst [d] x))
+ // match: (SLDconst [c] z:(Select0 (ANDCCconst [d] x)))
// cond: z.Uses == 1 && isPPC64ValidShiftMask(d) && c <= (64-getPPC64ShiftMaskLength(d))
// result: (CLRLSLDI [newPPC64ShiftAuxInt(c,64-getPPC64ShiftMaskLength(d),63,64)] x)
for {
c := auxIntToInt64(v.AuxInt)
z := v_0
- if z.Op != OpPPC64ANDconst {
+ if z.Op != OpSelect0 {
break
}
- d := auxIntToInt64(z.AuxInt)
- x := z.Args[0]
+ z_0 := z.Args[0]
+ if z_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ d := auxIntToInt64(z_0.AuxInt)
+ x := z_0.Args[0]
if !(z.Uses == 1 && isPPC64ValidShiftMask(d) && c <= (64-getPPC64ShiftMaskLength(d))) {
break
}
v.AddArg(x)
return true
}
- // match: (SLWconst [c] z:(ANDconst [d] x))
+ // match: (SLWconst [c] z:(Select0 (ANDCCconst [d] x)))
// cond: z.Uses == 1 && isPPC64ValidShiftMask(d) && c<=(32-getPPC64ShiftMaskLength(d))
// result: (CLRLSLWI [newPPC64ShiftAuxInt(c,32-getPPC64ShiftMaskLength(d),31,32)] x)
for {
c := auxIntToInt64(v.AuxInt)
z := v_0
- if z.Op != OpPPC64ANDconst {
+ if z.Op != OpSelect0 {
break
}
- d := auxIntToInt64(z.AuxInt)
- x := z.Args[0]
+ z_0 := z.Args[0]
+ if z_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ d := auxIntToInt64(z_0.AuxInt)
+ x := z_0.Args[0]
if !(z.Uses == 1 && isPPC64ValidShiftMask(d) && c <= (32-getPPC64ShiftMaskLength(d))) {
break
}
}
func rewriteValuePPC64_OpPPC64SRWconst(v *Value) bool {
v_0 := v.Args[0]
- // match: (SRWconst (ANDconst [m] x) [s])
+ // match: (SRWconst (Select0 (ANDCCconst [m] x)) [s])
// cond: mergePPC64RShiftMask(m>>uint(s),s,32) == 0
// result: (MOVDconst [0])
for {
s := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64ANDconst {
+ if v_0.Op != OpSelect0 {
break
}
- m := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ m := auxIntToInt64(v_0_0.AuxInt)
if !(mergePPC64RShiftMask(m>>uint(s), s, 32) == 0) {
break
}
v.AuxInt = int64ToAuxInt(0)
return true
}
- // match: (SRWconst (ANDconst [m] x) [s])
+ // match: (SRWconst (Select0 (ANDCCconst [m] x)) [s])
// cond: mergePPC64AndSrwi(m>>uint(s),s) != 0
// result: (RLWINM [mergePPC64AndSrwi(m>>uint(s),s)] x)
for {
s := auxIntToInt64(v.AuxInt)
- if v_0.Op != OpPPC64ANDconst {
+ if v_0.Op != OpSelect0 {
break
}
- m := auxIntToInt64(v_0.AuxInt)
- x := v_0.Args[0]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ m := auxIntToInt64(v_0_0.AuxInt)
+ x := v_0_0.Args[0]
if !(mergePPC64AndSrwi(m>>uint(s), s) != 0) {
break
}
}
break
}
- // match: (XOR (SLD x (ANDconst [63] y)) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y))))
+ // match: (XOR (SLD x (Select0 (ANDCCconst [63] y))) (SRD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y)))))
// result: (ROTL x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpPPC64ANDconst || auxIntToInt64(v_0_1.AuxInt) != 63 {
+ if v_0_1.Op != OpSelect0 {
continue
}
- y := v_0_1.Args[0]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_1_0.AuxInt) != 63 {
+ continue
+ }
+ y := v_0_1_0.Args[0]
if v_1.Op != OpPPC64SRD {
continue
}
continue
}
v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpPPC64ANDconst || v_1_1_1.Type != typ.UInt || auxIntToInt64(v_1_1_1.AuxInt) != 63 || y != v_1_1_1.Args[0] {
+ if v_1_1_1.Op != OpSelect0 || v_1_1_1.Type != typ.UInt {
+ continue
+ }
+ v_1_1_1_0 := v_1_1_1.Args[0]
+ if v_1_1_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_1_1_0.AuxInt) != 63 || y != v_1_1_1_0.Args[0] {
continue
}
v.reset(OpPPC64ROTL)
}
break
}
- // match: (XOR (SLD x (ANDconst [63] y)) (SRD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y))))
+ // match: (XOR (SLD x (Select0 (ANDCCconst [63] y))) (SRD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y)))))
// result: (ROTL x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpPPC64ANDconst || auxIntToInt64(v_0_1.AuxInt) != 63 {
+ if v_0_1.Op != OpSelect0 {
continue
}
- y := v_0_1.Args[0]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_1_0.AuxInt) != 63 {
+ continue
+ }
+ y := v_0_1_0.Args[0]
if v_1.Op != OpPPC64SRD {
continue
}
continue
}
v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpPPC64ANDconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 63 || y != v_1_1_0.Args[0] {
+ if v_1_1_0.Op != OpSelect0 || v_1_1_0.Type != typ.UInt {
+ continue
+ }
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 63 || y != v_1_1_0_0.Args[0] {
continue
}
v.reset(OpPPC64ROTL)
}
break
}
- // match: (XOR (SLW x (ANDconst [31] y)) (SRW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y))))
+ // match: (XOR (SLW x (Select0 (ANDCCconst [31] y))) (SRW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y)))))
// result: (ROTLW x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpPPC64ANDconst || auxIntToInt64(v_0_1.AuxInt) != 31 {
+ if v_0_1.Op != OpSelect0 {
continue
}
- y := v_0_1.Args[0]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_1_0.AuxInt) != 31 {
+ continue
+ }
+ y := v_0_1_0.Args[0]
if v_1.Op != OpPPC64SRW {
continue
}
continue
}
v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpPPC64ANDconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 31 || y != v_1_1_0.Args[0] {
+ if v_1_1_0.Op != OpSelect0 || v_1_1_0.Type != typ.UInt {
+ continue
+ }
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 31 || y != v_1_1_0_0.Args[0] {
continue
}
v.reset(OpPPC64ROTLW)
}
break
}
- // match: (XOR (SLW x (ANDconst [31] y)) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y))))
+ // match: (XOR (SLW x (Select0 (ANDCCconst [31] y))) (SRW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y)))))
// result: (ROTLW x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpPPC64ANDconst || auxIntToInt64(v_0_1.AuxInt) != 31 {
+ if v_0_1.Op != OpSelect0 {
continue
}
- y := v_0_1.Args[0]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_1_0.AuxInt) != 31 {
+ continue
+ }
+ y := v_0_1_0.Args[0]
if v_1.Op != OpPPC64SRW {
continue
}
continue
}
v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpPPC64ANDconst || v_1_1_1.Type != typ.UInt || auxIntToInt64(v_1_1_1.AuxInt) != 31 || y != v_1_1_1.Args[0] {
+ if v_1_1_1.Op != OpSelect0 || v_1_1_1.Type != typ.UInt {
+ continue
+ }
+ v_1_1_1_0 := v_1_1_1.Args[0]
+ if v_1_1_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_1_1_0.AuxInt) != 31 || y != v_1_1_1_0.Args[0] {
continue
}
v.reset(OpPPC64ROTLW)
return true
}
// match: (Rsh32Ux64 x (AND y (MOVDconst [31])))
- // result: (SRW x (ANDconst <typ.Int32> [31] y))
+ // result: (SRW x (Select0 <typ.Int32> (ANDCCconst [31] y)))
for {
x := v_0
if v_1.Op != OpPPC64AND {
continue
}
v.reset(OpPPC64SRW)
- v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int32)
- v0.AuxInt = int64ToAuxInt(31)
- v0.AddArg(y)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int32)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(31)
+ v1.AddArg(y)
+ v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
break
}
- // match: (Rsh32Ux64 x (ANDconst <typ.UInt> [31] y))
- // result: (SRW x (ANDconst <typ.UInt> [31] y))
+ // match: (Rsh32Ux64 x (Select0 (ANDCCconst <typ.UInt> [31] y)))
+ // result: (SRW x (Select0 <typ.UInt> (ANDCCconst [31] y)))
for {
x := v_0
- if v_1.Op != OpPPC64ANDconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 31 {
+ if v_1.Op != OpSelect0 {
break
}
- y := v_1.Args[0]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64ANDCCconst || v_1_0.Type != typ.UInt || auxIntToInt64(v_1_0.AuxInt) != 31 {
+ break
+ }
+ y := v_1_0.Args[0]
v.reset(OpPPC64SRW)
- v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v0.AuxInt = int64ToAuxInt(31)
- v0.AddArg(y)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(31)
+ v1.AddArg(y)
+ v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
- // match: (Rsh32Ux64 x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
- // result: (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
+ // match: (Rsh32Ux64 x (SUB <typ.UInt> (MOVDconst [32]) (Select0 (ANDCCconst <typ.UInt> [31] y))))
+ // result: (SRW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
break
}
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpPPC64ANDconst || v_1_1.Type != typ.UInt || auxIntToInt64(v_1_1.AuxInt) != 31 {
+ if v_1_1.Op != OpSelect0 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64ANDCCconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 31 {
break
}
- y := v_1_1.Args[0]
+ y := v_1_1_0.Args[0]
v.reset(OpPPC64SRW)
v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
v1.AuxInt = int64ToAuxInt(32)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v2.AuxInt = int64ToAuxInt(31)
- v2.AddArg(y)
+ v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v3.AuxInt = int64ToAuxInt(31)
+ v3.AddArg(y)
+ v2.AddArg(v3)
v0.AddArg2(v1, v2)
v.AddArg2(x, v0)
return true
}
- // match: (Rsh32Ux64 x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y)))
- // result: (SRW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y)))
+ // match: (Rsh32Ux64 x (SUBFCconst <typ.UInt> [32] (Select0 (ANDCCconst <typ.UInt> [31] y))))
+ // result: (SRW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 32 {
break
}
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64ANDconst || v_1_0.Type != typ.UInt || auxIntToInt64(v_1_0.AuxInt) != 31 {
+ if v_1_0.Op != OpSelect0 {
break
}
- y := v_1_0.Args[0]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpPPC64ANDCCconst || v_1_0_0.Type != typ.UInt || auxIntToInt64(v_1_0_0.AuxInt) != 31 {
+ break
+ }
+ y := v_1_0_0.Args[0]
v.reset(OpPPC64SRW)
v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
v0.AuxInt = int64ToAuxInt(32)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v1.AuxInt = int64ToAuxInt(31)
- v1.AddArg(y)
+ v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v2.AuxInt = int64ToAuxInt(31)
+ v2.AddArg(y)
+ v1.AddArg(v2)
v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
// match: (Rsh32Ux64 x (SUB <typ.UInt> (MOVDconst [32]) (AND <typ.UInt> y (MOVDconst [31]))))
- // result: (SRW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
+ // result: (SRW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
v1.AuxInt = int64ToAuxInt(32)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v2.AuxInt = int64ToAuxInt(31)
- v2.AddArg(y)
+ v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v3.AuxInt = int64ToAuxInt(31)
+ v3.AddArg(y)
+ v2.AddArg(v3)
v0.AddArg2(v1, v2)
v.AddArg2(x, v0)
return true
break
}
// match: (Rsh32Ux64 x (SUBFCconst <typ.UInt> [32] (AND <typ.UInt> y (MOVDconst [31]))))
- // result: (SRW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y)))
+ // result: (SRW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 32 {
v.reset(OpPPC64SRW)
v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
v0.AuxInt = int64ToAuxInt(32)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v1.AuxInt = int64ToAuxInt(31)
- v1.AddArg(y)
+ v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v2.AuxInt = int64ToAuxInt(31)
+ v2.AddArg(y)
+ v1.AddArg(v2)
v0.AddArg(v1)
v.AddArg2(x, v0)
return true
return true
}
// match: (Rsh32x64 x (AND y (MOVDconst [31])))
- // result: (SRAW x (ANDconst <typ.Int32> [31] y))
+ // result: (SRAW x (Select0 <typ.Int32> (ANDCCconst [31] y)))
for {
x := v_0
if v_1.Op != OpPPC64AND {
continue
}
v.reset(OpPPC64SRAW)
- v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int32)
- v0.AuxInt = int64ToAuxInt(31)
- v0.AddArg(y)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int32)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(31)
+ v1.AddArg(y)
+ v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
break
}
- // match: (Rsh32x64 x (ANDconst <typ.UInt> [31] y))
- // result: (SRAW x (ANDconst <typ.UInt> [31] y))
+ // match: (Rsh32x64 x (Select0 (ANDCCconst <typ.UInt> [31] y)))
+ // result: (SRAW x (Select0 <typ.UInt> (ANDCCconst [31] y)))
for {
x := v_0
- if v_1.Op != OpPPC64ANDconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 31 {
+ if v_1.Op != OpSelect0 {
break
}
- y := v_1.Args[0]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64ANDCCconst || v_1_0.Type != typ.UInt || auxIntToInt64(v_1_0.AuxInt) != 31 {
+ break
+ }
+ y := v_1_0.Args[0]
v.reset(OpPPC64SRAW)
- v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v0.AuxInt = int64ToAuxInt(31)
- v0.AddArg(y)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(31)
+ v1.AddArg(y)
+ v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
- // match: (Rsh32x64 x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
- // result: (SRAW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
+ // match: (Rsh32x64 x (SUB <typ.UInt> (MOVDconst [32]) (Select0 (ANDCCconst <typ.UInt> [31] y))))
+ // result: (SRAW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
break
}
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpPPC64ANDconst || v_1_1.Type != typ.UInt || auxIntToInt64(v_1_1.AuxInt) != 31 {
+ if v_1_1.Op != OpSelect0 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64ANDCCconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 31 {
break
}
- y := v_1_1.Args[0]
+ y := v_1_1_0.Args[0]
v.reset(OpPPC64SRAW)
v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
v1.AuxInt = int64ToAuxInt(32)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v2.AuxInt = int64ToAuxInt(31)
- v2.AddArg(y)
+ v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v3.AuxInt = int64ToAuxInt(31)
+ v3.AddArg(y)
+ v2.AddArg(v3)
v0.AddArg2(v1, v2)
v.AddArg2(x, v0)
return true
}
- // match: (Rsh32x64 x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y)))
- // result: (SRAW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y)))
+ // match: (Rsh32x64 x (SUBFCconst <typ.UInt> [32] (Select0 (ANDCCconst <typ.UInt> [31] y))))
+ // result: (SRAW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 32 {
break
}
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64ANDconst || v_1_0.Type != typ.UInt || auxIntToInt64(v_1_0.AuxInt) != 31 {
+ if v_1_0.Op != OpSelect0 {
break
}
- y := v_1_0.Args[0]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpPPC64ANDCCconst || v_1_0_0.Type != typ.UInt || auxIntToInt64(v_1_0_0.AuxInt) != 31 {
+ break
+ }
+ y := v_1_0_0.Args[0]
v.reset(OpPPC64SRAW)
v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
v0.AuxInt = int64ToAuxInt(32)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v1.AuxInt = int64ToAuxInt(31)
- v1.AddArg(y)
+ v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v2.AuxInt = int64ToAuxInt(31)
+ v2.AddArg(y)
+ v1.AddArg(v2)
v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
// match: (Rsh32x64 x (SUB <typ.UInt> (MOVDconst [32]) (AND <typ.UInt> y (MOVDconst [31]))))
- // result: (SRAW x (SUB <typ.UInt> (MOVDconst [32]) (ANDconst <typ.UInt> [31] y)))
+ // result: (SRAW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
v1.AuxInt = int64ToAuxInt(32)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v2.AuxInt = int64ToAuxInt(31)
- v2.AddArg(y)
+ v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v3.AuxInt = int64ToAuxInt(31)
+ v3.AddArg(y)
+ v2.AddArg(v3)
v0.AddArg2(v1, v2)
v.AddArg2(x, v0)
return true
break
}
// match: (Rsh32x64 x (SUBFCconst <typ.UInt> [32] (AND <typ.UInt> y (MOVDconst [31]))))
- // result: (SRAW x (SUBFCconst <typ.UInt> [32] (ANDconst <typ.UInt> [31] y)))
+ // result: (SRAW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 32 {
v.reset(OpPPC64SRAW)
v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
v0.AuxInt = int64ToAuxInt(32)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v1.AuxInt = int64ToAuxInt(31)
- v1.AddArg(y)
+ v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v2.AuxInt = int64ToAuxInt(31)
+ v2.AddArg(y)
+ v1.AddArg(v2)
v0.AddArg(v1)
v.AddArg2(x, v0)
return true
return true
}
// match: (Rsh64Ux64 x (AND y (MOVDconst [63])))
- // result: (SRD x (ANDconst <typ.Int64> [63] y))
+ // result: (SRD x (Select0 <typ.Int64> (ANDCCconst [63] y)))
for {
x := v_0
if v_1.Op != OpPPC64AND {
continue
}
v.reset(OpPPC64SRD)
- v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int64)
- v0.AuxInt = int64ToAuxInt(63)
- v0.AddArg(y)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int64)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(63)
+ v1.AddArg(y)
+ v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
break
}
- // match: (Rsh64Ux64 x (ANDconst <typ.UInt> [63] y))
- // result: (SRD x (ANDconst <typ.UInt> [63] y))
+ // match: (Rsh64Ux64 x (Select0 (ANDCCconst <typ.UInt> [63] y)))
+ // result: (SRD x (Select0 <typ.UInt> (ANDCCconst [63] y)))
for {
x := v_0
- if v_1.Op != OpPPC64ANDconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 63 {
+ if v_1.Op != OpSelect0 {
break
}
- y := v_1.Args[0]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64ANDCCconst || v_1_0.Type != typ.UInt || auxIntToInt64(v_1_0.AuxInt) != 63 {
+ break
+ }
+ y := v_1_0.Args[0]
v.reset(OpPPC64SRD)
- v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v0.AuxInt = int64ToAuxInt(63)
- v0.AddArg(y)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(63)
+ v1.AddArg(y)
+ v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
- // match: (Rsh64Ux64 x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
- // result: (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
+ // match: (Rsh64Ux64 x (SUB <typ.UInt> (MOVDconst [64]) (Select0 (ANDCCconst <typ.UInt> [63] y))))
+ // result: (SRD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
break
}
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpPPC64ANDconst || v_1_1.Type != typ.UInt || auxIntToInt64(v_1_1.AuxInt) != 63 {
+ if v_1_1.Op != OpSelect0 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64ANDCCconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 63 {
break
}
- y := v_1_1.Args[0]
+ y := v_1_1_0.Args[0]
v.reset(OpPPC64SRD)
v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
v1.AuxInt = int64ToAuxInt(64)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v2.AuxInt = int64ToAuxInt(63)
- v2.AddArg(y)
+ v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v3.AuxInt = int64ToAuxInt(63)
+ v3.AddArg(y)
+ v2.AddArg(v3)
v0.AddArg2(v1, v2)
v.AddArg2(x, v0)
return true
}
- // match: (Rsh64Ux64 x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y)))
- // result: (SRD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y)))
+ // match: (Rsh64Ux64 x (SUBFCconst <typ.UInt> [64] (Select0 (ANDCCconst <typ.UInt> [63] y))))
+ // result: (SRD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 64 {
break
}
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64ANDconst || v_1_0.Type != typ.UInt || auxIntToInt64(v_1_0.AuxInt) != 63 {
+ if v_1_0.Op != OpSelect0 {
break
}
- y := v_1_0.Args[0]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpPPC64ANDCCconst || v_1_0_0.Type != typ.UInt || auxIntToInt64(v_1_0_0.AuxInt) != 63 {
+ break
+ }
+ y := v_1_0_0.Args[0]
v.reset(OpPPC64SRD)
v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
v0.AuxInt = int64ToAuxInt(64)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v1.AuxInt = int64ToAuxInt(63)
- v1.AddArg(y)
+ v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v2.AuxInt = int64ToAuxInt(63)
+ v2.AddArg(y)
+ v1.AddArg(v2)
v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
// match: (Rsh64Ux64 x (SUB <typ.UInt> (MOVDconst [64]) (AND <typ.UInt> y (MOVDconst [63]))))
- // result: (SRD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
+ // result: (SRD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
v1.AuxInt = int64ToAuxInt(64)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v2.AuxInt = int64ToAuxInt(63)
- v2.AddArg(y)
+ v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v3.AuxInt = int64ToAuxInt(63)
+ v3.AddArg(y)
+ v2.AddArg(v3)
v0.AddArg2(v1, v2)
v.AddArg2(x, v0)
return true
break
}
// match: (Rsh64Ux64 x (SUBFCconst <typ.UInt> [64] (AND <typ.UInt> y (MOVDconst [63]))))
- // result: (SRD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y)))
+ // result: (SRD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 64 {
v.reset(OpPPC64SRD)
v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
v0.AuxInt = int64ToAuxInt(64)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v1.AuxInt = int64ToAuxInt(63)
- v1.AddArg(y)
+ v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v2.AuxInt = int64ToAuxInt(63)
+ v2.AddArg(y)
+ v1.AddArg(v2)
v0.AddArg(v1)
v.AddArg2(x, v0)
return true
return true
}
// match: (Rsh64x64 x (AND y (MOVDconst [63])))
- // result: (SRAD x (ANDconst <typ.Int64> [63] y))
+ // result: (SRAD x (Select0 <typ.Int64> (ANDCCconst [63] y)))
for {
x := v_0
if v_1.Op != OpPPC64AND {
continue
}
v.reset(OpPPC64SRAD)
- v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.Int64)
- v0.AuxInt = int64ToAuxInt(63)
- v0.AddArg(y)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int64)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(63)
+ v1.AddArg(y)
+ v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
break
}
- // match: (Rsh64x64 x (ANDconst <typ.UInt> [63] y))
- // result: (SRAD x (ANDconst <typ.UInt> [63] y))
+ // match: (Rsh64x64 x (Select0 (ANDCCconst <typ.UInt> [63] y)))
+ // result: (SRAD x (Select0 <typ.UInt> (ANDCCconst [63] y)))
for {
x := v_0
- if v_1.Op != OpPPC64ANDconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 63 {
+ if v_1.Op != OpSelect0 {
break
}
- y := v_1.Args[0]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpPPC64ANDCCconst || v_1_0.Type != typ.UInt || auxIntToInt64(v_1_0.AuxInt) != 63 {
+ break
+ }
+ y := v_1_0.Args[0]
v.reset(OpPPC64SRAD)
- v0 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v0.AuxInt = int64ToAuxInt(63)
- v0.AddArg(y)
+ v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(63)
+ v1.AddArg(y)
+ v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
- // match: (Rsh64x64 x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
- // result: (SRAD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
+ // match: (Rsh64x64 x (SUB <typ.UInt> (MOVDconst [64]) (Select0 (ANDCCconst <typ.UInt> [63] y))))
+ // result: (SRAD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
break
}
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpPPC64ANDconst || v_1_1.Type != typ.UInt || auxIntToInt64(v_1_1.AuxInt) != 63 {
+ if v_1_1.Op != OpSelect0 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpPPC64ANDCCconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 63 {
break
}
- y := v_1_1.Args[0]
+ y := v_1_1_0.Args[0]
v.reset(OpPPC64SRAD)
v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
v1.AuxInt = int64ToAuxInt(64)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v2.AuxInt = int64ToAuxInt(63)
- v2.AddArg(y)
+ v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v3.AuxInt = int64ToAuxInt(63)
+ v3.AddArg(y)
+ v2.AddArg(v3)
v0.AddArg2(v1, v2)
v.AddArg2(x, v0)
return true
}
- // match: (Rsh64x64 x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y)))
- // result: (SRAD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y)))
+ // match: (Rsh64x64 x (SUBFCconst <typ.UInt> [64] (Select0 (ANDCCconst <typ.UInt> [63] y))))
+ // result: (SRAD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 64 {
break
}
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64ANDconst || v_1_0.Type != typ.UInt || auxIntToInt64(v_1_0.AuxInt) != 63 {
+ if v_1_0.Op != OpSelect0 {
break
}
- y := v_1_0.Args[0]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpPPC64ANDCCconst || v_1_0_0.Type != typ.UInt || auxIntToInt64(v_1_0_0.AuxInt) != 63 {
+ break
+ }
+ y := v_1_0_0.Args[0]
v.reset(OpPPC64SRAD)
v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
v0.AuxInt = int64ToAuxInt(64)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v1.AuxInt = int64ToAuxInt(63)
- v1.AddArg(y)
+ v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v2.AuxInt = int64ToAuxInt(63)
+ v2.AddArg(y)
+ v1.AddArg(v2)
v0.AddArg(v1)
v.AddArg2(x, v0)
return true
}
// match: (Rsh64x64 x (SUB <typ.UInt> (MOVDconst [64]) (AND <typ.UInt> y (MOVDconst [63]))))
- // result: (SRAD x (SUB <typ.UInt> (MOVDconst [64]) (ANDconst <typ.UInt> [63] y)))
+ // result: (SRAD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
v1.AuxInt = int64ToAuxInt(64)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v2.AuxInt = int64ToAuxInt(63)
- v2.AddArg(y)
+ v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v3.AuxInt = int64ToAuxInt(63)
+ v3.AddArg(y)
+ v2.AddArg(v3)
v0.AddArg2(v1, v2)
v.AddArg2(x, v0)
return true
break
}
// match: (Rsh64x64 x (SUBFCconst <typ.UInt> [64] (AND <typ.UInt> y (MOVDconst [63]))))
- // result: (SRAD x (SUBFCconst <typ.UInt> [64] (ANDconst <typ.UInt> [63] y)))
+ // result: (SRAD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))
for {
x := v_0
if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 64 {
v.reset(OpPPC64SRAD)
v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
v0.AuxInt = int64ToAuxInt(64)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDconst, typ.UInt)
- v1.AuxInt = int64ToAuxInt(63)
- v1.AddArg(y)
+ v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
+ v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v2.AuxInt = int64ToAuxInt(63)
+ v2.AddArg(y)
+ v1.AddArg(v2)
v0.AddArg(v1)
v.AddArg2(x, v0)
return true
v.AddArg(v0)
return true
}
+ // match: (Select0 (ANDCCconst [m] (ROTLWconst [r] x)))
+ // cond: isPPC64WordRotateMask(m)
+ // result: (RLWINM [encodePPC64RotateMask(r,m,32)] x)
+ for {
+ if v_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ m := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64ROTLWconst {
+ break
+ }
+ r := auxIntToInt64(v_0_0.AuxInt)
+ x := v_0_0.Args[0]
+ if !(isPPC64WordRotateMask(m)) {
+ break
+ }
+ v.reset(OpPPC64RLWINM)
+ v.AuxInt = int64ToAuxInt(encodePPC64RotateMask(r, m, 32))
+ v.AddArg(x)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [m] (ROTLW x r)))
+ // cond: isPPC64WordRotateMask(m)
+ // result: (RLWNM [encodePPC64RotateMask(0,m,32)] x r)
+ for {
+ if v_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ m := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64ROTLW {
+ break
+ }
+ r := v_0_0.Args[1]
+ x := v_0_0.Args[0]
+ if !(isPPC64WordRotateMask(m)) {
+ break
+ }
+ v.reset(OpPPC64RLWNM)
+ v.AuxInt = int64ToAuxInt(encodePPC64RotateMask(0, m, 32))
+ v.AddArg2(x, r)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [m] (SRWconst x [s])))
+ // cond: mergePPC64RShiftMask(m,s,32) == 0
+ // result: (MOVDconst [0])
+ for {
+ if v_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ m := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64SRWconst {
+ break
+ }
+ s := auxIntToInt64(v_0_0.AuxInt)
+ if !(mergePPC64RShiftMask(m, s, 32) == 0) {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = int64ToAuxInt(0)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [m] (SRWconst x [s])))
+ // cond: mergePPC64AndSrwi(m,s) != 0
+ // result: (RLWINM [mergePPC64AndSrwi(m,s)] x)
+ for {
+ if v_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ m := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64SRWconst {
+ break
+ }
+ s := auxIntToInt64(v_0_0.AuxInt)
+ x := v_0_0.Args[0]
+ if !(mergePPC64AndSrwi(m, s) != 0) {
+ break
+ }
+ v.reset(OpPPC64RLWINM)
+ v.AuxInt = int64ToAuxInt(mergePPC64AndSrwi(m, s))
+ v.AddArg(x)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [c] (Select0 (ANDCCconst [d] x))))
+ // result: (Select0 (ANDCCconst [c&d] x))
+ for {
+ if v_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSelect0 {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ d := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v.reset(OpSelect0)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(c & d)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [-1] x))
+ // result: x
+ for {
+ if v_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0.AuxInt) != -1 {
+ break
+ }
+ x := v_0.Args[0]
+ v.copyOf(x)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [0] _))
+ // result: (MOVDconst [0])
+ for {
+ if v_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0.AuxInt) != 0 {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = int64ToAuxInt(0)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [c] y:(MOVBZreg _)))
+ // cond: c&0xFF == 0xFF
+ // result: y
+ for {
+ if v_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ y := v_0.Args[0]
+ if y.Op != OpPPC64MOVBZreg || !(c&0xFF == 0xFF) {
+ break
+ }
+ v.copyOf(y)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [0xFF] y:(MOVBreg _)))
+ // result: y
+ for {
+ if v_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0.AuxInt) != 0xFF {
+ break
+ }
+ y := v_0.Args[0]
+ if y.Op != OpPPC64MOVBreg {
+ break
+ }
+ v.copyOf(y)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [c] y:(MOVHZreg _)))
+ // cond: c&0xFFFF == 0xFFFF
+ // result: y
+ for {
+ if v_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ y := v_0.Args[0]
+ if y.Op != OpPPC64MOVHZreg || !(c&0xFFFF == 0xFFFF) {
+ break
+ }
+ v.copyOf(y)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [0xFFFF] y:(MOVHreg _)))
+ // result: y
+ for {
+ if v_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0.AuxInt) != 0xFFFF {
+ break
+ }
+ y := v_0.Args[0]
+ if y.Op != OpPPC64MOVHreg {
+ break
+ }
+ v.copyOf(y)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [c] (MOVBreg x)))
+ // result: (Select0 (ANDCCconst [c&0xFF] x))
+ for {
+ if v_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64MOVBreg {
+ break
+ }
+ x := v_0_0.Args[0]
+ v.reset(OpSelect0)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(c & 0xFF)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [c] (MOVBZreg x)))
+ // result: (Select0 (ANDCCconst [c&0xFF] x))
+ for {
+ if v_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64MOVBZreg {
+ break
+ }
+ x := v_0_0.Args[0]
+ v.reset(OpSelect0)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(c & 0xFF)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [c] (MOVHreg x)))
+ // result: (Select0 (ANDCCconst [c&0xFFFF] x))
+ for {
+ if v_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64MOVHreg {
+ break
+ }
+ x := v_0_0.Args[0]
+ v.reset(OpSelect0)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(c & 0xFFFF)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [c] (MOVHZreg x)))
+ // result: (Select0 (ANDCCconst [c&0xFFFF] x))
+ for {
+ if v_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64MOVHZreg {
+ break
+ }
+ x := v_0_0.Args[0]
+ v.reset(OpSelect0)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(c & 0xFFFF)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [c] (MOVWreg x)))
+ // result: (Select0 (ANDCCconst [c&0xFFFFFFFF] x))
+ for {
+ if v_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64MOVWreg {
+ break
+ }
+ x := v_0_0.Args[0]
+ v.reset(OpSelect0)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(c & 0xFFFFFFFF)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Select0 (ANDCCconst [c] (MOVWZreg x)))
+ // result: (Select0 (ANDCCconst [c&0xFFFFFFFF] x))
+ for {
+ if v_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64MOVWZreg {
+ break
+ }
+ x := v_0_0.Args[0]
+ v.reset(OpSelect0)
+ v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v0.AuxInt = int64ToAuxInt(c & 0xFFFFFFFF)
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
return false
}
func rewriteValuePPC64_OpSelect1(v *Value) bool {
typ := &b.Func.Config.Types
switch b.Kind {
case BlockPPC64EQ:
- // match: (EQ (CMPconst [0] (ANDconst [c] x)) yes no)
- // result: (EQ (ANDCCconst [c] x) yes no)
+ // match: (EQ (CMPconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (EQ (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPconst {
v_0 := b.Controls[0]
if auxIntToInt64(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64EQ, v0)
return true
}
- // match: (EQ (CMPWconst [0] (ANDconst [c] x)) yes no)
- // result: (EQ (ANDCCconst [c] x) yes no)
+ // match: (EQ (CMPWconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (EQ (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
if auxIntToInt32(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64EQ, v0)
return true
}
b.resetWithControl(BlockPPC64EQ, cmp)
return true
}
- // match: (EQ (CMPconst [0] (ANDconst [c] x)) yes no)
- // result: (EQ (ANDCCconst [c] x) yes no)
+ // match: (EQ (CMPconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (EQ (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPconst {
v_0 := b.Controls[0]
if auxIntToInt64(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64EQ, v0)
return true
}
- // match: (EQ (CMPWconst [0] (ANDconst [c] x)) yes no)
- // result: (EQ (ANDCCconst [c] x) yes no)
+ // match: (EQ (CMPWconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (EQ (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
if auxIntToInt32(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64EQ, v0)
return true
}
b.resetWithControl(BlockPPC64LE, cmp)
return true
}
- // match: (GE (CMPconst [0] (ANDconst [c] x)) yes no)
- // result: (GE (ANDCCconst [c] x) yes no)
+ // match: (GE (CMPconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (GE (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPconst {
v_0 := b.Controls[0]
if auxIntToInt64(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64GE, v0)
return true
}
- // match: (GE (CMPWconst [0] (ANDconst [c] x)) yes no)
- // result: (GE (ANDCCconst [c] x) yes no)
+ // match: (GE (CMPWconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (GE (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
if auxIntToInt32(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64GE, v0)
return true
}
b.resetWithControl(BlockPPC64LT, cmp)
return true
}
- // match: (GT (CMPconst [0] (ANDconst [c] x)) yes no)
- // result: (GT (ANDCCconst [c] x) yes no)
+ // match: (GT (CMPconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (GT (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPconst {
v_0 := b.Controls[0]
if auxIntToInt64(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64GT, v0)
return true
}
- // match: (GT (CMPWconst [0] (ANDconst [c] x)) yes no)
- // result: (GT (ANDCCconst [c] x) yes no)
+ // match: (GT (CMPWconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (GT (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
if auxIntToInt32(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64GT, v0)
return true
}
return true
}
// match: (If cond yes no)
- // result: (NE (CMPWconst [0] (ANDconst <typ.UInt32> [1] cond)) yes no)
+ // result: (NE (CMPWconst [0] (Select0 <typ.UInt32> (ANDCCconst [1] cond))) yes no)
for {
cond := b.Controls[0]
v0 := b.NewValue0(cond.Pos, OpPPC64CMPWconst, types.TypeFlags)
v0.AuxInt = int32ToAuxInt(0)
- v1 := b.NewValue0(cond.Pos, OpPPC64ANDconst, typ.UInt32)
- v1.AuxInt = int64ToAuxInt(1)
- v1.AddArg(cond)
+ v1 := b.NewValue0(cond.Pos, OpSelect0, typ.UInt32)
+ v2 := b.NewValue0(cond.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v2.AuxInt = int64ToAuxInt(1)
+ v2.AddArg(cond)
+ v1.AddArg(v2)
v0.AddArg(v1)
b.resetWithControl(BlockPPC64NE, v0)
return true
b.resetWithControl(BlockPPC64GE, cmp)
return true
}
- // match: (LE (CMPconst [0] (ANDconst [c] x)) yes no)
- // result: (LE (ANDCCconst [c] x) yes no)
+ // match: (LE (CMPconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (LE (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPconst {
v_0 := b.Controls[0]
if auxIntToInt64(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64LE, v0)
return true
}
- // match: (LE (CMPWconst [0] (ANDconst [c] x)) yes no)
- // result: (LE (ANDCCconst [c] x) yes no)
+ // match: (LE (CMPWconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (LE (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
if auxIntToInt32(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64LE, v0)
return true
}
b.resetWithControl(BlockPPC64GT, cmp)
return true
}
- // match: (LT (CMPconst [0] (ANDconst [c] x)) yes no)
- // result: (LT (ANDCCconst [c] x) yes no)
+ // match: (LT (CMPconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (LT (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPconst {
v_0 := b.Controls[0]
if auxIntToInt64(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64LT, v0)
return true
}
- // match: (LT (CMPWconst [0] (ANDconst [c] x)) yes no)
- // result: (LT (ANDCCconst [c] x) yes no)
+ // match: (LT (CMPWconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (LT (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
if auxIntToInt32(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64LT, v0)
return true
}
break
}
case BlockPPC64NE:
- // match: (NE (CMPWconst [0] (ANDconst [1] (Equal cc))) yes no)
+ // match: (NE (CMPWconst [0] (Select0 (ANDCCconst [1] (Equal cc)))) yes no)
// result: (EQ cc yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst || auxIntToInt64(v_0_0.AuxInt) != 1 {
+ if v_0_0.Op != OpSelect0 {
break
}
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpPPC64Equal {
+ if v_0_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_0_0.AuxInt) != 1 {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpPPC64Equal {
break
}
- cc := v_0_0_0.Args[0]
+ cc := v_0_0_0_0.Args[0]
b.resetWithControl(BlockPPC64EQ, cc)
return true
}
- // match: (NE (CMPWconst [0] (ANDconst [1] (NotEqual cc))) yes no)
+ // match: (NE (CMPWconst [0] (Select0 (ANDCCconst [1] (NotEqual cc)))) yes no)
// result: (NE cc yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst || auxIntToInt64(v_0_0.AuxInt) != 1 {
+ if v_0_0.Op != OpSelect0 {
break
}
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpPPC64NotEqual {
+ if v_0_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_0_0.AuxInt) != 1 {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpPPC64NotEqual {
break
}
- cc := v_0_0_0.Args[0]
+ cc := v_0_0_0_0.Args[0]
b.resetWithControl(BlockPPC64NE, cc)
return true
}
- // match: (NE (CMPWconst [0] (ANDconst [1] (LessThan cc))) yes no)
+ // match: (NE (CMPWconst [0] (Select0 (ANDCCconst [1] (LessThan cc)))) yes no)
// result: (LT cc yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst || auxIntToInt64(v_0_0.AuxInt) != 1 {
+ if v_0_0.Op != OpSelect0 {
break
}
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpPPC64LessThan {
+ if v_0_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_0_0.AuxInt) != 1 {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpPPC64LessThan {
break
}
- cc := v_0_0_0.Args[0]
+ cc := v_0_0_0_0.Args[0]
b.resetWithControl(BlockPPC64LT, cc)
return true
}
- // match: (NE (CMPWconst [0] (ANDconst [1] (LessEqual cc))) yes no)
+ // match: (NE (CMPWconst [0] (Select0 (ANDCCconst [1] (LessEqual cc)))) yes no)
// result: (LE cc yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst || auxIntToInt64(v_0_0.AuxInt) != 1 {
+ if v_0_0.Op != OpSelect0 {
break
}
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpPPC64LessEqual {
+ if v_0_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_0_0.AuxInt) != 1 {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpPPC64LessEqual {
break
}
- cc := v_0_0_0.Args[0]
+ cc := v_0_0_0_0.Args[0]
b.resetWithControl(BlockPPC64LE, cc)
return true
}
- // match: (NE (CMPWconst [0] (ANDconst [1] (GreaterThan cc))) yes no)
+ // match: (NE (CMPWconst [0] (Select0 (ANDCCconst [1] (GreaterThan cc)))) yes no)
// result: (GT cc yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst || auxIntToInt64(v_0_0.AuxInt) != 1 {
+ if v_0_0.Op != OpSelect0 {
break
}
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpPPC64GreaterThan {
+ if v_0_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_0_0.AuxInt) != 1 {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpPPC64GreaterThan {
break
}
- cc := v_0_0_0.Args[0]
+ cc := v_0_0_0_0.Args[0]
b.resetWithControl(BlockPPC64GT, cc)
return true
}
- // match: (NE (CMPWconst [0] (ANDconst [1] (GreaterEqual cc))) yes no)
+ // match: (NE (CMPWconst [0] (Select0 (ANDCCconst [1] (GreaterEqual cc)))) yes no)
// result: (GE cc yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst || auxIntToInt64(v_0_0.AuxInt) != 1 {
+ if v_0_0.Op != OpSelect0 {
break
}
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpPPC64GreaterEqual {
+ if v_0_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_0_0.AuxInt) != 1 {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpPPC64GreaterEqual {
break
}
- cc := v_0_0_0.Args[0]
+ cc := v_0_0_0_0.Args[0]
b.resetWithControl(BlockPPC64GE, cc)
return true
}
- // match: (NE (CMPWconst [0] (ANDconst [1] (FLessThan cc))) yes no)
+ // match: (NE (CMPWconst [0] (Select0 (ANDCCconst [1] (FLessThan cc)))) yes no)
// result: (FLT cc yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst || auxIntToInt64(v_0_0.AuxInt) != 1 {
+ if v_0_0.Op != OpSelect0 {
break
}
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpPPC64FLessThan {
+ if v_0_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_0_0.AuxInt) != 1 {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpPPC64FLessThan {
break
}
- cc := v_0_0_0.Args[0]
+ cc := v_0_0_0_0.Args[0]
b.resetWithControl(BlockPPC64FLT, cc)
return true
}
- // match: (NE (CMPWconst [0] (ANDconst [1] (FLessEqual cc))) yes no)
+ // match: (NE (CMPWconst [0] (Select0 (ANDCCconst [1] (FLessEqual cc)))) yes no)
// result: (FLE cc yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst || auxIntToInt64(v_0_0.AuxInt) != 1 {
+ if v_0_0.Op != OpSelect0 {
break
}
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpPPC64FLessEqual {
+ if v_0_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_0_0.AuxInt) != 1 {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpPPC64FLessEqual {
break
}
- cc := v_0_0_0.Args[0]
+ cc := v_0_0_0_0.Args[0]
b.resetWithControl(BlockPPC64FLE, cc)
return true
}
- // match: (NE (CMPWconst [0] (ANDconst [1] (FGreaterThan cc))) yes no)
+ // match: (NE (CMPWconst [0] (Select0 (ANDCCconst [1] (FGreaterThan cc)))) yes no)
// result: (FGT cc yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst || auxIntToInt64(v_0_0.AuxInt) != 1 {
+ if v_0_0.Op != OpSelect0 {
break
}
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpPPC64FGreaterThan {
+ if v_0_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_0_0.AuxInt) != 1 {
break
}
- cc := v_0_0_0.Args[0]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpPPC64FGreaterThan {
+ break
+ }
+ cc := v_0_0_0_0.Args[0]
b.resetWithControl(BlockPPC64FGT, cc)
return true
}
- // match: (NE (CMPWconst [0] (ANDconst [1] (FGreaterEqual cc))) yes no)
+ // match: (NE (CMPWconst [0] (Select0 (ANDCCconst [1] (FGreaterEqual cc)))) yes no)
// result: (FGE cc yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst || auxIntToInt64(v_0_0.AuxInt) != 1 {
+ if v_0_0.Op != OpSelect0 {
break
}
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpPPC64FGreaterEqual {
+ if v_0_0_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_0_0_0.AuxInt) != 1 {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpPPC64FGreaterEqual {
break
}
- cc := v_0_0_0.Args[0]
+ cc := v_0_0_0_0.Args[0]
b.resetWithControl(BlockPPC64FGE, cc)
return true
}
- // match: (NE (CMPconst [0] (ANDconst [c] x)) yes no)
- // result: (NE (ANDCCconst [c] x) yes no)
+ // match: (NE (CMPconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (NE (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPconst {
v_0 := b.Controls[0]
if auxIntToInt64(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64NE, v0)
return true
}
- // match: (NE (CMPWconst [0] (ANDconst [c] x)) yes no)
- // result: (NE (ANDCCconst [c] x) yes no)
+ // match: (NE (CMPWconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (NE (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
if auxIntToInt32(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64NE, v0)
return true
}
b.resetWithControl(BlockPPC64NE, cmp)
return true
}
- // match: (NE (CMPconst [0] (ANDconst [c] x)) yes no)
- // result: (NE (ANDCCconst [c] x) yes no)
+ // match: (NE (CMPconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (NE (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPconst {
v_0 := b.Controls[0]
if auxIntToInt64(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64NE, v0)
return true
}
- // match: (NE (CMPWconst [0] (ANDconst [c] x)) yes no)
- // result: (NE (ANDCCconst [c] x) yes no)
+ // match: (NE (CMPWconst [0] (Select0 (ANDCCconst [c] x))) yes no)
+ // result: (NE (Select1 <types.TypeFlags> (ANDCCconst [c] x)) yes no)
for b.Controls[0].Op == OpPPC64CMPWconst {
v_0 := b.Controls[0]
if auxIntToInt32(v_0.AuxInt) != 0 {
break
}
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpPPC64ANDconst {
+ if v_0_0.Op != OpSelect0 {
break
}
- c := auxIntToInt64(v_0_0.AuxInt)
- x := v_0_0.Args[0]
- v0 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.TypeFlags)
- v0.AuxInt = int64ToAuxInt(c)
- v0.AddArg(x)
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpPPC64ANDCCconst {
+ break
+ }
+ c := auxIntToInt64(v_0_0_0.AuxInt)
+ x := v_0_0_0.Args[0]
+ v0 := b.NewValue0(v_0.Pos, OpSelect1, types.TypeFlags)
+ v1 := b.NewValue0(v_0.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
+ v1.AuxInt = int64ToAuxInt(c)
+ v1.AddArg(x)
+ v0.AddArg(v1)
b.resetWithControl(BlockPPC64NE, v0)
return true
}