summaryrefslogtreecommitdiff
path: root/gcc/match.pd
diff options
context:
space:
mode:
authorJakub Jelinek <jakub@redhat.com>2020-01-01 01:20:39 +0100
committerJakub Jelinek <jakub@gcc.gnu.org>2020-01-01 01:20:39 +0100
commit2efa10d528bb20bff299a899b1f226b6174b50da (patch)
treea7f908cf8e704324b930e7585e10d53058793740 /gcc/match.pd
parentb3b13bf18692701d54d6754ba7e9f155906c8d17 (diff)
re PR tree-optimization/93098 (ICE with negative shifter)
	PR tree-optimization/93098
	* match.pd (popcount): For shift amounts, use integer_onep
	or wi::to_widest () == cst instead of tree_to_uhwi () == cst
	tests.  Make sure that precision is power of two larger than
	or equal to 16.  Ensure shift is never negative.  Use
	HOST_WIDE_INT_UC macro instead of ULL suffixed constants.
	Formatting fixes.
	* gcc.c-torture/compile/pr93098.c: New test.
From-SVN: r279809
Diffstat (limited to 'gcc/match.pd')
-rw-r--r--gcc/match.pd79
1 file changed, 43 insertions(+), 36 deletions(-)
diff --git a/gcc/match.pd b/gcc/match.pd
index dda86964b4c..dd2df20e08b 100644
--- a/gcc/match.pd
+++ b/gcc/match.pd
@@ -5786,43 +5786,50 @@ DEFINE_INT_AND_FLOAT_ROUND_FN (RINT)
return (x * 0x01010101) >> 24;
} */
(simplify
- (rshift
- (mult
- (bit_and
- (plus:c
- (rshift @8 INTEGER_CST@5)
- (plus:c@8
- (bit_and @6 INTEGER_CST@7)
- (bit_and
- (rshift
- (minus@6
- @0
- (bit_and
- (rshift @0 INTEGER_CST@4)
- INTEGER_CST@11))
- INTEGER_CST@10)
- INTEGER_CST@9)))
- INTEGER_CST@3)
- INTEGER_CST@2)
- INTEGER_CST@1)
+ (rshift
+ (mult
+ (bit_and
+ (plus:c
+ (rshift @8 INTEGER_CST@5)
+ (plus:c@8
+ (bit_and @6 INTEGER_CST@7)
+ (bit_and
+ (rshift
+ (minus@6 @0
+ (bit_and (rshift @0 INTEGER_CST@4) INTEGER_CST@11))
+ INTEGER_CST@10)
+ INTEGER_CST@9)))
+ INTEGER_CST@3)
+ INTEGER_CST@2)
+ INTEGER_CST@1)
/* Check constants and optab. */
- (with
- {
- unsigned prec = TYPE_PRECISION (type);
- int shift = 64 - prec;
- const unsigned HOST_WIDE_INT c1 = 0x0101010101010101ULL >> shift,
- c2 = 0x0F0F0F0F0F0F0F0FULL >> shift,
- c3 = 0x3333333333333333ULL >> shift,
- c4 = 0x5555555555555555ULL >> shift;
- }
- (if (prec <= 64 && TYPE_UNSIGNED (type) && tree_to_uhwi (@4) == 1
- && tree_to_uhwi (@10) == 2 && tree_to_uhwi (@5) == 4
- && tree_to_uhwi (@1) == prec - 8 && tree_to_uhwi (@2) == c1
- && tree_to_uhwi (@3) == c2 && tree_to_uhwi (@9) == c3
- && tree_to_uhwi (@7) == c3 && tree_to_uhwi (@11) == c4
- && direct_internal_fn_supported_p (IFN_POPCOUNT, type,
- OPTIMIZE_FOR_BOTH))
- (convert (IFN_POPCOUNT:type @0)))))
+ (with { unsigned prec = TYPE_PRECISION (type);
+ int shift = (64 - prec) & 63;
+ unsigned HOST_WIDE_INT c1
+ = HOST_WIDE_INT_UC (0x0101010101010101) >> shift;
+ unsigned HOST_WIDE_INT c2
+ = HOST_WIDE_INT_UC (0x0F0F0F0F0F0F0F0F) >> shift;
+ unsigned HOST_WIDE_INT c3
+ = HOST_WIDE_INT_UC (0x3333333333333333) >> shift;
+ unsigned HOST_WIDE_INT c4
+ = HOST_WIDE_INT_UC (0x5555555555555555) >> shift;
+ }
+ (if (prec >= 16
+ && prec <= 64
+ && pow2p_hwi (prec)
+ && TYPE_UNSIGNED (type)
+ && integer_onep (@4)
+ && wi::to_widest (@10) == 2
+ && wi::to_widest (@5) == 4
+ && wi::to_widest (@1) == prec - 8
+ && tree_to_uhwi (@2) == c1
+ && tree_to_uhwi (@3) == c2
+ && tree_to_uhwi (@9) == c3
+ && tree_to_uhwi (@7) == c3
+ && tree_to_uhwi (@11) == c4
+ && direct_internal_fn_supported_p (IFN_POPCOUNT, type,
+ OPTIMIZE_FOR_BOTH))
+ (convert (IFN_POPCOUNT:type @0)))))
#endif
/* Simplify: