author    Richard Biener <rguenther@suse.de>   2014-01-15 08:47:30 +0000
committer Richard Biener <rguenth@gcc.gnu.org> 2014-01-15 08:47:30 +0000
commit    95cb86978721e08640cdad52fa5c54409ad6488c (patch)
tree      eb095a46488fa76a8001e7482f9f1466118efe39 /gcc/lcm.c
parent    cb4b6d170652b99b8aee2430737051cdc42b5517 (diff)
re PR rtl-optimization/59802 (excessive compile time in RTL optimizers (loop unswitching, CPROP))
2014-01-15  Richard Biener  <rguenther@suse.de>

	PR rtl-optimization/59802
	* lcm.c (compute_available): Use inverted postorder to seed
	the initial worklist.

From-SVN: r206624
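Why the ordering matters: compute_available solves a forward dataflow problem, where a block's AVIN is derived from its predecessors' AVOUT, so handing blocks to the solver predecessors-first lets most facts stabilize in the first sweep instead of after repeated requeueing. The standalone sketch below illustrates only that ordering idea; it is not GCC code, and the toy diamond CFG, block numbering, and function names are invented for illustration.

/* Minimal sketch (not GCC code): an "inverted postorder" is a postorder
   walk of the reversed CFG starting at the exit block.  For a forward
   problem it lists each block after its predecessors.  */
#include <stdbool.h>
#include <stdio.h>

#define NBLOCKS 4
#define EXIT_BLK 3

/* Toy diamond CFG: 0 -> 1, 0 -> 2, 1 -> 3, 2 -> 3, stored as each
   block's predecessors (-1 marks an unused slot).  */
static const int preds[NBLOCKS][2] = { {-1, -1}, {0, -1}, {0, -1}, {1, 2} };

static int order[NBLOCKS], order_num;
static bool seen[NBLOCKS];

/* DFS along reversed edges (i.e. predecessors); a block is recorded
   only after all of its predecessors have been recorded.  */
static void inverted_dfs (int b)
{
  seen[b] = true;
  for (int i = 0; i < 2; i++)
    if (preds[b][i] >= 0 && !seen[preds[b][i]])
      inverted_dfs (preds[b][i]);
  order[order_num++] = b;
}

int main (void)
{
  inverted_dfs (EXIT_BLK);

  /* Seeding a forward-dataflow worklist in this order means each block
     is first processed after its predecessors, so most facts settle in
     one sweep rather than after repeated requeueing.  */
  printf ("worklist seed order:");
  for (int i = 0; i < order_num; i++)
    printf (" %d", order[i]);
  printf ("\n");   /* Prints: worklist seed order: 0 1 2 3  */
  return 0;
}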
Diffstat (limited to 'gcc/lcm.c')
 gcc/lcm.c | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)
diff --git a/gcc/lcm.c b/gcc/lcm.c
index aab64a6cda7..70d96c14d7c 100644
--- a/gcc/lcm.c
+++ b/gcc/lcm.c
@@ -496,12 +496,20 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
   bitmap_vector_ones (avout, last_basic_block_for_fn (cfun));
 
   /* Put every block on the worklist; this is necessary because of the
-     optimistic initialization of AVOUT above.  */
-  FOR_EACH_BB_FN (bb, cfun)
+     optimistic initialization of AVOUT above.  Use inverted postorder
+     to make the dataflow problem require fewer iterations.  */
+  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
+  int postorder_num = inverted_post_order_compute (postorder);
+  for (int i = 0; i < postorder_num; ++i)
     {
+      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
+      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
+          || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
+        continue;
       *qin++ = bb;
       bb->aux = bb;
     }
+  free (postorder);
 
   qin = worklist;
   qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
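A note on the ENTRY/EXIT guard in the new loop: the worklist array is sized n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS, so it has no slots for the two fixed blocks, while the guard suggests inverted_post_order_compute can return both of them; the continue keeps them from overrunning the queue.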