summaryrefslogtreecommitdiff
path: root/gcc/tree-ssa-threadupdate.c
diff options
context:
space:
mode:
authorJeff Law <law@redhat.com>2017-12-15 15:35:51 -0700
committerJeff Law <law@gcc.gnu.org>2017-12-15 15:35:51 -0700
commitb021b385661c7d6db34ef69e9e0cb89677b54ef4 (patch)
tree1ec9080b51eb1ab9c5076a5080fe7d4d67b53e16 /gcc/tree-ssa-threadupdate.c
parent5806e0622de3356ae291a74a4c0d4c2e0af4c0d6 (diff)
re PR middle-end/36550 (Wrong "may be used uninitialized" warning (conditional PHIs))
PR tree-optimization/36550
	* tree-ssa-threadupdate.c (count_stmts_and_phis_in_block): New.
	(mark_threaded_blocks): Rewrite code to avoid block copying when
	optimizing for size.  Don't pessimize blocks which will be
	copied, but all the statements will be dead.

	PR tree-optimization/36550
	* gcc.dg/tree-ssa/pr36550.c: New test.

From-SVN: r255731
Diffstat (limited to 'gcc/tree-ssa-threadupdate.c')
-rw-r--r--  gcc/tree-ssa-threadupdate.c | 84 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 66 insertions(+), 18 deletions(-)
diff --git a/gcc/tree-ssa-threadupdate.c b/gcc/tree-ssa-threadupdate.c
index b29ffe195c8..7b823d130fa 100644
--- a/gcc/tree-ssa-threadupdate.c
+++ b/gcc/tree-ssa-threadupdate.c
@@ -1737,6 +1737,31 @@ phi_args_equal_on_edges (edge e1, edge e2)
return true;
}
+/* Return the number of non-debug statements and non-virtual PHIs in a
+ block. */
+
+static unsigned int
+count_stmts_and_phis_in_block (basic_block bb)
+{
+ unsigned int num_stmts = 0;
+
+ gphi_iterator gpi;
+ for (gpi = gsi_start_phis (bb); !gsi_end_p (gpi); gsi_next (&gpi))
+ if (!virtual_operand_p (PHI_RESULT (gpi.phi ())))
+ num_stmts++;
+
+ gimple_stmt_iterator gsi;
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple *stmt = gsi_stmt (gsi);
+ if (!is_gimple_debug (stmt))
+ num_stmts++;
+ }
+
+ return num_stmts;
+}
+
+
/* Walk through the registered jump threads and convert them into a
form convenient for this pass.
@@ -1856,28 +1881,51 @@ mark_threaded_blocks (bitmap threaded_blocks)
}
}
- /* If optimizing for size, only thread through block if we don't have
- to duplicate it or it's an otherwise empty redirection block. */
+ /* When optimizing for size, prune all thread paths where statement
+ duplication is necessary.
+
+ We walk the jump thread path looking for copied blocks. There's
+ two types of copied blocks.
+
+ EDGE_COPY_SRC_JOINER_BLOCK is always copied and thus we will
+ cancel the jump threading request when optimizing for size.
+
+ EDGE_COPY_SRC_BLOCK which is copied, but some of its statements
+ will be killed by threading. If threading does not kill all of
+ its statements, then we should cancel the jump threading request
+ when optimizing for size. */
if (optimize_function_for_size_p (cfun))
{
EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
{
- bb = BASIC_BLOCK_FOR_FN (cfun, i);
- if (EDGE_COUNT (bb->preds) > 1
- && !redirection_block_p (bb))
- {
- FOR_EACH_EDGE (e, ei, bb->preds)
- {
- if (e->aux)
- {
- vec<jump_thread_edge *> *path = THREAD_PATH (e);
- delete_jump_thread_path (path);
- e->aux = NULL;
- }
- }
- }
- else
- bitmap_set_bit (threaded_blocks, i);
+ FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (cfun, i)->preds)
+ if (e->aux)
+ {
+ vec<jump_thread_edge *> *path = THREAD_PATH (e);
+
+ unsigned int j;
+ for (j = 1; j < path->length (); j++)
+ {
+ bb = (*path)[j]->e->src;
+ if (redirection_block_p (bb))
+ ;
+ else if ((*path)[j]->type == EDGE_COPY_SRC_JOINER_BLOCK
+ || ((*path)[j]->type == EDGE_COPY_SRC_BLOCK
+ && (count_stmts_and_phis_in_block (bb)
+ != estimate_threading_killed_stmts (bb))))
+ break;
+ }
+
+ if (j != path->length ())
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ dump_jump_thread_path (dump_file, *path, 0);
+ delete_jump_thread_path (path);
+ e->aux = NULL;
+ }
+ else
+ bitmap_set_bit (threaded_blocks, i);
+ }
}
}
else