summaryrefslogtreecommitdiff
path: root/gcc
diff options
context:
space:
mode:
author Jan Hubicka <hubicka@ucw.cz> 2017-06-05 19:41:32 +0200
committer Jan Hubicka <hubicka@gcc.gnu.org> 2017-06-05 17:41:32 +0000
commit aea5e79a63f965a9ed5fd2ef1943e3865495e987 (patch)
tree 52c60043f6ed486d65cb1a8a38cb71979166465c /gcc
parent 2f20e7db2caea4909ddff539b2419c9f34c907c4 (diff)
cfgexpand.c (expand_gimple_tailcall): Initialize profile of new edge.
* cfgexpand.c (expand_gimple_tailcall): Initialize profile of new edge. * ipa-inline.c (want_inline_self_recursive_call_p): Watch for missing profile in callgraph edge. * profile-count.h (apply_probability): If THIS is 0, then result is 0. (apply_scale): Likewise. * tree-inline.c (copy_bb, copy_edges_for_bb, copy_cfg_body): Also scale profile when inlining function with zero profile. (initialize_cfun): Update exit block profile even when it is zero. * tree-ssa-threadupdate.c (clear_counts_path): Handle correctly case when profile is read. From-SVN: r248885
Diffstat (limited to 'gcc')
-rw-r--r--gcc/ChangeLog14
-rw-r--r--gcc/cfgexpand.c4
-rw-r--r--gcc/ipa-inline.c5
-rw-r--r--gcc/profile-count.h6
-rw-r--r--gcc/shrink-wrap.c4
-rw-r--r--gcc/tree-inline.c11
-rw-r--r--gcc/tree-ssa-threadupdate.c14
7 files changed, 43 insertions, 15 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index cc8112d4e01..15841daac59 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,17 @@
+2017-06-05 Jan Hubicka <hubicka@ucw.cz>
+
+ * cfgexpand.c (expand_gimple_tailcall): Initialize profile of
+ new edge.
+ * ipa-inline.c (want_inline_self_recursive_call_p): Watch for missing
+ profile in callgraph edge.
+ * profile-count.h (apply_probability): If THIS is 0, then result is 0
+ (apply_scale): Likewise.
+ * tree-inline.c (copy_bb, copy_edges_for_bb, copy_cfg_body):
+ Also scale profile when inlining function with zero profile.
+ (initialize_cfun): Update exit block profile even when it is zero.
+ * tree-ssa-threadupdate.c (clear_counts_path): Handle correctly case
+ when profile is read.
+
2017-06-05 Michael Meissner <meissner@linux.vnet.ibm.com>
* config/rs6000/rs6000.c (toplevel): Include attribs.h.
diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index 3261fce8843..c1f80727d30 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -3850,8 +3850,8 @@ expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
| EDGE_SIBCALL);
- e->probability += probability;
- e->count += count;
+ e->probability = probability;
+ e->count = count;
BB_END (bb) = last;
update_bb_for_insn (bb);
diff --git a/gcc/ipa-inline.c b/gcc/ipa-inline.c
index 8900556ef10..64c9ebd6692 100644
--- a/gcc/ipa-inline.c
+++ b/gcc/ipa-inline.c
@@ -912,7 +912,7 @@ want_inline_self_recursive_call_p (struct cgraph_edge *edge,
methods. */
else
{
- if (max_count > profile_count::zero ()
+ if (max_count > profile_count::zero () && edge->count.initialized_p ()
&& (edge->count.to_gcov_type () * 100
/ outer_node->count.to_gcov_type ()
<= PARAM_VALUE (PARAM_MIN_INLINE_RECURSIVE_PROBABILITY)))
@@ -920,7 +920,8 @@ want_inline_self_recursive_call_p (struct cgraph_edge *edge,
reason = "profile of recursive call is too small";
want_inline = false;
}
- else if (max_count == profile_count::zero ()
+ else if ((max_count == profile_count::zero ()
+ || !edge->count.initialized_p ())
&& (edge->frequency * 100 / caller_freq
<= PARAM_VALUE (PARAM_MIN_INLINE_RECURSIVE_PROBABILITY)))
{
diff --git a/gcc/profile-count.h b/gcc/profile-count.h
index 2ac7f32278d..e7815dbcfcb 100644
--- a/gcc/profile-count.h
+++ b/gcc/profile-count.h
@@ -221,6 +221,8 @@ public:
profile_count apply_probability (int prob) const
{
gcc_checking_assert (prob >= 0 && prob <= REG_BR_PROB_BASE);
+ if (*this == profile_count::zero ())
+ return *this;
if (!initialized_p ())
return profile_count::uninitialized ();
profile_count ret;
@@ -230,6 +232,8 @@ public:
/* Return *THIS * NUM / DEN. */
profile_count apply_scale (int64_t num, int64_t den) const
{
+ if (*this == profile_count::zero ())
+ return *this;
if (!initialized_p ())
return profile_count::uninitialized ();
profile_count ret;
@@ -243,7 +247,7 @@ public:
}
profile_count apply_scale (profile_count num, profile_count den) const
{
- if (*this == profile_count::zero ())
+ if (*this == profile_count::zero () || num == profile_count::zero ())
return profile_count::zero ();
if (!initialized_p () || !num.initialized_p () || !den.initialized_p ())
return profile_count::uninitialized ();
diff --git a/gcc/shrink-wrap.c b/gcc/shrink-wrap.c
index eaa1522bb0d..fb235e2bc9f 100644
--- a/gcc/shrink-wrap.c
+++ b/gcc/shrink-wrap.c
@@ -561,9 +561,11 @@ handle_simple_exit (edge e)
BB_END (old_bb) = end;
redirect_edge_succ (e, new_bb);
+ new_bb->count = e->count;
+ new_bb->frequency = e->frequency;
e->flags |= EDGE_FALLTHRU;
- e = make_edge (new_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
+ e = make_single_succ_edge (new_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
}
e->flags &= ~EDGE_FALLTHRU;
diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index 7f20cdc7f8e..329800185ec 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -1763,7 +1763,8 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
tree decl;
gcov_type freq;
basic_block prev;
- bool scale = num.initialized_p () && den.initialized_p () && den > 0;
+ bool scale = num.initialized_p ()
+ && (den > 0 || num == profile_count::zero ());
/* Search for previous copied basic block. */
prev = bb->prev_bb;
@@ -2211,7 +2212,8 @@ copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
gimple_stmt_iterator si;
int flags;
bool need_debug_cleanup = false;
- bool scale = num.initialized_p () && den.initialized_p () && den > 0;
+ bool scale = num.initialized_p ()
+ && (den > 0 || num == profile_count::zero ());
/* Use the indices from the original blocks to create edges for the
new ones. */
@@ -2472,7 +2474,7 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
*/
if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.initialized_p ()
&& count.initialized_p ()
- && ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count > 0)
+ && ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.initialized_p ())
{
ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
@@ -2683,7 +2685,8 @@ copy_cfg_body (copy_body_data * id, profile_count count, int frequency_scale,
profile_count incoming_count = profile_count::zero ();
profile_count num = count;
profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
- bool scale = num.initialized_p () && den.initialized_p () && den > 0;
+ bool scale = num.initialized_p ()
+ && (den > 0 || num == profile_count::zero ());
/* This can happen for COMDAT routines that end up with 0 counts
despite being called (see the comments for handle_missing_profiles()
diff --git a/gcc/tree-ssa-threadupdate.c b/gcc/tree-ssa-threadupdate.c
index a77c279b41b..df4c6def78a 100644
--- a/gcc/tree-ssa-threadupdate.c
+++ b/gcc/tree-ssa-threadupdate.c
@@ -1084,16 +1084,20 @@ clear_counts_path (struct redirection_data *rd)
vec<jump_thread_edge *> *path = THREAD_PATH (e);
edge ein, esucc;
edge_iterator ei;
+ profile_count val = profile_count::uninitialized ();
+ if (profile_status_for_fn (cfun) == PROFILE_READ)
+ val = profile_count::zero ();
+
FOR_EACH_EDGE (ein, ei, e->dest->preds)
- ein->count = profile_count::uninitialized ();
+ ein->count = val;
/* First clear counts along original path. */
for (unsigned int i = 1; i < path->length (); i++)
{
edge epath = (*path)[i]->e;
FOR_EACH_EDGE (esucc, ei, epath->src->succs)
- esucc->count = profile_count::uninitialized ();
- epath->src->count = profile_count::uninitialized ();
+ esucc->count = val;
+ epath->src->count = val;
}
/* Also need to clear the counts along duplicated path. */
for (unsigned int i = 0; i < 2; i++)
@@ -1102,8 +1106,8 @@ clear_counts_path (struct redirection_data *rd)
if (!dup)
continue;
FOR_EACH_EDGE (esucc, ei, dup->succs)
- esucc->count = profile_count::uninitialized ();
- dup->count = profile_count::uninitialized ();
+ esucc->count = val;
+ dup->count = val;
}
}