author     Jan Hubicka <hubicka@ucw.cz>       2017-06-08 17:15:51 +0200
committer  Jan Hubicka <hubicka@gcc.gnu.org>  2017-06-08 15:15:51 +0000
commit     ee4e85b78f5dab1f297aa9278694593c18fe1ae0 (patch)
tree       5490f5b728b61ce913e53ff05af703efe199fa22
parent     ae5512dd4fbfb031cc967fc154a534863d4c05f5 (diff)
cgraph.c (cgraph_edge::maybe_hot_p): Do not check flag_branch_probabilities.
* cgraph.c (cgraph_edge::maybe_hot_p): Do not check
flag_branch_probabilities.
* ipa-inline.c (edge_badness): Likewise.
* ipa-profile.c (ipa_propagate_frequency_1): Likewise.
* postreload-gcse.c (eliminate_partially_redundant_load): Likewise.
* predict.c (maybe_hot_frequency_p): Likewise.
(probably_never_executed): Likewise.
* sched-ebb.c (schedule_ebbs): Likewise.
* sched-rgn.c (find_single_block_region): Likewise.
* tracer.c (tail_duplicate): Likewise.

From-SVN: r249020
-rw-r--r--  gcc/ChangeLog          13
-rw-r--r--  gcc/cgraph.c            5
-rw-r--r--  gcc/ipa-inline.c        2
-rw-r--r--  gcc/ipa-profile.c       2
-rw-r--r--  gcc/postreload-gcse.c   2
-rw-r--r--  gcc/predict.c           5
-rw-r--r--  gcc/sched-ebb.c         2
-rw-r--r--  gcc/sched-rgn.c         2
-rw-r--r--  gcc/tracer.c            4
9 files changed, 23 insertions(+), 14 deletions(-)
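Context for the hunks below: every file receives the same mechanical substitution, replacing the test of the -fbranch-probabilities command-line flag with a query of the profile state actually recorded for the function: profile_status_for_fn (cfun) == PROFILE_READ in CFG-level code, count.initialized_p () in the cgraph/ipa code. The following is a minimal toy model of that guard change, not GCC code; profile_info, struct function, and profile_status_d here are simplified stand-ins for the real internals.

#include <stdbool.h>
#include <stdio.h>

/* Simplified stand-ins for the GCC internals touched by this patch.  */
enum profile_status_d { PROFILE_ABSENT, PROFILE_GUESSED, PROFILE_READ };
struct function { enum profile_status_d profile_status; };

static bool profile_info = true;               /* gcov summary present  */
static bool flag_branch_probabilities = false; /* flag not passed       */

/* Old guard: keys off the command-line flag.  */
static bool
use_feedback_cutoff_old (void)
{
  return profile_info && flag_branch_probabilities;
}

/* New guard: keys off the profile status recorded on the function,
   mirroring profile_status_for_fn (cfun) == PROFILE_READ.  */
static bool
use_feedback_cutoff_new (struct function *fun)
{
  return profile_info && fun->profile_status == PROFILE_READ;
}

int
main (void)
{
  /* A function whose profile was read even though -fbranch-probabilities
     was not given: the old guard falls back to the static cutoff, the
     new one selects the feedback-based cutoff.  */
  struct function fn = { PROFILE_READ };
  printf ("old guard: %d, new guard: %d\n",
          (int) use_feedback_cutoff_old (),
          (int) use_feedback_cutoff_new (&fn));
  return 0;
}

The practical effect, as the hunks suggest, is that feedback-driven cutoffs and hotness tests now follow the per-function profile state rather than a global flag.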
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 8bff6313c91..65a12395a86 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,18 @@
2017-06-08 Jan Hubicka <hubicka@ucw.cz>
+ * cgraph.c (cgraph_edge::maybe_hot_p): Do not check
+ flag_branch_probabilities.
+ * ipa-inline.c (edge_badness): Likewise.
+ * ipa-profile.c (ipa_propagate_frequency_1): Likewise.
+ * postreload-gcse.c (eliminate_partially_redundant_load): Likewise.
+ * predict.c (maybe_hot_frequency_p): Likewise.
+ (probably_never_executed): Likewise.
+ * sched-ebb.c (schedule_ebbs): Likewise.
+ * sched-rgn.c (find_single_block_region): Likewise.
+ * tracer.c (tail_duplicate): Likewise.
+
+2017-06-08 Jan Hubicka <hubicka@ucw.cz>
+
* opts.c (finish_options): x_flag_reorder_blocks_and_partition no
longer requires x_flag_profile_use.
diff --git a/gcc/cgraph.c b/gcc/cgraph.c
index 2cbacc774d3..213587e7e2f 100644
--- a/gcc/cgraph.c
+++ b/gcc/cgraph.c
@@ -2729,10 +2729,7 @@ cgraph_edge::cannot_lead_to_return_p (void)
bool
cgraph_edge::maybe_hot_p (void)
{
- /* TODO: Export profile_status from cfun->cfg to cgraph_node. */
- if (profile_info
- && opt_for_fn (caller->decl, flag_branch_probabilities)
- && !maybe_hot_count_p (NULL, count))
+ if (!maybe_hot_count_p (NULL, count))
return false;
if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
|| (callee
diff --git a/gcc/ipa-inline.c b/gcc/ipa-inline.c
index 64c9ebd6692..8924f7eb15f 100644
--- a/gcc/ipa-inline.c
+++ b/gcc/ipa-inline.c
@@ -1078,7 +1078,7 @@ edge_badness (struct cgraph_edge *edge, bool dump)
numerator = ((sreal) 1 >> 8);
if (caller->count > profile_count::zero ())
numerator *= caller->count.to_gcov_type ();
- else if (opt_for_fn (caller->decl, flag_branch_probabilities))
+ else if (caller->count.initialized_p ())
numerator = numerator >> 11;
denominator = growth;
diff --git a/gcc/ipa-profile.c b/gcc/ipa-profile.c
index 05cad3f85b1..f149d0196fa 100644
--- a/gcc/ipa-profile.c
+++ b/gcc/ipa-profile.c
@@ -330,7 +330,7 @@ ipa_propagate_frequency_1 (struct cgraph_node *node, void *data)
it is executed by the train run. Transfer the function only if all
callers are unlikely executed. */
if (profile_info
- && opt_for_fn (d->function_symbol->decl, flag_branch_probabilities)
+ && edge->callee->count.initialized_p ()
/* Thunks are not profiled. This is more or less implementation
bug. */
&& !d->function_symbol->thunk.thunk_p
diff --git a/gcc/postreload-gcse.c b/gcc/postreload-gcse.c
index e3632a26f66..a1dcac2600c 100644
--- a/gcc/postreload-gcse.c
+++ b/gcc/postreload-gcse.c
@@ -1158,7 +1158,7 @@ eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
|| (optimize_bb_for_size_p (bb) && npred_ok > 1)
/* If we don't have profile information we cannot tell if splitting
a critical edge is profitable or not so don't do it. */
- || ((! profile_info || ! flag_branch_probabilities
+ || ((! profile_info || profile_status_for_fn (cfun) != PROFILE_READ
|| targetm.cannot_modify_jumps_p ())
&& critical_edge_split))
goto cleanup;
diff --git a/gcc/predict.c b/gcc/predict.c
index 7c7a35d4de6..b460a6f26ee 100644
--- a/gcc/predict.c
+++ b/gcc/predict.c
@@ -123,8 +123,7 @@ static inline bool
maybe_hot_frequency_p (struct function *fun, int freq)
{
struct cgraph_node *node = cgraph_node::get (fun->decl);
- if (!profile_info
- || !opt_for_fn (fun->decl, flag_branch_probabilities))
+ if (!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
{
if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
return false;
@@ -222,7 +221,7 @@ probably_never_executed (struct function *fun,
return false;
return true;
}
- if ((!profile_info || !(opt_for_fn (fun->decl, flag_branch_probabilities)))
+ if ((!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
&& (cgraph_node::get (fun->decl)->frequency
== NODE_FREQUENCY_UNLIKELY_EXECUTED))
return true;
diff --git a/gcc/sched-ebb.c b/gcc/sched-ebb.c
index 592fc1f3e88..9123343fa0f 100644
--- a/gcc/sched-ebb.c
+++ b/gcc/sched-ebb.c
@@ -622,7 +622,7 @@ schedule_ebbs (void)
if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
return;
- if (profile_info && flag_branch_probabilities)
+ if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
else
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
diff --git a/gcc/sched-rgn.c b/gcc/sched-rgn.c
index a09fc5d1066..3bb9356693e 100644
--- a/gcc/sched-rgn.c
+++ b/gcc/sched-rgn.c
@@ -477,7 +477,7 @@ find_single_block_region (bool ebbs_p)
if (ebbs_p) {
int probability_cutoff;
- if (profile_info && flag_branch_probabilities)
+ if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
else
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
diff --git a/gcc/tracer.c b/gcc/tracer.c
index bb446731420..0b7f4da0d81 100644
--- a/gcc/tracer.c
+++ b/gcc/tracer.c
@@ -270,7 +270,7 @@ tail_duplicate (void)
bitmap_clear (bb_seen);
initialize_original_copy_tables ();
- if (profile_info && flag_branch_probabilities)
+ if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
else
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
@@ -290,7 +290,7 @@ tail_duplicate (void)
weighted_insns += n * bb->frequency;
}
- if (profile_info && flag_branch_probabilities)
+ if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
cover_insns = PARAM_VALUE (TRACER_DYNAMIC_COVERAGE_FEEDBACK);
else
cover_insns = PARAM_VALUE (TRACER_DYNAMIC_COVERAGE);