diff options
author | Jan Hubicka <hubicka@ucw.cz> | 2018-01-23 10:55:37 +0100 |
---|---|---|
committer | Jan Hubicka <hubicka@gcc.gnu.org> | 2018-01-23 09:55:37 +0000 |
commit | 9f4b0885e77f836169c290e93faa87b58ab34276 (patch) | |
tree | aab41c448f8a341f2b9981beba03fcff02e3202e /gcc/predict.c | |
parent | dae3e97c81811d87cd9d58e68a85dda83676b7f4 (diff) | |
download | gcc-9f4b0885e77f836169c290e93faa87b58ab34276.zip gcc-9f4b0885e77f836169c290e93faa87b58ab34276.tar.gz gcc-9f4b0885e77f836169c290e93faa87b58ab34276.tar.bz2 |
predict.c (probably_never_executed): Only use precise profile info.
* predict.c (probably_never_executed): Only use precise profile info.
(compute_function_frequency): Skip the after-inlining hack since we now
have quality checking.
From-SVN: r256975
Diffstat (limited to 'gcc/predict.c')
-rw-r--r-- | gcc/predict.c | 20 |
1 file changed, 10 insertions, 10 deletions
diff --git a/gcc/predict.c b/gcc/predict.c index 340c766..724ba45 100644 --- a/gcc/predict.c +++ b/gcc/predict.c @@ -212,7 +212,12 @@ probably_never_executed (struct function *fun, gcc_checking_assert (fun); if (count == profile_count::zero ()) return true; - if (count.initialized_p () && profile_status_for_fn (fun) == PROFILE_READ) + /* Do not trust adjusted counts. This will make us drop into the cold section + code with low execution count as a result of inlining. These low counts + are not safe even with read profile and may lead us to dropping + code which actually gets executed into the cold section of the binary, + which is not desirable. */ + if (count.precise_p () && profile_status_for_fn (fun) == PROFILE_READ) { int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION); if (count.apply_scale (unlikely_count_fraction, 1) >= profile_info->runs) @@ -3759,15 +3764,10 @@ compute_function_frequency (void) return; } - /* Only first time try to drop function into unlikely executed. - After inlining the roundoff errors may confuse us. - Ipa-profile pass will drop functions only called from unlikely - functions to unlikely and that is most of what we care about. */ - if (!cfun->after_inlining) - { - node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED; - warn_function_cold (current_function_decl); - } + node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED; + warn_function_cold (current_function_decl); + if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa() == profile_count::zero ()) + return; FOR_EACH_BB_FN (bb, cfun) { if (maybe_hot_bb_p (cfun, bb)) |