/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;
/* Rough guesses, given names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 100 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
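/* For illustration only (assuming the usual REG_BR_PROB_BASE of 10000),
   these work out to:
     PROB_VERY_UNLIKELY = 99     (~1%)
     PROB_EVEN          = 5000   (50%)
     PROB_VERY_LIKELY   = 9901   (~99%)
     PROB_ALWAYS        = 10000  (100%)  */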
static bool predicted_by_p (basic_block, enum br_predictor);
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *loop);
static void propagate_freq (struct loop *);
static void estimate_bb_frequencies (struct loops *);
static void counts_to_freqs (void);
static void process_note_predictions (basic_block, int *);
static void process_note_prediction (basic_block, int *, int, int);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};

/* Use the given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1
/* Convert a hit rate given in percent to our fixed-point representation.  */
#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
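/* Worked example (again assuming REG_BR_PROB_BASE == 10000): a predictor
   that is right 79% of the time gets
     HITRATE (79) = (79 * 10000 + 50) / 100 = 7900;
   the "+ 50" rounds to nearest rather than truncating.  */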
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}
/* Return true in case BB is probably never executed.  */

bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}
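/* For illustration: with profile_info->runs == 100, a block with
   bb->count == 30 yields (30 + 50) / 100 == 0 and is treated as never
   executed, while bb->count == 60 yields (60 + 50) / 100 == 1.  In other
   words, a block has to execute in at least about half of the training
   runs on average to count as executed.  */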
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

static bool
predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;

  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}
static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  if (!any_condjump_p (insn))
    abort ();
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}
/* Predict insn by the given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with the given probability if possible.  */

void
predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store the probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Return true when we can store a prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not at computed jumps or other complicated cases.  */

static bool
can_predict_insn_p (rtx insn)
{
  return (GET_CODE (insn) == JUMP_INSN
	  && any_condjump_p (insn)
	  && BLOCK_FOR_INSN (insn)->succ->succ_next);
}
/* Predict edge E by the given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e = bb->succ;

  if (!rtl_dump_file)
    return;

  while (e && (e->flags & EDGE_FALLTHRU))
    e = e->succ_next;

  fprintf (rtl_dump_file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (rtl_dump_file, "  exec ");
      fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (rtl_dump_file, " hit ");
	  fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (rtl_dump_file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (rtl_dump_file, "\n");
}
/* Combine all REG_BR_PRED notes into a single probability and attach a
   REG_BR_PROB note if not already present.  Remove now-useless REG_BR_PRED
   notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  rtx *pnote = &REG_NOTES (insn);
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  In the future we will use
     better probability combination techniques.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32-bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }
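  /* Worked example of the combination above (illustrative numbers only,
     REG_BR_PROB_BASE == 10000): combining an existing estimate of 9000
     (90%) with a new prediction of 7000 (70%) gives
	d = 9000 * 7000 + 1000 * 3000 = 66000000
	combined = 9000 * 7000 * 10000 / d + 0.5 ~= 9545,
     i.e. two agreeing predictors reinforce each other to ~95.5%.  */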
  /* Decide which heuristic to use.  In case we didn't match anything,
     use the no_prediction heuristic; in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (PRED_DS_THEORY, combined_probability, bb, !first_match);
      dump_prediction (PRED_FIRST_MATCH, best_probability, bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into the CFG in case we are seeing a
	 non-degenerate conditional jump.  */
      if (bb->succ->succ_next)
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
}
/* Statically estimate the probability that a branch will be taken.
   ??? In the next revision there will be a number of other predictors added
   from the above references.  Further, each heuristic will be factored out
   into its own function for clarity (and to facilitate the combination of
   predictions).  */

void
estimate_probability (struct loops *loops_info)
{
  basic_block bb;
  unsigned i;

  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      int exits;
      struct loop *loop = loops_info->parray[i];
      struct loop_desc desc;
      unsigned HOST_WIDE_INT niter;

      flow_loop_scan (loop, LOOP_EXIT_EDGES);
      exits = loop->num_exits;

      if (simple_loop_p (loop, &desc) && desc.const_iter)
	{
	  int prob;
	  niter = desc.niter + 1;
	  if (niter == 0)	/* We might overflow here.  */
	    niter = desc.niter;

	  prob = (REG_BR_PROB_BASE
		  - (REG_BR_PROB_BASE + niter / 2) / niter);
	  /* The branch prediction algorithm gives 0 frequency for everything
	     after the end of a loop that has 0 probability of finishing.  */
	  if (prob == REG_BR_PROB_BASE)
	    prob = REG_BR_PROB_BASE - 1;
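	  /* Worked example (illustrative, REG_BR_PROB_BASE == 10000):
	     for a loop known to iterate 100 times,
		prob = 10000 - (10000 + 50) / 100 = 9900,
	     i.e. a 99% chance that the loop continues at each exit test.  */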
	  predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
			prob);
	}

      bbs = get_loop_body (loop);
      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statements.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better handled
	     separately.  */
	  if (!can_predict_insn_p (BB_END (bb))
	      || predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  for (e = bb->succ; e; e = e->succ_next)
	    if (e->dest == loop->header
		&& e->src == loop->latch)
	      {
		header_found = 1;
		predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
	      }

	  /* Loop exit heuristics - predict as not taken an edge exiting
	     the loop if the conditional has no loop header successors.  */
	  if (!header_found)
	    for (e = bb->succ; e; e = e->succ_next)
	      if (e->dest->index < 0
		  || !flow_bb_inside_loop_p (loop, e->dest))
		predict_edge
		  (e, PRED_LOOP_EXIT,
		   (REG_BR_PROB_BASE
		    - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
		   / exits);
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }
  /* Attempt to predict conditional jumps using a number of heuristics.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      rtx cond, earliest;
      edge e;

      if (! can_predict_insn_p (last_insn))
	continue;

      for (e = bb->succ; e; e = e->succ_next)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and others are often used for fast paths
	     through the function.  */
	  if ((e->dest == EXIT_BLOCK_PTR
	       || (e->dest->succ && !e->dest->succ->succ_next
		   && e->dest->succ->dest == EXIT_BLOCK_PTR))
	      && !predicted_by_p (bb, PRED_NULL_RETURN)
	      && !predicted_by_p (bb, PRED_CONST_RETURN)
	      && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
	      && !last_basic_block_p (e->dest))
	    predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);

	  /* Look for the block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      rtx insn;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (insn = BB_HEAD (e->dest); insn != NEXT_INSN (BB_END (e->dest));
		   insn = NEXT_INSN (insn))
		if (GET_CODE (insn) == CALL_INSN
		    /* Constant and pure calls are rarely used to signal
		       something exceptional.  */
		    && ! CONST_OR_PURE_CALL_P (insn))
		  {
		    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		    break;
		  }
	    }
	}
      cond = get_condition (last_insn, &earliest, false);
      if (! cond)
	continue;

      /* Try "pointer heuristic."
	 A comparison ptr == 0 is predicted as false.
	 Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
      if (GET_RTX_CLASS (GET_CODE (cond)) == '<'
	  && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	      || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
	{
	  if (GET_CODE (cond) == EQ)
	    predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
	  else if (GET_CODE (cond) == NE)
	    predict_insn_def (last_insn, PRED_POINTER, TAKEN);
	}
      else
	/* Try "opcode heuristic."
	   EQ tests are usually false and NE tests are usually true.  Also,
	   most quantities are positive, so we can make the appropriate
	   guesses about signed comparisons against zero.  */
	switch (GET_CODE (cond))
	  {
	  case CONST_INT:
	    /* Unconditional branch.  */
	    predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			      cond == const0_rtx ? NOT_TAKEN : TAKEN);
	    break;

	  case EQ:
	  case UNEQ:
	    /* Floating point comparisons appear to behave in a very
	       unpredictable way because of the special role of = tests in
	       FP code.  */
	    if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	      ;
	    /* Comparisons with 0 are often used for booleans and there is
	       nothing useful to predict about them.  */
	    else if (XEXP (cond, 1) == const0_rtx
		     || XEXP (cond, 0) == const0_rtx)
	      ;
	    else
	      predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	    break;

	  case NE:
	  case LTGT:
	    /* Floating point comparisons appear to behave in a very
	       unpredictable way because of the special role of = tests in
	       FP code.  */
	    if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	      ;
	    /* Comparisons with 0 are often used for booleans and there is
	       nothing useful to predict about them.  */
	    else if (XEXP (cond, 1) == const0_rtx
		     || XEXP (cond, 0) == const0_rtx)
	      ;
	    else
	      predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	    break;

	  case ORDERED:
	    predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	    break;

	  case UNORDERED:
	    predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	    break;

	  case LE:
	  case LT:
	    if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
		|| XEXP (cond, 1) == constm1_rtx)
	      predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	    break;

	  case GE:
	  case GT:
	    if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
		|| XEXP (cond, 1) == constm1_rtx)
	      predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	    break;

	  default:
	    break;
	  }
    }
  /* Attach the combined probability to each conditional jump.  */
  FOR_EACH_BB (bb)
    if (GET_CODE (BB_END (bb)) == JUMP_INSN
	&& any_condjump_p (BB_END (bb))
	&& bb->succ->succ_next != NULL)
      combine_predictions_for_insn (BB_END (bb), bb);

  free_dominance_info (CDI_POST_DOMINATORS);

  remove_fake_edges ();
  estimate_bb_frequencies (loops_info);
}
/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based on these
   values.  */
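/* For instance (illustrative source-level usage, not part of this file):

     if (__builtin_expect (ptr != NULL, 1))
       ...

   roughly expands to a comparison whose input register carries a
   NOTE_INSN_EXPECTED_VALUE note recording the value the register is
   expected to have; the pass below turns that into a PRED_BUILTIN_EXPECT
   branch probability on the conditional jump.  */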
void
expected_value_to_br_prob (void)
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* Look for expected value notes.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
	    {
	      ev = NOTE_EXPECTED_VALUE (insn);
	      ev_reg = XEXP (ev, 0);
	      delete_insn (insn);
	    }
	  continue;

	case CODE_LABEL:
	  /* Never propagate across labels.  */
	  ev = NULL_RTX;
	  continue;

	case JUMP_INSN:
	  /* Look for simple conditional branches.  If we haven't got an
	     expected value yet, no point going further.  */
	  if (GET_CODE (insn) != JUMP_INSN || ev == NULL_RTX
	      || ! any_condjump_p (insn))
	    continue;
	  break;

	default:
	  /* Look for insns that clobber the EV register.  */
	  if (ev && reg_set_p (ev_reg, insn))
	    ev = NULL_RTX;
	  continue;
	}

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
		(expected_value (eq r70 0))
		(set r71 -1)
		(set r80 (lt r70 r71))
		(set pc (if_then_else (ne r80 0) ...))
	 as canonicalize_condition will render this to us as
		(ge r70, 0)
	 Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg, false);
      if (! cond || XEXP (cond, 0) != ev_reg
	  || GET_CODE (XEXP (cond, 1)) != CONST_INT)
	continue;

      /* Substitute and simplify.  Given that the expression we're
	 building involves two constants, we should wind up with either
	 true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
			     XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      if (cond != const_true_rtx && cond != const0_rtx)
	abort ();
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
			cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}
/* Check whether this is the last basic block of the function.  Commonly
   there is one extra common cleanup block.  */

static bool
last_basic_block_p (basic_block bb)
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
	  || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
	      && bb->succ && !bb->succ->succ_next
	      && bb->succ->dest->next_bb == EXIT_BLOCK_PTR));
}
/* Set branch probabilities according to PREDiction and FLAGS.
   HEADS[bb->index] should be the index of the basic block in which we
   need to alter branch predictions (i.e. the first of our dominators
   that we do not post-dominate); we fill this information in on demand,
   so -1 may be there in case it was not needed yet.  */

static void
process_note_prediction (basic_block bb, int *heads, int pred, int flags)
{
  edge e;
  int y;
  bool taken;

  taken = flags & IS_TAKEN;

  if (heads[bb->index] < 0)
    {
      /* This is the first time we need this field in the heads array;
	 find the first dominator that we do not post-dominate (we are
	 using already-known members of the heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] < 0)
	{
	  if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	    break;
	  heads[next_ai->index] = ai->index;
	  ai = next_ai;
	  next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
	}
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	head = next_ai->index;
      else
	head = heads[next_ai->index];
      while (next_ai != bb)
	{
	  next_ai = ai;
	  if (heads[ai->index] == ENTRY_BLOCK)
	    ai = ENTRY_BLOCK_PTR;
	  else
	    ai = BASIC_BLOCK (heads[ai->index]);
	  heads[next_ai->index] = head;
	}
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block || !can_predict_insn_p (BB_END (BASIC_BLOCK (y))))
    return;
  for (e = BASIC_BLOCK (y)->succ; e; e = e->succ_next)
    if (e->dest->index >= 0
	&& dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}
/* Gathers NOTE_INSN_PREDICTIONs in the given basic block and turns them
   into branch probabilities.  For a description of the heads array, see
   process_note_prediction.  */

static void
process_note_predictions (basic_block bb, int *heads)
{
  rtx insn;
  edge e;

  /* Additionally, we check here for blocks with no successors.  */
  int contained_noreturn_call = 0;
  int was_bb_head = 0;
  int noreturn_block = 1;

  for (insn = BB_END (bb); insn;
       was_bb_head |= (insn == BB_HEAD (bb)), insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE)
	{
	  if (was_bb_head)
	    break;
	  else
	    {
	      /* Noreturn calls cause the program to exit; therefore they
		 are always predicted as not taken.  */
	      if (GET_CODE (insn) == CALL_INSN
		  && find_reg_note (insn, REG_NORETURN, NULL))
		contained_noreturn_call = 1;
	      continue;
	    }
	}
      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
	{
	  int alg = (int) NOTE_PREDICTION_ALG (insn);
	  /* Process single prediction note.  */
	  process_note_prediction (bb, heads,
				   alg, (int) NOTE_PREDICTION_FLAGS (insn));
	  delete_insn (insn);
	}
    }
  for (e = bb->succ; e; e = e->succ_next)
    if (!(e->flags & EDGE_FAKE))
      noreturn_block = 0;
  if (contained_noreturn_call && noreturn_block)
    {
      /* This block ended for some reason other than a return.  If it
	 ended because of a noreturn call, the edge should certainly not
	 be taken; otherwise it is probably some error recovery.  */
      process_note_prediction (bb, heads, PRED_NORETURN, NOT_TAKEN);
    }
}
/* Gathers NOTE_INSN_PREDICTIONs and turns them into
   branch probabilities.  */

void
note_prediction_to_br_prob (void)
{
  basic_block bb;
  int *heads;

  /* To enable handling of noreturn blocks.  */
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  heads = xmalloc (sizeof (int) * last_basic_block);
  memset (heads, -1, sizeof (int) * last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  /* Process all prediction notes.  */

  FOR_EACH_BB (bb)
    process_note_predictions (bb, heads);

  free_dominance_info (CDI_POST_DOMINATORS);
  free_dominance_info (CDI_DOMINATORS);
  free (heads);

  remove_fake_edges ();
}
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* True if block needs to be visited in propagate_freq.  */
  unsigned int tovisit:1;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* If the edge is a loopback edge, the probability that the edge will
     be reached given that the header is.  The estimated number of
     iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)
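/* For example (illustrative numbers): if a loop's back edge is taken
   with probability 0.9, the loop is expected to iterate
   1 / (1 - 0.9) = 10 times.  */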
/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (struct loop *loop)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  edge e;
  basic_block nextbb;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  FOR_EACH_BB (bb)
    {
      if (BLOCK_INFO (bb)->tovisit)
	{
	  int count = 0;

	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      count++;
	    else if (BLOCK_INFO (e->src)->tovisit
		     && rtl_dump_file && !EDGE_INFO (e)->back_edge)
	      fprintf (rtl_dump_file,
		       "Irreducible region hit, ignoring edge to %i->%i\n",
		       e->src->index, bb->index);
	  BLOCK_INFO (bb)->npredecessors = count;
	}
    }
  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      abort ();
#endif

	  for (e = bb->pred; e; e = e->pred_next)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/* frequency += (e->probability
				 * BLOCK_INFO (e->src)->frequency /
				 REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
	    }
	}

      BLOCK_INFO (bb)->tovisit = 0;

      /* Compute back edge frequencies.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (e->dest == head)
	  {
	    sreal tmp;

	    /* EDGE_INFO (e)->back_edge_prob
		  = ((e->probability * BLOCK_INFO (bb)->frequency)
		     / REG_BR_PROB_BASE);  */

	    sreal_init (&tmp, e->probability, 0);
	    sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
	    sreal_mul (&EDGE_INFO (e)->back_edge_prob,
		       &tmp, &real_inv_br_prob_base);
	  }

      /* Propagate to successor blocks.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}
/* Estimate probabilities of loopback edges in loops at the same nesting
   level.  */

static void
estimate_loops_at_level (struct loop *first_loop)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner);

      if (loop->latch->succ)  /* Do not do this for the dummy function loop.  */
	{
	  /* Find current loop back edge and mark it.  */
	  e = loop_latch_edge (loop);
	  EDGE_INFO (e)->back_edge = 1;
	}

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	BLOCK_INFO (bbs[i])->tovisit = 1;
      free (bbs);
      propagate_freq (loop);
    }
}
/* Convert counts measured by profile driven feedback to frequencies.  */

static void
counts_to_freqs (void)
{
  gcov_type count_max = 1;
  basic_block bb;

  FOR_EACH_BB (bb)
    count_max = MAX (bb->count, count_max);

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
}
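/* For illustration (BB_FREQ_MAX == 10000): if the hottest block ran
   count_max == 1000 times and some block ran 250 times, that block gets
   frequency (250 * 10000 + 500) / 1000 = 2500.  Frequencies are simply
   counts rescaled so the hottest block sits at BB_FREQ_MAX.  */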
/* Return true if the function is likely to be expensive, so there is
   no point in optimizing the performance of the prologue or epilogue, or
   in doing inlining at the expense of code size growth.  THRESHOLD is the
   limit on the number of instructions the function can execute on average
   to still be considered not expensive.  */
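/* Sketch of the check below (illustrative numbers): with
   ENTRY_BLOCK_PTR->frequency == 1000 and THRESHOLD == 50, the limit is
   50000; summing each active insn's block frequency approximates
   "average executed insns per call" scaled by the entry frequency, so
   exceeding the limit means more than THRESHOLD insns per call.  */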
bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute this accurately for large thresholds due to the
     scaled frequencies.  */
  if (threshold > BB_FREQ_MAX)
    abort ();

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* At most BB_FREQ_MAX^2, so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}
/* Estimate basic block frequencies from the given branch probabilities.  */

static void
estimate_bb_frequencies (struct loops *loops)
{
  basic_block bb;
  sreal freq_max;

  if (flag_branch_probabilities)
    counts_to_freqs ();
  else
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  sreal_init (&real_zero, 0, 0);
	  sreal_init (&real_one, 1, 0);
	  sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
	  sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
	  sreal_init (&real_one_half, 1, -1);
	  sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
	  sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
	}

      mark_dfs_back_edges ();
      /* Fill in the probability values in the flowgraph based on the
	 REG_BR_PROB notes.  */
      FOR_EACH_BB (bb)
	{
	  rtx last_insn = BB_END (bb);

	  if (!can_predict_insn_p (last_insn))
	    {
	      /* We can predict only conditional jumps at the moment.
		 Expect each edge to be equally probable.
		 ??? In the future we want to make abnormal edges
		 improbable.  */
	      int nedges = 0;
	      edge e;

	      for (e = bb->succ; e; e = e->succ_next)
		{
		  nedges++;
		  if (e->probability != 0)
		    break;
		}
	      if (!e)
		for (e = bb->succ; e; e = e->succ_next)
		  e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
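	      /* Illustrative numbers (REG_BR_PROB_BASE == 10000): a block
		 with three successors and no probability information gives
		 each edge (10000 + 1) / 3 = 3333, i.e. an even split.  */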
	    }
	}

      ENTRY_BLOCK_PTR->succ->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;

	  BLOCK_INFO (bb)->tovisit = 0;
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
	 to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}
/* Decide whether the function is hot, cold, or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}
/* Choose an appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
	 but this requires more work, as the frequency needs to match
	 for all generated objects, so we would need to merge the
	 frequencies of all instances.  For now just never set the
	 frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;
  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}