1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
33 #include "cfglayout.h"
37 #include "alloc-pool.h"
40 /* Holds the interesting trailing notes for the function. */
/* Insn chains detached from before the first / after the last basic block
   while the function is in cfglayout mode; re-attached by
   fixup_reorder_chain.  NOTE(review): this listing embeds original line
   numbers and elides many lines; code below kept verbatim.  */
41 rtx cfg_layout_function_footer, cfg_layout_function_header;
/* Forward declarations for the static helpers defined later in this file.  */
43 static rtx skip_insns_after_block (basic_block);
44 static void record_effective_endpoints (void);
45 static rtx label_for_bb (basic_block);
46 static void fixup_reorder_chain (void);
48 static void set_block_levels (tree, int);
49 static void change_scope (rtx, tree, tree);
/* Non-static: also used by the ENABLE_CHECKING consistency checks.  */
51 void verify_insn_chain (void);
52 static void fixup_fallthru_exit_predecessor (void);
53 static tree insn_scope (rtx);
54 static void update_unlikely_executed_notes (basic_block);
/* Detach the insn sub-chain FIRST..LAST from the function's insn stream,
   splicing its former neighbors together, and (per the visible calls)
   update the global first/last insn pointers when the chain touched an
   end of the stream.  NOTE(review): the return statement, braces and the
   conditionals guarding the NULL-neighbor cases are elided in this
   listing — see the non-contiguous embedded line numbers.  */
57 unlink_insn_chain (rtx first, rtx last)
59 rtx prevfirst = PREV_INSN (first);
60 rtx nextlast = NEXT_INSN (last);
/* Terminate the detached chain at both ends.  */
62 PREV_INSN (first) = NULL;
63 NEXT_INSN (last) = NULL;
/* Splice the remaining stream back together.  */
65 NEXT_INSN (prevfirst) = nextlast;
67 PREV_INSN (nextlast) = prevfirst;
69 set_last_insn (prevfirst);
71 set_first_insn (nextlast);
75 /* Skip over inter-block insns occurring after BB which are typically
76 associated with BB (e.g., barriers). If there are any such insns,
77 we return the last one. Otherwise, we return the end of BB. */
80 skip_insns_after_block (basic_block bb)
82 rtx insn, last_insn, next_head, prev;
/* Scan stops at the head of the following block, if any.  */
85 if (bb->next_bb != EXIT_BLOCK_PTR)
86 next_head = BB_HEAD (bb->next_bb);
88 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
90 if (insn == next_head)
/* Classify each trailing insn; notes of the kinds below are treated as
   belonging to BB.  NOTE(review): several case arms and the bodies of
   the switches are elided in this listing.  */
93 switch (GET_CODE (insn))
100 switch (NOTE_LINE_NUMBER (insn))
102 case NOTE_INSN_LOOP_END:
103 case NOTE_INSN_BLOCK_END:
106 case NOTE_INSN_DELETED:
107 case NOTE_INSN_DELETED_LABEL:
/* A label immediately followed by a jump table belongs with the table.  */
118 && JUMP_P (NEXT_INSN (insn))
119 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
120 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
122 insn = NEXT_INSN (insn);
135 /* It is possible to hit contradictory sequence. For instance:
141 Where barrier belongs to jump_insn, but the note does not. This can be
142 created by removing the basic block originally following
143 NOTE_INSN_LOOP_BEG. In such case reorder the notes. */
145 for (insn = last_insn; insn != BB_END (bb); insn = prev)
147 prev = PREV_INSN (insn);
149 switch (NOTE_LINE_NUMBER (insn))
151 case NOTE_INSN_LOOP_END:
152 case NOTE_INSN_BLOCK_END:
153 case NOTE_INSN_DELETED:
154 case NOTE_INSN_DELETED_LABEL:
/* Move the note to the end of BB's trailing-insn run.  */
157 reorder_insns (insn, insn, last_insn);
164 /* Locate or create a label for a given basic block. */
167 label_for_bb (basic_block bb)
169 rtx label = BB_HEAD (bb);
/* If BB does not already start with a label, emit one (block_label
   creates it); log to the dump file when enabled.  NOTE(review): the
   dump_file guard and the return are elided in this listing.  */
171 if (!LABEL_P (label))
174 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
176 label = block_label (bb);
182 /* Locate the effective beginning and end of the insn chain for each
183 block, as defined by skip_insns_after_block above. */
186 record_effective_endpoints (void)
/* Skip leading insns up to the first NOTE_INSN_BASIC_BLOCK; they become
   the function-wide header chain.  */
192 for (insn = get_insns ();
195 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
196 insn = NEXT_INSN (insn))
198 /* No basic blocks at all? */
201 if (PREV_INSN (insn))
202 cfg_layout_function_header =
203 unlink_insn_chain (get_insns (), PREV_INSN (insn));
205 cfg_layout_function_header = NULL_RTX;
207 next_insn = get_insns ();
/* For each block (loop header elided in this listing), detach the insns
   preceding its head into rbi->header and those trailing its end into
   rbi->footer.  */
212 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
213 bb->rbi->header = unlink_insn_chain (next_insn,
214 PREV_INSN (BB_HEAD (bb)));
215 end = skip_insns_after_block (bb);
216 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
217 bb->rbi->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
218 next_insn = NEXT_INSN (BB_END (bb));
/* Whatever follows the last block is the function-wide footer chain.  */
221 cfg_layout_function_footer = next_insn;
222 if (cfg_layout_function_footer)
223 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
226 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
227 numbers and files. In order to be GGC friendly we need to use separate
228 varrays. This also slightly improve the memory locality in binary search.
229 The _locs array contains locators where the given property change. The
230 block_locators_blocks contains the scope block that is used for all insn
231 locator greater than corresponding block_locators_locs value and smaller
232 than the following one. Similarly for the other properties. */
233 static GTY(()) varray_type block_locators_locs;
234 static GTY(()) varray_type block_locators_blocks;
235 static GTY(()) varray_type line_locators_locs;
236 static GTY(()) varray_type line_locators_lines;
237 static GTY(()) varray_type file_locators_locs;
238 static GTY(()) varray_type file_locators_files;
/* Locators assigned to prologue/epilogue insns; 0 until initialized by
   insn_locators_initialize.  */
239 int prologue_locator;
240 int epilogue_locator;
242 /* During the RTL expansion the lexical blocks and line numbers are
243 represented via INSN_NOTEs. Replace them by representation using
247 insn_locators_initialize (void)
250 tree last_block = NULL;
253 int line_number = 0, last_line_number = 0;
254 const char *file_name = NULL, *last_file_name = NULL;
256 prologue_locator = epilogue_locator = 0;
/* Create the locator side tables (see comment above their declarations).  */
258 VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
259 VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
260 VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
261 VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
262 VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
263 VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");
/* Walk every insn, tracking the current lexical block / line / file and
   pushing a table entry only when one of them changes.  NOTE(review):
   several guards and brace lines are elided in this listing.  */
265 for (insn = get_insns (); insn; insn = next)
269 next = NEXT_INSN (insn);
/* By this point scope notes must already have been turned into the
   check_block_change representation.  */
273 gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
274 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
275 if (NOTE_LINE_NUMBER (insn) > 0)
277 expanded_location xloc;
278 NOTE_EXPANDED_LOCATION (xloc, insn);
279 line_number = xloc.line;
280 file_name = xloc.file;
/* Jump tables never get locators of their own.  */
284 active = (active_insn_p (insn)
285 && GET_CODE (PATTERN (insn)) != ADDR_VEC
286 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
288 check_block_change (insn, &block);
292 || (!prologue_locator && file_name))
294 if (last_block != block)
297 VARRAY_PUSH_INT (block_locators_locs, loc);
298 VARRAY_PUSH_TREE (block_locators_blocks, block);
301 if (last_line_number != line_number)
304 VARRAY_PUSH_INT (line_locators_locs, loc);
305 VARRAY_PUSH_INT (line_locators_lines, line_number);
306 last_line_number = line_number;
/* Note: file names are compared by pointer, not strcmp — presumably
   relies on interned strings from the front end; verify if reused.  */
308 if (last_file_name != file_name)
311 VARRAY_PUSH_INT (file_locators_locs, loc);
312 VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
313 last_file_name = file_name;
315 if (!prologue_locator && file_name)
316 prologue_locator = loc;
318 epilogue_locator = loc;
320 INSN_LOCATOR (insn) = loc;
324 /* Tag the blocks with a depth number so that change_scope can find
325 the common parent easily. */
326 set_block_levels (DECL_INITIAL (cfun->decl), 0);
328 free_block_changes ();
331 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
332 found in the block tree. */
335 set_block_levels (tree block, int level)
/* Recurse into subblocks, iterate along the sibling chain; the enclosing
   loop/condition lines are elided in this listing.  */
339 BLOCK_NUMBER (block) = level;
340 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
341 block = BLOCK_CHAIN (block);
345 /* Return the scope resulting from combination of S1 and S2, i.e. the
   deeper (larger BLOCK_NUMBER) of the two; return statements are elided
   in this listing.  */
347 choose_inner_scope (tree s1, tree s2)
353 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
358 /* Emit lexical block notes needed to change scope from S1 to S2. */
361 change_scope (rtx orig_insn, tree s1, tree s2)
363 rtx insn = orig_insn;
/* COM will hold the nearest common ancestor scope of S1 and S2.  */
364 tree com = NULL_TREE;
365 tree ts1 = s1, ts2 = s2;
/* Walk both scopes up to equal depth (BLOCK_NUMBER set by
   set_block_levels), then up in lockstep until they meet.  */
370 gcc_assert (ts1 && ts2);
371 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
372 ts1 = BLOCK_SUPERCONTEXT (ts1);
373 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
374 ts2 = BLOCK_SUPERCONTEXT (ts2);
377 ts1 = BLOCK_SUPERCONTEXT (ts1);
378 ts2 = BLOCK_SUPERCONTEXT (ts2);
/* Close scopes from S1 up to the common ancestor...  */
387 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
388 NOTE_BLOCK (note) = s;
389 s = BLOCK_SUPERCONTEXT (s);
/* ...then open scopes from the common ancestor down to S2.  */
396 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
397 NOTE_BLOCK (insn) = s;
398 s = BLOCK_SUPERCONTEXT (s);
402 /* Return the lexical scope block that INSN belongs to, found by binary
   search of the block_locators tables.  */
404 insn_scope (rtx insn)
406 int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
408 int loc = INSN_LOCATOR (insn);
410 /* When block_locators_locs was initialized, the pro- and epilogue
411 insns didn't exist yet and can therefore not be found this way.
412 But we know that they belong to the outer most block of the
414 Without this test, the prologue would be put inside the block of
415 the first valid instruction in the function and when that first
416 insn is part of an inlined function then the low_pc of that
417 inlined function is messed up. Likewise for the epilogue and
418 the last valid instruction. */
419 if (loc == prologue_locator || loc == epilogue_locator)
420 return DECL_INITIAL (cfun->decl);
/* Binary search for the greatest entry <= LOC (loop header and the
   min/max updates are elided in this listing).  */
426 int pos = (min + max) / 2;
427 int tmp = VARRAY_INT (block_locators_locs, pos);
429 if (tmp <= loc && min != pos)
431 else if (tmp > loc && max != pos)
439 return VARRAY_TREE (block_locators_blocks, min);
442 /* Return line number of the statement specified by the locator. */
444 locator_line (int loc)
446 int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
/* Same binary-search shape as insn_scope, over the line tables (loop
   header elided in this listing).  */
453 int pos = (min + max) / 2;
454 int tmp = VARRAY_INT (line_locators_locs, pos);
456 if (tmp <= loc && min != pos)
458 else if (tmp > loc && max != pos)
466 return VARRAY_INT (line_locators_lines, min);
469 /* Return line number of the statement that produced this insn.
   (Thin wrapper over locator_line; signature elided in this listing.)  */
473 return locator_line (INSN_LOCATOR (insn));
476 /* Return source file of the statement specified by LOC. */
478 locator_file (int loc)
480 int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
/* Same binary-search shape as locator_line, over the file tables (loop
   header elided in this listing).  */
487 int pos = (min + max) / 2;
488 int tmp = VARRAY_INT (file_locators_locs, pos);
490 if (tmp <= loc && min != pos)
492 else if (tmp > loc && max != pos)
500 return VARRAY_CHAR_PTR (file_locators_files, min);
503 /* Return source file of the statement that produced this insn.
   (Thin wrapper over locator_file; signature elided in this listing.)  */
507 return locator_file (INSN_LOCATOR (insn));
510 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
511 on the scope tree and the newly reordered instructions. */
514 reemit_insn_block_notes (void)
516 tree cur_block = DECL_INITIAL (cfun->decl);
/* Start from the first active insn.  */
520 if (!active_insn_p (insn))
521 insn = next_active_insn (insn);
522 for (; insn; insn = next_active_insn (insn))
526 /* Avoid putting scope notes between jump table and its label. */
528 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
529 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
532 this_block = insn_scope (insn);
533 /* For sequences compute scope resulting from merging all scopes
534 of instructions nested inside. */
535 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
538 rtx body = PATTERN (insn);
541 for (i = 0; i < XVECLEN (body, 0); i++)
542 this_block = choose_inner_scope (this_block,
543 insn_scope (XVECEXP (body, 0, i)));
/* Emit the BLOCK_END/BLOCK_BEG notes needed to transition scopes.  */
548 if (this_block != cur_block)
550 change_scope (insn, cur_block, this_block);
551 cur_block = this_block;
555 /* change_scope emits before the insn, not after. */
556 note = emit_note (NOTE_INSN_DELETED);
557 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
563 /* Given a reorder chain, rearrange the code to match. */
/* Reads the rbi->next ordering laid down by the reordering passes and
   rewrites the insn chain, jump instructions, and basic_block_info to
   match.  NOTE(review): this function is heavily elided in this listing
   (many braces, declarations and statements missing); kept verbatim.  */
566 fixup_reorder_chain (void)
568 basic_block bb, prev_bb;
/* Re-attach the detached function header, then walk to its end.  */
572 if (cfg_layout_function_header)
574 set_first_insn (cfg_layout_function_header);
575 insn = cfg_layout_function_header;
576 while (NEXT_INSN (insn))
577 insn = NEXT_INSN (insn);
580 /* First do the bulk reordering -- rechain the blocks without regard to
581 the needed changes to jumps and labels. */
583 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
585 bb = bb->rbi->next, index++)
590 NEXT_INSN (insn) = bb->rbi->header;
592 set_first_insn (bb->rbi->header);
593 PREV_INSN (bb->rbi->header) = insn;
594 insn = bb->rbi->header;
595 while (NEXT_INSN (insn))
596 insn = NEXT_INSN (insn);
599 NEXT_INSN (insn) = BB_HEAD (bb);
601 set_first_insn (BB_HEAD (bb));
602 PREV_INSN (BB_HEAD (bb)) = insn;
606 NEXT_INSN (insn) = bb->rbi->footer;
607 PREV_INSN (bb->rbi->footer) = insn;
608 while (NEXT_INSN (insn))
609 insn = NEXT_INSN (insn);
613 gcc_assert (index == n_basic_blocks);
/* Re-attach the function-wide footer and finalize last-insn.  */
615 NEXT_INSN (insn) = cfg_layout_function_footer;
616 if (cfg_layout_function_footer)
617 PREV_INSN (cfg_layout_function_footer) = insn;
619 while (NEXT_INSN (insn))
620 insn = NEXT_INSN (insn);
622 set_last_insn (insn);
623 #ifdef ENABLE_CHECKING
624 verify_insn_chain ();
626 delete_dead_jumptables ();
628 /* Now add jumps and labels as needed to match the blocks new
631 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->rbi->next)
633 edge e_fall, e_taken, e;
639 if (EDGE_COUNT (bb->succs) == 0)
642 /* Find the old fallthru edge, and another non-EH edge for
644 e_taken = e_fall = NULL;
646 FOR_EACH_EDGE (e, ei, bb->succs)
647 if (e->flags & EDGE_FALLTHRU)
649 else if (! (e->flags & EDGE_EH))
652 bb_end_insn = BB_END (bb);
653 if (JUMP_P (bb_end_insn))
655 if (any_condjump_p (bb_end_insn))
657 /* If the old fallthru is still next, nothing to do. */
658 if (bb->rbi->next == e_fall->dest
659 || e_fall->dest == EXIT_BLOCK_PTR)
662 /* The degenerated case of conditional jump jumping to the next
663 instruction can happen on target having jumps with side
666 Create temporarily the duplicated edge representing branch.
667 It will get unidentified by force_nonfallthru_and_redirect
668 that would otherwise get confused by fallthru edge not pointing
669 to the next basic block. */
676 e_fake = unchecked_make_edge (bb, e_fall->dest, 0);
678 redirected = redirect_jump (BB_END (bb),
679 block_label (bb), 0);
680 gcc_assert (redirected);
/* Split the branch probability between the real and fake edges.  */
682 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
685 int prob = INTVAL (XEXP (note, 0));
687 e_fake->probability = prob;
688 e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
689 e_fall->probability -= e_fall->probability;
690 e_fall->count -= e_fake->count;
691 if (e_fall->probability < 0)
692 e_fall->probability = 0;
693 if (e_fall->count < 0)
697 /* There is one special case: if *neither* block is next,
698 such as happens at the very end of a function, then we'll
699 need to add a new unconditional jump. Choose the taken
700 edge based on known or assumed probability. */
701 else if (bb->rbi->next != e_taken->dest)
703 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
706 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
707 && invert_jump (bb_end_insn,
708 (e_fall->dest == EXIT_BLOCK_PTR
710 : label_for_bb (e_fall->dest)), 0))
712 e_fall->flags &= ~EDGE_FALLTHRU;
713 #ifdef ENABLE_CHECKING
714 gcc_assert (could_fall_through
715 (e_taken->src, e_taken->dest));
717 e_taken->flags |= EDGE_FALLTHRU;
718 update_br_prob_note (bb);
/* Swap roles: the formerly-taken edge is now the fallthru.  */
719 e = e_fall, e_fall = e_taken, e_taken = e;
723 /* If the "jumping" edge is a crossing edge, and the fall
724 through edge is non-crossing, leave things as they are. */
725 else if ((e_taken->flags & EDGE_CROSSING)
726 && !(e_fall->flags & EDGE_CROSSING))
729 /* Otherwise we can try to invert the jump. This will
730 basically never fail, however, keep up the pretense. */
731 else if (invert_jump (bb_end_insn,
732 (e_fall->dest == EXIT_BLOCK_PTR
734 : label_for_bb (e_fall->dest)), 0))
736 e_fall->flags &= ~EDGE_FALLTHRU;
737 #ifdef ENABLE_CHECKING
738 gcc_assert (could_fall_through
739 (e_taken->src, e_taken->dest));
741 e_taken->flags |= EDGE_FALLTHRU;
742 update_br_prob_note (bb);
748 /* Otherwise we have some return, switch or computed
749 jump. In the 99% case, there should not have been a
751 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
757 /* No fallthru implies a noreturn function with EH edges, or
758 something similarly bizarre. In any case, we don't need to
763 /* If the fallthru block is still next, nothing to do. */
764 if (bb->rbi->next == e_fall->dest)
767 /* A fallthru to exit block. */
768 if (e_fall->dest == EXIT_BLOCK_PTR)
772 /* We got here if we need to add a new jump insn. */
773 nb = force_nonfallthru (e_fall);
776 initialize_bb_rbi (nb);
777 nb->rbi->visited = 1;
778 nb->rbi->next = bb->rbi->next;
780 /* Don't process this new block. */
784 /* Make sure new bb is tagged for correct section (same as
785 fall-thru source, since you cannot fall-throu across
786 section boundaries). */
787 BB_COPY_PARTITION (e_fall->src, EDGE_PRED (bb, 0)->src);
788 if (flag_reorder_blocks_and_partition
789 && targetm.have_named_sections)
791 if (BB_PARTITION (EDGE_PRED (bb, 0)->src) == BB_COLD_PARTITION)
794 rtx note = BB_HEAD (e_fall->src);
796 while (!INSN_P (note)
797 && note != BB_END (e_fall->src))
798 note = NEXT_INSN (note);
800 new_note = emit_note_before
801 (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
803 NOTE_BASIC_BLOCK (new_note) = bb;
805 if (JUMP_P (BB_END (bb))
806 && !any_condjump_p (BB_END (bb))
807 && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
808 REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
809 (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
814 /* Put basic_block_info in the new order. */
818 fprintf (dump_file, "Reordered sequence:\n");
819 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
821 bb = bb->rbi->next, index++)
823 fprintf (dump_file, " %i ", index);
824 if (bb->rbi->original)
825 fprintf (dump_file, "duplicate of %i ",
826 bb->rbi->original->index);
827 else if (forwarder_block_p (bb)
828 && !LABEL_P (BB_HEAD (bb)))
829 fprintf (dump_file, "compensation ");
831 fprintf (dump_file, "bb %i ", bb->index);
832 fprintf (dump_file, " [%i]\n", bb->frequency);
/* Relink prev_bb/next_bb pointers and BASIC_BLOCK indices to match the
   rbi->next chain.  */
836 prev_bb = ENTRY_BLOCK_PTR;
837 bb = ENTRY_BLOCK_PTR->next_bb;
840 for (; bb; prev_bb = bb, bb = bb->rbi->next, index ++)
843 BASIC_BLOCK (index) = bb;
845 update_unlikely_executed_notes (bb);
847 bb->prev_bb = prev_bb;
848 prev_bb->next_bb = bb;
850 prev_bb->next_bb = EXIT_BLOCK_PTR;
851 EXIT_BLOCK_PTR->prev_bb = prev_bb;
853 /* Annoying special case - jump around dead jumptables left in the code. */
859 FOR_EACH_EDGE (e, ei, bb->succs)
860 if (e->flags & EDGE_FALLTHRU)
863 if (e && !can_fallthru (e->src, e->dest))
864 force_nonfallthru (e);
868 /* Update the basic block number information in any
869 NOTE_INSN_UNLIKELY_EXECUTED_CODE notes within the basic block. */
872 update_unlikely_executed_notes (basic_block bb)
/* Note: BB_END itself is excluded by the loop condition.  */
876 for (cur_insn = BB_HEAD (bb); cur_insn != BB_END (bb);
877 cur_insn = NEXT_INSN (cur_insn))
878 if (NOTE_P (cur_insn)
879 && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
880 NOTE_BASIC_BLOCK (cur_insn) = bb;
883 /* Perform sanity checks on the insn chain.
884 1. Check that next/prev pointers are consistent in both the forward and
886 2. Count insns in chain, going both directions, and check if equal.
887 3. Check that get_last_insn () returns the actual end of chain. */
890 verify_insn_chain (void)
893 int insn_cnt1, insn_cnt2;
/* Forward pass: every insn's PREV_INSN must name its predecessor.  */
895 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
897 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
898 gcc_assert (PREV_INSN (x) == prevx);
900 gcc_assert (prevx == get_last_insn ());
/* Backward pass: mirror check via NEXT_INSN.  */
902 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
904 nextx = x, insn_cnt2++, x = PREV_INSN (x))
905 gcc_assert (NEXT_INSN (x) == nextx);
907 gcc_assert (insn_cnt1 == insn_cnt2);
910 /* If we have assembler epilogues, the block falling through to exit must
911 be the last one in the reordered chain when we reach final. Ensure
912 that this condition is met. */
914 fixup_fallthru_exit_predecessor (void)
918 basic_block bb = NULL;
920 /* This transformation is not valid before reload, because we might
921 separate a call from the instruction that copies the return
923 gcc_assert (reload_completed);
/* Find the (at most one) fallthru predecessor of the exit block.  */
925 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
926 if (e->flags & EDGE_FALLTHRU)
/* If BB is not already last in the rbi->next chain, move it there.  */
929 if (bb && bb->rbi->next)
931 basic_block c = ENTRY_BLOCK_PTR->next_bb;
933 /* If the very first block is the one with the fall-through exit
934 edge, we have to split that block. */
937 bb = split_block (bb, NULL)->dest;
938 initialize_bb_rbi (bb);
939 bb->rbi->next = c->rbi->next;
941 bb->rbi->footer = c->rbi->footer;
942 c->rbi->footer = NULL;
/* Unlink BB from its current position in the chain and append it.  */
945 while (c->rbi->next != bb)
948 c->rbi->next = bb->rbi->next;
953 bb->rbi->next = NULL;
957 /* Return true in case it is possible to duplicate the basic block BB. */
959 /* We do not want to declare the function in a header file, since it should
960 only be used through the cfghooks interface, and we do not want to move
961 it to cfgrtl.c since it would require also moving quite a lot of related
963 extern bool cfg_layout_can_duplicate_bb_p (basic_block);
966 cfg_layout_can_duplicate_bb_p (basic_block bb)
968 /* Do not attempt to duplicate tablejumps, as we need to unshare
969 the dispatch table. This is difficult to do, as the instructions
970 computing jump destination may be hoisted outside the basic block. */
971 if (tablejump_p (BB_END (bb), NULL, NULL))
974 /* Do not duplicate blocks containing insns that can't be copied. */
975 if (targetm.cannot_copy_insn_p)
/* Scan every insn of BB, asking the target hook (loop structure is
   partly elided in this listing).  */
977 rtx insn = BB_HEAD (bb);
980 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
982 if (insn == BB_END (bb))
984 insn = NEXT_INSN (insn);
/* Copy the insns FROM..TO (inclusive) to the end of the current insn
   chain and return the first copied insn.  Dispatch tables are skipped
   and most notes are either copied or deliberately dropped, as commented
   per case below.  NOTE(review): return type, case labels for the
   insn codes and several braces are elided in this listing.  */
992 duplicate_insn_chain (rtx from, rtx to)
996 /* Avoid updating of boundaries of previous basic block. The
997 note will get removed from insn stream in fixup. */
998 last = emit_note (NOTE_INSN_DELETED);
1000 /* Create copy at the end of INSN chain. The chain will
1001 be reordered later. */
1002 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
1004 switch (GET_CODE (insn))
1009 /* Avoid copying of dispatch tables. We never duplicate
1010 tablejumps, so this can hit only in case the table got
1011 moved far from original jump. */
1012 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
1013 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1015 emit_copy_of_insn_after (insn, get_last_insn ());
1026 switch (NOTE_LINE_NUMBER (insn))
1028 /* In case prologue is empty and function contain label
1029 in first BB, we may want to copy the block. */
1030 case NOTE_INSN_PROLOGUE_END:
1032 case NOTE_INSN_LOOP_BEG:
1033 case NOTE_INSN_LOOP_END:
1034 /* Strip down the loop notes - we don't really want to keep
1035 them consistent in loop copies. */
1036 case NOTE_INSN_DELETED:
1037 case NOTE_INSN_DELETED_LABEL:
1038 /* No problem to strip these. */
1039 case NOTE_INSN_EPILOGUE_BEG:
1040 case NOTE_INSN_FUNCTION_END:
1041 /* Debug code expect these notes to exist just once.
1042 Keep them in the master copy.
1043 ??? It probably makes more sense to duplicate them for each
1045 case NOTE_INSN_FUNCTION_BEG:
1046 /* There is always just single entry to function. */
1047 case NOTE_INSN_BASIC_BLOCK:
1050 case NOTE_INSN_REPEATED_LINE_NUMBER:
1051 case NOTE_INSN_UNLIKELY_EXECUTED_CODE:
1052 emit_note_copy (insn);
1056 /* All other notes should have already been eliminated.
1058 gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);
1060 /* It is possible that no_line_number is set and the note
1061 won't be emitted. */
1062 emit_note_copy (insn);
/* Skip the sentinel NOTE_INSN_DELETED emitted above; return the first
   real copy.  */
1069 insn = NEXT_INSN (last);
1073 /* Create a duplicate of the basic block BB. */
1075 /* We do not want to declare the function in a header file, since it should
1076 only be used through the cfghooks interface, and we do not want to move
1077 it to cfgrtl.c since it would require also moving quite a lot of related
1079 extern basic_block cfg_layout_duplicate_bb (basic_block);
1082 cfg_layout_duplicate_bb (basic_block bb)
/* Copy BB's insns to the end of the chain and wrap them in a fresh block
   placed just before the exit block.  */
1087 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1088 new_bb = create_basic_block (insn,
1089 insn ? get_last_insn () : NULL,
1090 EXIT_BLOCK_PTR->prev_bb);
1092 BB_COPY_PARTITION (new_bb, bb);
/* Also clone the detached header/footer insn chains kept in rbi.  */
1093 if (bb->rbi->header)
1095 insn = bb->rbi->header;
1096 while (NEXT_INSN (insn))
1097 insn = NEXT_INSN (insn);
1098 insn = duplicate_insn_chain (bb->rbi->header, insn);
1100 new_bb->rbi->header = unlink_insn_chain (insn, get_last_insn ());
1103 if (bb->rbi->footer)
1105 insn = bb->rbi->footer;
1106 while (NEXT_INSN (insn))
1107 insn = NEXT_INSN (insn);
1108 insn = duplicate_insn_chain (bb->rbi->footer, insn);
1110 new_bb->rbi->footer = unlink_insn_chain (insn, get_last_insn ());
/* Copy liveness information when available.
   Fixed: "®_obstack" was a mis-encoding of "&reg_obstack" (the byte
   sequence "&reg" had been collapsed into the single character '®'),
   which cannot compile.  */
1113 if (bb->global_live_at_start)
1115 new_bb->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
1116 new_bb->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
1117 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
1118 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1124 /* Main entry point to this module - initialize the datastructures for
1125 CFG layout changes. It keeps LOOPS up-to-date if not null.
1127 FLAGS is a set of additional flags to pass to cleanup_cfg(). It should
1128 include CLEANUP_UPDATE_LIFE if liveness information must be kept up
1132 cfg_layout_initialize (unsigned int flags)
1136 /* Our algorithm depends on fact that there are no dead jumptables
/* Attach rbi (reorder-block info) to every block, including entry.  */
1140 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1141 initialize_bb_rbi (bb);
1143 cfg_layout_rtl_register_cfg_hooks ();
1145 record_effective_endpoints ();
1147 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1150 /* Splits superblocks. */
1152 break_superblocks (void)
1154 sbitmap superblocks;
1158 superblocks = sbitmap_alloc (last_basic_block);
1159 sbitmap_zero (superblocks);
/* Collect every block flagged BB_SUPERBLOCK, clearing the flag (loop
   header elided in this listing).  */
1162 if (bb->flags & BB_SUPERBLOCK)
1164 bb->flags &= ~BB_SUPERBLOCK;
1165 SET_BIT (superblocks, bb->index);
/* Re-discover sub-basic-blocks inside the marked superblocks.
   NOTE(review): the sbitmap_free call is elided in this listing —
   confirm against the full source that the bitmap is released.  */
1171 rebuild_jump_labels (get_insns ());
1172 find_many_sub_basic_blocks (superblocks);
1178 /* Finalize the changes: reorder insn list according to the sequence, enter
1179 compensation code, rebuild scope forest. */
1182 cfg_layout_finalize (void)
1186 #ifdef ENABLE_CHECKING
1187 verify_flow_info ();
/* Leave cfglayout mode: restore the ordinary RTL cfg hooks.  */
1189 rtl_register_cfg_hooks ();
1190 if (reload_completed
1191 #ifdef HAVE_epilogue
1195 fixup_fallthru_exit_predecessor ();
1196 fixup_reorder_chain ();
1198 #ifdef ENABLE_CHECKING
1199 verify_insn_chain ();
/* Drop per-block rbi data (body elided in this listing) and split any
   superblocks created while reordering.  */
1203 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1206 break_superblocks ();
1208 #ifdef ENABLE_CHECKING
1209 verify_flow_info ();
1213 /* Checks whether all N blocks in BBS array can be copied. */
1215 can_copy_bbs_p (basic_block *bbs, unsigned n)
/* Temporarily mark the candidate set so edges inside it can be told
   apart from edges leaving it.  */
1221 for (i = 0; i < n; i++)
1222 bbs[i]->rbi->duplicated = 1;
1224 for (i = 0; i < n; i++)
1226 /* In case we should redirect abnormal edge during duplication, fail. */
1228 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1229 if ((e->flags & EDGE_ABNORMAL)
1230 && e->dest->rbi->duplicated)
1236 if (!can_duplicate_block_p (bbs[i]))
/* Always clear the temporary marks before returning.  */
1244 for (i = 0; i < n; i++)
1245 bbs[i]->rbi->duplicated = 0;
1250 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1251 are placed into array NEW_BBS in the same order. Edges from basic blocks
1252 in BBS are also duplicated and copies of those of them
1253 that lead into BBS are redirected to appropriate newly created block. The
1254 function assigns bbs into loops (copy of basic block bb is assigned to
1255 bb->loop_father->copy loop, so this must be set up correctly in advance)
1256 and updates dominators locally (LOOPS structure that contains the information
1257 about dominators is passed to enable this).
1259 BASE is the superloop to that basic block belongs; if its header or latch
1260 is copied, we do not set the new blocks as header or latch.
1262 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1263 also in the same order. */
1266 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1267 edge *edges, unsigned n_edges, edge *new_edges,
1271 basic_block bb, new_bb, dom_bb;
1274 /* Duplicate bbs, update dominators, assign bbs to loops. */
1275 for (i = 0; i < n; i++)
1279 new_bb = new_bbs[i] = duplicate_block (bb, NULL);
/* Mark originals so intra-set edges can be recognized below.  */
1280 bb->rbi->duplicated = 1;
1282 add_bb_to_loop (new_bb, bb->loop_father->copy);
1283 /* Possibly set header. */
1284 if (bb->loop_father->header == bb && bb->loop_father != base)
1285 new_bb->loop_father->header = new_bb;
1287 if (bb->loop_father->latch == bb && bb->loop_father != base)
1288 new_bb->loop_father->latch = new_bb;
1291 /* Set dominators. */
1292 for (i = 0; i < n; i++)
1295 new_bb = new_bbs[i];
1297 dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
/* If the dominator was itself copied, dominate from its copy.  */
1298 if (dom_bb->rbi->duplicated)
1300 dom_bb = dom_bb->rbi->copy;
1301 set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
1305 /* Redirect edges. */
1306 for (j = 0; j < n_edges; j++)
1307 new_edges[j] = NULL;
1308 for (i = 0; i < n; i++)
1311 new_bb = new_bbs[i];
1314 FOR_EACH_EDGE (e, ei, new_bb->succs)
/* Record the copy of each requested edge in NEW_EDGES.  */
1316 for (j = 0; j < n_edges; j++)
1317 if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
/* Edges into the copied set are retargeted to the copies.  */
1320 if (!e->dest->rbi->duplicated)
1322 redirect_edge_and_branch_force (e, e->dest->rbi->copy);
1326 /* Clear information about duplicates. */
1327 for (i = 0; i < n; i++)
1328 bbs[i]->rbi->duplicated = 0;
1331 #include "gt-cfglayout.h"