1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
25 Available functionality:
26 - Basic CFG/RTL manipulation API documented in cfghooks.h
27 - CFG-aware instruction chain manipulation
28 delete_insn, delete_insn_chain
29 - Edge splitting and committing to edges
30 insert_insn_on_edge, commit_edge_insertions
31 - CFG updating after insn simplification
32 purge_dead_edges, purge_all_dead_edges
34 Functions not supposed for generic use:
35 - Infrastructure to determine quickly basic block for insn
36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37 - Edge redirection with updating and optimizing of insn chain
38 block_label, tidy_fallthru_edge, force_nonfallthru */
42 #include "coretypes.h"
46 #include "hard-reg-set.h"
47 #include "basic-block.h"
56 #include "insn-attr.h"
57 #include "insn-config.h"
58 #include "cfglayout.h"
63 #include "tree-pass.h"
/* Forward declarations for the file-local (static) CFG/RTL helpers and
   cfglayout-mode hooks defined later in this file.  Several of these
   (e.g. rtl_verify_flow_info, rtl_dump_bb) are defined beyond the end of
   this extract.  */
66 static int can_delete_note_p (const_rtx);
67 static int can_delete_label_p (const_rtx);
68 static void commit_one_edge_insertion (edge);
69 static basic_block rtl_split_edge (edge);
70 static bool rtl_move_block_after (basic_block, basic_block);
71 static int rtl_verify_flow_info (void);
72 static basic_block cfg_layout_split_block (basic_block, void *);
73 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
74 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
75 static void cfg_layout_delete_block (basic_block);
76 static void rtl_delete_block (basic_block);
77 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
78 static edge rtl_redirect_edge_and_branch (edge, basic_block);
79 static basic_block rtl_split_block (basic_block, void *);
80 static void rtl_dump_bb (basic_block, FILE *, int, int);
81 static int rtl_verify_flow_info_1 (void);
82 static void rtl_make_forwarder_block (edge);
/* NOTE(review): this extract is lossy -- the embedded numbering skips
   lines (e.g. 86-89, 92-93), so the return-type lines and braces of both
   predicates below are missing from view.  Code kept byte-identical.  */
84 /* Return true if NOTE is not one of the ones that must be kept paired,
85 so that we may simply delete it. */
88 can_delete_note_p (const_rtx note)
90 return (NOTE_KIND (note) == NOTE_INSN_DELETED
91 || NOTE_KIND (note) == NOTE_INSN_BASIC_BLOCK);
94 /* True if a given label can be deleted. */
97 can_delete_label_p (const_rtx label)
99 return (!LABEL_PRESERVE_P (label)
100 /* User declared labels must be preserved. */
101 && LABEL_NAME (label) == 0
/* Labels listed in forced_labels (address taken) must also survive.  */
102 && !in_expr_list_p (forced_labels, label));
/* NOTE(review): lossy extract -- numbering gaps show that the function
   header, braces, several conditions (e.g. around lines 143, 149, 166)
   and the declaration of `note' are missing from view.  Kept byte-identical.  */
105 /* Delete INSN by patching it out. Return the next insn. */
108 delete_insn (rtx insn)
110 rtx next = NEXT_INSN (insn);
112 bool really_delete = true;
116 /* Some labels can't be directly removed from the INSN chain, as they
117 might be references via variables, constant pool etc.
118 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
119 if (! can_delete_label_p (insn))
121 const char *name = LABEL_NAME (insn);
/* Demote the undeletable label to a note instead of unlinking it.  */
123 really_delete = false;
124 PUT_CODE (insn, NOTE);
125 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
126 NOTE_DELETED_LABEL_NAME (insn) = name;
129 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
134 /* If this insn has already been deleted, something is very wrong. */
135 gcc_assert (!INSN_DELETED_P (insn));
137 INSN_DELETED_P (insn) = 1;
140 /* If deleting a jump, decrement the use count of the label. Deleting
141 the label itself should happen in the normal course of block merging. */
144 if (JUMP_LABEL (insn)
145 && LABEL_P (JUMP_LABEL (insn)))
146 LABEL_NUSES (JUMP_LABEL (insn))--;
148 /* If there are more targets, remove them too. */
150 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
151 && LABEL_P (XEXP (note, 0)))
153 LABEL_NUSES (XEXP (note, 0))--;
154 remove_note (insn, note);
158 /* Also if deleting any insn that references a label as an operand. */
159 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
160 && LABEL_P (XEXP (note, 0)))
162 LABEL_NUSES (XEXP (note, 0))--;
163 remove_note (insn, note);
/* Jump tables: drop one label use per table entry as well.  */
167 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
168 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
170 rtx pat = PATTERN (insn);
171 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
172 int len = XVECLEN (pat, diff_vec_p);
175 for (i = 0; i < len; i++)
177 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
179 /* When deleting code in bulk (e.g. removing many unreachable
180 blocks) we can delete a label that's a target of the vector
181 before deleting the vector itself. */
183 LABEL_NUSES (label)--;
/* NOTE(review): lossy extract -- function headers, braces, the guarding
   conditions before the purge_dead_edges calls, and the loop framework of
   delete_insn_chain are partially missing from view.  Kept byte-identical.  */
190 /* Like delete_insn but also purge dead edges from BB. */
193 delete_insn_and_edges (rtx insn)
199 && BLOCK_FOR_INSN (insn)
200 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
202 x = delete_insn (insn);
204 purge_dead_edges (BLOCK_FOR_INSN (insn));
208 /* Unlink a chain of insns between START and FINISH, leaving notes
209 that must be paired. If CLEAR_BB is true, we set bb field for
210 insns that cannot be removed to NULL. */
213 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
217 /* Unchain the insns one by one. It would be quicker to delete all of these
218 with a single unchaining, rather than one at a time, but we need to keep
222 next = NEXT_INSN (start);
223 if (NOTE_P (start) && !can_delete_note_p (start))
226 next = delete_insn (start);
/* A note that must be kept (see can_delete_note_p) survives deletion;
   detach it from its block if requested.  */
228 if (clear_bb && !INSN_DELETED_P (start))
229 set_block_for_insn (start, NULL);
237 /* Like delete_insn_chain but also purge dead edges from BB. */
240 delete_insn_chain_and_edges (rtx first, rtx last)
245 && BLOCK_FOR_INSN (last)
246 && BB_END (BLOCK_FOR_INSN (last)) == last)
248 delete_insn_chain (first, last, false);
250 purge_dead_edges (BLOCK_FOR_INSN (last));
/* NOTE(review): lossy extract -- the bb allocation, several branch arms of
   the note-placement logic, BB_HEAD/BB_END assignments and the return
   statements are missing from view (numbering gaps).  Kept byte-identical.  */
253 /* Create a new basic block consisting of the instructions between HEAD and END
254 inclusive. This function is designed to allow fast BB construction - reuses
255 the note and basic block struct in BB_NOTE, if any and do not grow
256 BASIC_BLOCK chain and should be used directly only by CFG construction code.
257 END can be NULL in to create new empty basic block before HEAD. Both END
258 and HEAD can be NULL to create basic block at the end of INSN chain.
259 AFTER is the basic block we should be put after. */
262 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
267 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
270 /* If we found an existing note, thread it back onto the chain. */
278 after = PREV_INSN (head);
282 if (after != bb_note && NEXT_INSN (after) != bb_note)
283 reorder_insns_nobb (bb_note, bb_note, after);
287 /* Otherwise we must create a note and a basic block structure. */
291 init_rtl_bb_info (bb);
294 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
295 else if (LABEL_P (head) && end)
297 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
303 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
309 NOTE_BASIC_BLOCK (bb_note) = bb;
312 /* Always include the bb note in the block. */
313 if (NEXT_INSN (end) == bb_note)
/* Register the new block with the CFG bookkeeping and dataflow.  */
318 bb->index = last_basic_block++;
319 bb->flags = BB_NEW | BB_RTL;
320 link_block (bb, after);
321 SET_BASIC_BLOCK (bb->index, bb);
322 df_bb_refs_record (bb->index, false);
323 update_bb_for_insn (bb);
324 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
326 /* Tag the block so that we know it has been used when considering
327 other basic block notes. */
333 /* Create new basic block consisting of instructions in between HEAD and END
334 and place it to the BB chain after block AFTER. END can be NULL in to
335 create new empty basic block before HEAD. Both END and HEAD can be NULL to
336 create basic block at the end of INSN chain. */
339 rtl_create_basic_block (void *headp, void *endp, basic_block after)
341 rtx head = (rtx) headp, end = (rtx) endp;
344 /* Grow the basic block array if needed. */
345 if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
347 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
348 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
353 bb = create_basic_block_structure (head, end, NULL, after);
/* cfglayout-mode wrapper around rtl_create_basic_block; the rest of its
   body (if any) falls in a numbering gap.  */
359 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
361 basic_block newbb = rtl_create_basic_block (head, end, after);
/* NOTE(review): lossy extract -- function headers, braces, local
   declarations (insn, end, bb iterators) and the FOR_EACH_BB loop header
   of compute_bb_for_insn are missing from view.  Kept byte-identical.  */
366 /* Delete the insns in a (non-live) block. We physically delete every
367 non-deleted-note insn, and update the flow graph appropriately.
369 Return nonzero if we deleted an exception handler. */
371 /* ??? Preserving all such notes strikes me as wrong. It would be nice
372 to post-process the stream to remove empty blocks, loops, ranges, etc. */
375 rtl_delete_block (basic_block b)
379 /* If the head of this block is a CODE_LABEL, then it might be the
380 label for an exception handler which can't be reached. We need
381 to remove the label from the exception_handler_label list. */
384 maybe_remove_eh_handler (insn);
386 end = get_last_bb_insn (b);
388 /* Selectively delete the entire chain. */
390 delete_insn_chain (insn, end, true);
394 fprintf (dump_file, "deleting block %d\n", b->index);
395 df_bb_delete (b->index);
398 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
401 compute_bb_for_insn (void)
407 rtx end = BB_END (bb);
410 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
412 BLOCK_FOR_INSN (insn) = bb;
419 /* Release the basic_block_for_insn array. */
422 free_bb_for_insn (void)
/* Barriers live between blocks and never carry a block pointer.  */
425 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
426 if (!BARRIER_P (insn))
427 BLOCK_FOR_INSN (insn) = NULL;
/* Pass callback that tears down the CFG, plus its pass descriptor.
   NOTE(review): lossy extract -- the callback's body after the
   flag_delayed_branch check and several descriptor fields (name, gate,
   tv_id, etc.) fall in numbering gaps.  Kept byte-identical.  */
432 rest_of_pass_free_cfg (void)
435 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
436 valid at that point so it would be too late to call df_analyze. */
437 if (optimize > 0 && flag_delayed_branch)
445 struct rtl_opt_pass pass_free_cfg =
451 rest_of_pass_free_cfg, /* execute */
454 0, /* static_pass_number */
456 0, /* properties_required */
457 0, /* properties_provided */
458 PROP_cfg, /* properties_destroyed */
459 0, /* todo_flags_start */
460 0, /* todo_flags_finish */
/* NOTE(review): lossy extract -- function return types, braces and some
   local declarations are missing from view (numbering gaps).
   Kept byte-identical.  */
464 /* Return RTX to emit after when we want to emit code on the entry of function. */
466 entry_of_function (void)
468 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
469 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
472 /* Emit INSN at the entry point of the function, ensuring that it is only
473 executed once per function. */
475 emit_insn_at_entry (rtx insn)
477 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
478 edge e = ei_safe_edge (ei);
/* Only the fallthru successor of the entry block is a valid insertion
   point here.  */
479 gcc_assert (e->flags & EDGE_FALLTHRU);
481 insert_insn_on_edge (insn, e);
482 commit_edge_insertions ();
485 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
486 (or BARRIER if found) and notify df of the bb change.
487 The insn chain range is inclusive
488 (i.e. both BEGIN and END will be updated. */
491 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
495 end = NEXT_INSN (end);
496 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
497 if (!BARRIER_P (insn))
498 df_insn_change_bb (insn, bb);
501 /* Update BLOCK_FOR_INSN of insns in BB to BB,
502 and notify df of the change. */
505 update_bb_for_insn (basic_block bb)
507 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
/* NOTE(review): lossy extract -- the loop that skips to the basic-block
   note in first_insn_after_basic_block_note, and in rtl_split_block the
   declarations of new_bb/e/ei, several branch headers and the probability
   bookkeeping, are missing from view.  Kept byte-identical.  */
511 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
512 note associated with the BLOCK. */
515 first_insn_after_basic_block_note (basic_block block)
519 /* Get the first instruction in the block. */
520 insn = BB_HEAD (block);
522 if (insn == NULL_RTX)
525 insn = NEXT_INSN (insn);
526 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
528 return NEXT_INSN (insn);
531 /* Creates a new basic block just after basic block B by splitting
532 everything after specified instruction I. */
535 rtl_split_block (basic_block bb, void *insnp)
538 rtx insn = (rtx) insnp;
544 insn = first_insn_after_basic_block_note (bb);
547 insn = PREV_INSN (insn);
549 insn = get_last_insn ();
552 /* We probably should check type of the insn so that we do not create
553 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
555 if (insn == BB_END (bb))
556 emit_note_after (NOTE_INSN_DELETED, insn);
558 /* Create the new basic block. */
559 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
560 BB_COPY_PARTITION (new_bb, bb);
563 /* Redirect the outgoing edges. */
564 new_bb->succs = bb->succs;
566 FOR_EACH_EDGE (e, ei, new_bb->succs)
569 /* The new block starts off being dirty. */
570 df_set_bb_dirty (bb);
/* NOTE(review): lossy extract -- the JUMP_P guard before the backward scan,
   the loop exit conditions, the del_first/del_last assignments inside the
   branches, and the BB_END updates at the end are missing from view
   (numbering gaps).  Kept byte-identical.  */
574 /* Blocks A and B are to be merged into a single block A. The insns
575 are already contiguous. */
578 rtl_merge_blocks (basic_block a, basic_block b)
580 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
581 rtx del_first = NULL_RTX, del_last = NULL_RTX;
585 fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);
587 /* If there was a CODE_LABEL beginning B, delete it. */
588 if (LABEL_P (b_head))
590 /* This might have been an EH label that no longer has incoming
591 EH edges. Update data structures to match. */
592 maybe_remove_eh_handler (b_head);
594 /* Detect basic blocks with nothing but a label. This can happen
595 in particular at the end of a function. */
599 del_first = del_last = b_head;
600 b_head = NEXT_INSN (b_head);
603 /* Delete the basic block note and handle blocks containing just that
605 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
613 b_head = NEXT_INSN (b_head);
616 /* If there was a jump out of A, delete it. */
621 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
623 || NOTE_INSN_BASIC_BLOCK_P (prev)
624 || prev == BB_HEAD (a))
630 /* If this was a conditional jump, we need to also delete
631 the insn that set cc0. */
632 if (only_sets_cc0_p (prev))
636 prev = prev_nonnote_insn (prev);
643 a_end = PREV_INSN (del_first);
645 else if (BARRIER_P (NEXT_INSN (a_end)))
646 del_first = NEXT_INSN (a_end);
648 /* Delete everything marked above as well as crap that might be
649 hanging out between the two blocks. */
651 delete_insn_chain (del_first, del_last, true);
653 /* Reassociate the insns of B with A. */
656 update_bb_for_insn_chain (a_end, b_end, a);
661 df_bb_delete (b->index);
/* NOTE(review): lossy extract -- the early `return false' after the
   partition check, parts of the merged-block predicate, and the label
   creation branch body of block_label fall in numbering gaps.
   Kept byte-identical.  */
666 /* Return true when block A and B can be merged. */
669 rtl_can_merge_blocks (basic_block a, basic_block b)
671 /* If we are partitioning hot/cold basic blocks, we don't want to
672 mess up unconditional or indirect jumps that cross between hot
675 Basic block partitioning may result in some jumps that appear to
676 be optimizable (or blocks that appear to be mergeable), but which really
677 must be left untouched (they are required to make it safely across
678 partition boundaries). See the comments at the top of
679 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
681 if (BB_PARTITION (a) != BB_PARTITION (b))
684 /* There must be exactly one edge in between the blocks. */
685 return (single_succ_p (a)
686 && single_succ (a) == b
689 /* Must be simple edge. */
690 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
692 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
693 /* If the jump insn has side effects,
694 we can't kill the edge. */
695 && (!JUMP_P (BB_END (a))
697 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
700 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
704 block_label (basic_block block)
706 if (block == EXIT_BLOCK_PTR)
709 if (!LABEL_P (BB_HEAD (block)))
711 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
714 return BB_HEAD (block);
/* NOTE(review): lossy extract -- the early `return NULL' statements after
   the various guards, the HAVE_cc0 conditionals, the in-cfglayout branch
   headers, the edge-removal loop around line 879, and the final return
   are missing from view (numbering gaps).  Kept byte-identical.  */
717 /* Attempt to perform edge redirection by replacing possibly complex jump
718 instruction by unconditional jump or removing jump completely. This can
719 apply only if all edges now point to the same block. The parameters and
720 return values are equivalent to redirect_edge_and_branch. */
723 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
725 basic_block src = e->src;
726 rtx insn = BB_END (src), kill_from;
730 /* If we are partitioning hot/cold basic blocks, we don't want to
731 mess up unconditional or indirect jumps that cross between hot
734 Basic block partitioning may result in some jumps that appear to
735 be optimizable (or blocks that appear to be mergeable), but which really
736 must be left untouched (they are required to make it safely across
737 partition boundaries). See the comments at the top of
738 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
740 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
741 || BB_PARTITION (src) != BB_PARTITION (target))
744 /* We can replace or remove a complex jump only when we have exactly
745 two edges. Also, if we have exactly one outgoing edge, we can
747 if (EDGE_COUNT (src->succs) >= 3
748 /* Verify that all targets will be TARGET. Specifically, the
749 edge that is not E must also go to TARGET. */
750 || (EDGE_COUNT (src->succs) == 2
751 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
754 if (!onlyjump_p (insn))
756 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
759 /* Avoid removing branch with side effects. */
760 set = single_set (insn);
761 if (!set || side_effects_p (set))
764 /* In case we zap a conditional jump, we'll need to kill
765 the cc0 setter too. */
768 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
769 && only_sets_cc0_p (PREV_INSN (insn)))
770 kill_from = PREV_INSN (insn);
773 /* See if we can create the fallthru edge. */
774 if (in_cfglayout || can_fallthru (src, target))
777 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
780 /* Selectively unlink whole insn chain. */
783 rtx insn = src->il.rtl->footer;
785 delete_insn_chain (kill_from, BB_END (src), false);
787 /* Remove barriers but keep jumptables. */
790 if (BARRIER_P (insn))
792 if (PREV_INSN (insn))
793 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
795 src->il.rtl->footer = NEXT_INSN (insn);
796 if (NEXT_INSN (insn))
797 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
801 insn = NEXT_INSN (insn);
805 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
809 /* If this already is simplejump, redirect it. */
810 else if (simplejump_p (insn))
812 if (e->dest == target)
815 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
816 INSN_UID (insn), e->dest->index, target->index);
817 if (!redirect_jump (insn, block_label (target), 0))
819 gcc_assert (target == EXIT_BLOCK_PTR);
824 /* Cannot do anything for target exit block. */
825 else if (target == EXIT_BLOCK_PTR)
828 /* Or replace possibly complicated jump insn by simple jump insn. */
831 rtx target_label = block_label (target);
832 rtx barrier, label, table;
834 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
835 JUMP_LABEL (BB_END (src)) = target_label;
836 LABEL_NUSES (target_label)++;
838 fprintf (dump_file, "Replacing insn %i by jump %i\n",
839 INSN_UID (insn), INSN_UID (BB_END (src)));
842 delete_insn_chain (kill_from, insn, false);
844 /* Recognize a tablejump that we are converting to a
845 simple jump and remove its associated CODE_LABEL
846 and ADDR_VEC or ADDR_DIFF_VEC. */
847 if (tablejump_p (insn, &label, &table))
848 delete_insn_chain (label, table, false);
850 barrier = next_nonnote_insn (BB_END (src));
851 if (!barrier || !BARRIER_P (barrier))
852 emit_barrier_after (BB_END (src));
855 if (barrier != NEXT_INSN (BB_END (src)))
857 /* Move the jump before barrier so that the notes
858 which originally were or were created before jump table are
859 inside the basic block. */
860 rtx new_insn = BB_END (src);
862 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
863 PREV_INSN (barrier), src);
/* Manual unlink/relink of new_insn to just before the barrier.  */
865 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
866 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
868 NEXT_INSN (new_insn) = barrier;
869 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
871 PREV_INSN (new_insn) = PREV_INSN (barrier);
872 PREV_INSN (barrier) = new_insn;
877 /* Keep only one edge out and set proper flags. */
878 if (!single_succ_p (src))
880 gcc_assert (single_succ_p (src));
882 e = single_succ_edge (src);
884 e->flags = EDGE_FALLTHRU;
888 e->probability = REG_BR_PROB_BASE;
889 e->count = src->count;
891 if (e->dest != target)
892 redirect_edge_succ (e, target);
/* NOTE(review): lossy extract -- the declarations of tmp/vec/j/new_label,
   the early returns after the fallthru and !JUMP_P guards, and the final
   return are missing from view (numbering gaps).  Kept byte-identical.  */
896 /* Redirect edge representing branch of (un)conditional jump or tablejump,
899 redirect_branch_edge (edge e, basic_block target)
902 rtx old_label = BB_HEAD (e->dest);
903 basic_block src = e->src;
904 rtx insn = BB_END (src);
906 /* We can only redirect non-fallthru edges of jump insn. */
907 if (e->flags & EDGE_FALLTHRU)
909 else if (!JUMP_P (insn))
912 /* Recognize a tablejump and adjust all matching cases. */
913 if (tablejump_p (insn, NULL, &tmp))
917 rtx new_label = block_label (target);
919 if (target == EXIT_BLOCK_PTR)
921 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
922 vec = XVEC (PATTERN (tmp), 0);
924 vec = XVEC (PATTERN (tmp), 1);
/* Rewrite every table entry that pointed at the old destination.  */
926 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
927 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
929 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
930 --LABEL_NUSES (old_label);
931 ++LABEL_NUSES (new_label);
934 /* Handle casesi dispatch insns. */
935 if ((tmp = single_set (insn)) != NULL
936 && SET_DEST (tmp) == pc_rtx
937 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
938 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
939 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
941 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
943 --LABEL_NUSES (old_label);
944 ++LABEL_NUSES (new_label);
949 /* ?? We may play the games with moving the named labels from
950 one basic block to the other in case only one computed_jump is
952 if (computed_jump_p (insn)
953 /* A return instruction can't be redirected. */
954 || returnjump_p (insn))
957 /* If the insn doesn't go where we think, we're confused. */
958 gcc_assert (JUMP_LABEL (insn) == old_label);
960 /* If the substitution doesn't succeed, die. This can happen
961 if the back end emitted unrecognizable instructions or if
962 target is exit block on some arches. */
963 if (!redirect_jump (insn, block_label (target), 0))
965 gcc_assert (target == EXIT_BLOCK_PTR);
971 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
972 e->src->index, e->dest->index, target->index);
974 if (e->dest != target)
975 e = redirect_edge_succ_nodup (e, target);
/* NOTE(review): lossy extract -- the declaration of `ret', the early
   returns after each guard, and the final return are missing from view
   (numbering gaps).  Kept byte-identical.  */
980 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
981 expense of adding new instructions or reordering basic blocks.
983 Function can be also called with edge destination equivalent to the TARGET.
984 Then it should try the simplifications and do nothing if none is possible.
986 Return edge representing the branch if transformation succeeded. Return NULL
988 We still return NULL in case E already destinated TARGET and we didn't
989 managed to simplify instruction stream. */
992 rtl_redirect_edge_and_branch (edge e, basic_block target)
995 basic_block src = e->src;
997 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1000 if (e->dest == target)
/* First try the cheap route: replace/remove the jump entirely.  */
1003 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1005 df_set_bb_dirty (src);
1009 ret = redirect_branch_edge (e, target);
1013 df_set_bb_dirty (src);
/* NOTE(review): lossy extract -- numbering gaps hide the declarations of
   note/loc/new_edge, several closing braces, probability clamping, the
   gen_return fallback branch, and the final `return new_bb'.  The only
   code change made here is repairing a mojibake on original line 1113:
   `¬e))' is the HTML-entity-collapsed form of `&note))' (`&not' ->
   U+00AC); tablejump_p's third argument is an `rtx *' out-parameter, as
   the intact calls `tablejump_p (insn, &label, &table)' and
   `tablejump_p (insn, NULL, &tmp)' elsewhere in this file show.
   All other tokens are kept byte-identical.  */
1017 /* Like force_nonfallthru below, but additionally performs redirection
1018 Used by redirect_edge_and_branch_force. */
1021 force_nonfallthru_and_redirect (edge e, basic_block target)
1023 basic_block jump_block, new_bb = NULL, src = e->src;
1026 int abnormal_edge_flags = 0;
1029 /* In the case the last instruction is conditional jump to the next
1030 instruction, first redirect the jump itself and then continue
1031 by creating a basic block afterwards to redirect fallthru edge. */
1032 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1033 && any_condjump_p (BB_END (e->src))
1034 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1037 edge b = unchecked_make_edge (e->src, target, 0);
1040 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1041 gcc_assert (redirected);
1043 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1046 int prob = INTVAL (XEXP (note, 0));
/* Split E's probability/count between the new branch edge B and E.  */
1048 b->probability = prob;
1049 b->count = e->count * prob / REG_BR_PROB_BASE;
1050 e->probability -= e->probability;
1051 e->count -= b->count;
1052 if (e->probability < 0)
1059 if (e->flags & EDGE_ABNORMAL)
1061 /* Irritating special case - fallthru edge to the same block as abnormal
1063 We can't redirect abnormal edge, but we still can split the fallthru
1064 one and create separate abnormal edge to original destination.
1065 This allows bb-reorder to make such edge non-fallthru. */
1066 gcc_assert (e->dest == target);
1067 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1068 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1072 gcc_assert (e->flags & EDGE_FALLTHRU);
1073 if (e->src == ENTRY_BLOCK_PTR)
1075 /* We can't redirect the entry block. Create an empty block
1076 at the start of the function which we use to add the new
1082 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1084 /* Change the existing edge's source to be the new block, and add
1085 a new edge from the entry block to the new block. */
1087 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1091 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
1101 VEC_safe_push (edge, gc, bb->succs, e);
1102 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1106 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
1108 /* Create the new structures. */
1110 /* If the old block ended with a tablejump, skip its table
1111 by searching forward from there. Otherwise start searching
1112 forward from the last instruction of the old block. */
1113 if (!tablejump_p (BB_END (e->src), NULL, &note))
1114 note = BB_END (e->src);
1115 note = NEXT_INSN (note);
1117 jump_block = create_basic_block (note, NULL, e->src);
1118 jump_block->count = e->count;
1119 jump_block->frequency = EDGE_FREQUENCY (e);
1120 jump_block->loop_depth = target->loop_depth;
1122 /* Make sure new block ends up in correct hot/cold section. */
1124 BB_COPY_PARTITION (jump_block, e->src);
1125 if (flag_reorder_blocks_and_partition
1126 && targetm.have_named_sections
1127 && JUMP_P (BB_END (jump_block))
1128 && !any_condjump_p (BB_END (jump_block))
1129 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1130 add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);
1133 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1134 new_edge->probability = e->probability;
1135 new_edge->count = e->count;
1137 /* Redirect old edge. */
1138 redirect_edge_pred (e, jump_block);
1139 e->probability = REG_BR_PROB_BASE;
1141 new_bb = jump_block;
1144 jump_block = e->src;
1146 if (e->goto_locus && e->goto_block == NULL)
1147 loc = e->goto_locus;
1150 e->flags &= ~EDGE_FALLTHRU;
1151 if (target == EXIT_BLOCK_PTR)
1154 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
1161 rtx label = block_label (target);
1162 emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
1163 JUMP_LABEL (BB_END (jump_block)) = label;
1164 LABEL_NUSES (label)++;
1167 emit_barrier_after (BB_END (jump_block));
1168 redirect_edge_succ_nodup (e, target);
1170 if (abnormal_edge_flags)
1171 make_edge (src, target, abnormal_edge_flags);
1173 df_mark_solutions_dirty ();
1177 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1178 (and possibly create new basic block) to make edge non-fallthru.
1179 Return newly created BB or NULL if none. */
1182 force_nonfallthru (edge e)
1184 return force_nonfallthru_and_redirect (e, e->dest);
/* NOTE(review): lossy extract -- early returns, the declaration of `q',
   the barrier/label checks inside the scan loop, the deletion of the jump
   itself, and rtl_move_block_after's body are missing from view
   (numbering gaps).  Kept byte-identical.  */
1187 /* Redirect edge even at the expense of creating new jump insn or
1188 basic block. Return new basic block if created, NULL otherwise.
1189 Conversion must be possible. */
1192 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1194 if (redirect_edge_and_branch (e, target)
1195 || e->dest == target)
1198 /* In case the edge redirection failed, try to force it to be non-fallthru
1199 and redirect newly created simplejump. */
1200 df_set_bb_dirty (e->src);
1201 return force_nonfallthru_and_redirect (e, target);
1204 /* The given edge should potentially be a fallthru edge. If that is in
1205 fact true, delete the jump and barriers that are in the way. */
1208 rtl_tidy_fallthru_edge (edge e)
1211 basic_block b = e->src, c = b->next_bb;
1213 /* ??? In a late-running flow pass, other folks may have deleted basic
1214 blocks by nopping out blocks, leaving multiple BARRIERs between here
1215 and the target label. They ought to be chastised and fixed.
1217 We can also wind up with a sequence of undeletable labels between
1218 one block and the next.
1220 So search through a sequence of barriers, labels, and notes for
1221 the head of block C and assert that we really do fall through. */
1223 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1227 /* Remove what will soon cease being the jump insn from the source block.
1228 If block B consisted only of this single jump, turn it into a deleted
1233 && (any_uncondjump_p (q)
1234 || single_succ_p (b)))
1237 /* If this was a conditional jump, we need to also delete
1238 the insn that set cc0. */
1239 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1246 /* Selectively unlink the sequence. */
1247 if (q != PREV_INSN (BB_HEAD (c)))
1248 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1250 e->flags |= EDGE_FALLTHRU;
1253 /* Should move basic block BB after basic block AFTER. NIY. */
1256 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1257 basic_block after ATTRIBUTE_UNUSED)
1262 /* Split a (typically critical) edge.  Return the new block.
1263 The edge must not be abnormal.
1265 ??? The code generally expects to be called on critical edges.
1266 The case of a block ending in an unconditional jump to a
1267 block with multiple predecessors is not handled optimally.  */
/* NOTE(review): this extract is gapped -- the return-type line, opening
   brace, local declarations and several statements between the numbered
   lines are missing.  Comments below describe only what is visible.  */
1270 rtl_split_edge (edge edge_in)
1275 /* Abnormal edges cannot be split.  */
1276 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1278 /* We are going to place the new block in front of edge destination.
1279 Avoid existence of fallthru predecessors.  */
/* If EDGE_IN itself is not the fallthru edge into its destination, force
   any other fallthru predecessor of the destination to become an explicit
   jump, so the new block can be placed immediately before the dest.  */
1280 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1285 FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
1286 if (e->flags & EDGE_FALLTHRU)
1290 force_nonfallthru (e);
1293 /* Create the basic block note.  */
1294 if (edge_in->dest != EXIT_BLOCK_PTR)
1295 before = BB_HEAD (edge_in->dest);
1299 /* If this is a fall through edge to the exit block, the blocks might be
1300 not adjacent, and the right place is the after the source.  */
1301 if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
1303 before = NEXT_INSN (BB_END (edge_in->src));
/* Place the new block after the source and inherit its hot/cold
   partition, since the dest here is the (artificial) exit block.  */
1304 bb = create_basic_block (before, NULL, edge_in->src);
1305 BB_COPY_PARTITION (bb, edge_in->src);
1309 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1310 /* ??? Why not edge_in->dest->prev_bb here?  */
1311 BB_COPY_PARTITION (bb, edge_in->dest);
/* The new block unconditionally falls through into the old destination.  */
1314 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1316 /* For non-fallthru edges, we must adjust the predecessor's
1317 jump instruction to target our new block.  */
1318 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1320 edge redirected = redirect_edge_and_branch (edge_in, bb);
1321 gcc_assert (redirected);
/* Fallthru case: no jump to rewrite, just repoint the edge.  */
1324 redirect_edge_succ (edge_in, bb);
1329 /* Queue instructions for insertion on an edge between two basic blocks.
1330 The new instructions and basic blocks (if any) will not appear in the
1331 CFG until commit_edge_insertions is called.  */
1334 insert_insn_on_edge (rtx pattern, edge e)
1336 /* We cannot insert instructions on an abnormal critical edge.
1337 It will be easier to find the culprit if we die now.  */
1338 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
/* Either start a fresh insn sequence or append to the insns already
   queued on this edge.  NOTE(review): the statement taken when
   e->insns.r is NULL is missing from this extract (presumably
   start_sequence) -- confirm against the full file.  */
1340 if (e->insns.r == NULL_RTX)
1343 push_to_sequence (e->insns.r);
1345 emit_insn (pattern);
/* Save the accumulated sequence back on the edge for commit time.  */
1347 e->insns.r = get_insns ();
1351 /* Update the CFG for the instructions queued on edge E.  */
/* NOTE(review): heavily gapped extract -- braces, else-arms and several
   statements between the numbered lines are missing.  Comments describe
   only what the visible lines establish.  */
1354 commit_one_edge_insertion (edge e)
1356 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1357 basic_block bb = NULL;
1359 /* Pull the insns off the edge now since the edge might go away.  */
1361 e->insns.r = NULL_RTX;
1363 if (!before && !after)
1365 /* Figure out where to put these things.  If the destination has
1366 one predecessor, insert there.  Except for the exit block.  */
1367 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1371 /* Get the location correct wrt a code label, and "nice" wrt
1372 a basic block note, and before everything else.  */
1375 tmp = NEXT_INSN (tmp);
1376 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1377 tmp = NEXT_INSN (tmp);
1378 if (tmp == BB_HEAD (bb))
1381 after = PREV_INSN (tmp);
1383 after = get_last_insn ();
1386 /* If the source has one successor and the edge is not abnormal,
1387 insert there.  Except for the entry block.  */
1388 else if ((e->flags & EDGE_ABNORMAL) == 0
1389 && single_succ_p (e->src)
1390 && e->src != ENTRY_BLOCK_PTR)
1394 /* It is possible to have a non-simple jump here.  Consider a target
1395 where some forms of unconditional jumps clobber a register.  This
1396 happens on the fr30 for example.
1398 We know this block has a single successor, so we can just emit
1399 the queued insns before the jump.  */
1400 if (JUMP_P (BB_END (bb)))
1401 before = BB_END (bb);
1404 /* We'd better be fallthru, or we've lost track of
1406 gcc_assert (e->flags & EDGE_FALLTHRU);
1408 after = BB_END (bb);
1411 /* Otherwise we must split the edge.  */
/* Splitting yields a fresh block on the edge; insns go at its end.  */
1414 bb = split_edge (e);
1415 after = BB_END (bb);
/* Hot/cold partitioning fixup: when the (possibly new) block hangs off a
   cold-partition source and the edge does not cross sections, scan for
   the block note and tag crossing jumps with REG_CROSSING_JUMP.  */
1417 if (flag_reorder_blocks_and_partition
1418 && targetm.have_named_sections
1419 && e->src != ENTRY_BLOCK_PTR
1420 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1421 && !(e->flags & EDGE_CROSSING))
1423 rtx bb_note, cur_insn;
1426 for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
1427 cur_insn = NEXT_INSN (cur_insn))
1428 if (NOTE_INSN_BASIC_BLOCK_P (cur_insn))
1434 if (JUMP_P (BB_END (bb))
1435 && !any_condjump_p (BB_END (bb))
1436 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1437 add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
1442 /* Now that we've found the spot, do the insertion.  */
1446 emit_insn_before_noloc (insns, before, bb);
1447 last = prev_nonnote_insn (before);
1450 last = emit_insn_after_noloc (insns, after, bb);
/* If the queued insns ended in a return, the block now exits the
   function: drop the fallthru flag and terminate with a barrier.  */
1452 if (returnjump_p (last))
1454 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1455 This is not currently a problem because this only happens
1456 for the (single) epilogue, which already has a fallthru edge
1459 e = single_succ_edge (bb);
1460 gcc_assert (e->dest == EXIT_BLOCK_PTR
1461 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1463 e->flags &= ~EDGE_FALLTHRU;
1464 emit_barrier_after (last);
1467 delete_insn (before);
1470 gcc_assert (!JUMP_P (last));
1472 /* Mark the basic block for find_many_sub_basic_blocks.  */
/* NOTE(review): the marking statement itself is missing from this
   extract; only the IR-mode guard is visible.  */
1473 if (current_ir_type () != IR_RTL_CFGLAYOUT)
1477 /* Update the CFG for all queued instructions.  */
1480 commit_edge_insertions (void)
1484 bool changed = false;
1486 #ifdef ENABLE_CHECKING
1487 verify_flow_info ();
/* Walk every block (including the entry) and commit the insn sequences
   queued on each successor edge.  NOTE(review): the guard that checks
   e->insns.r and sets CHANGED is missing from this gapped extract.  */
1490 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1495 FOR_EACH_EDGE (e, ei, bb->succs)
1499 commit_one_edge_insertion (e);
1506 /* In the old rtl CFG API, it was OK to insert control flow on an
1507 edge, apparently?  In cfglayout mode, this will *not* work, and
1508 the caller is responsible for making sure that control flow is
1509 valid at all times.  */
1510 if (current_ir_type () == IR_RTL_CFGLAYOUT)
/* Non-cfglayout mode: re-discover any sub-basic-blocks created by the
   inserted control flow, over the bitmap of touched blocks.  */
1513 blocks = sbitmap_alloc (last_basic_block);
1514 sbitmap_zero (blocks);
1518 SET_BIT (blocks, bb->index);
1519 /* Check for forgotten bb->aux values before commit_edge_insertions
1521 gcc_assert (bb->aux == &bb->aux);
1524 find_many_sub_basic_blocks (blocks);
1525 sbitmap_free (blocks);
1529 /* Print out RTL-specific basic block information (live information
1530 at start and end).  */
1533 rtl_dump_bb (basic_block bb, FILE *outf, int indent, int flags ATTRIBUTE_UNUSED)
/* Build an indentation prefix of INDENT spaces on the stack.  */
1539 s_indent = (char *) alloca ((size_t) indent + 1);
1540 memset (s_indent, ' ', (size_t) indent);
1541 s_indent[indent] = '\0';
/* Dataflow info above the block, the insns themselves, then dataflow
   info below.  NOTE(review): the df_dump_* calls are likely guarded by a
   condition missing from this gapped extract -- confirm.  */
1545 df_dump_top (bb, outf);
1549 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1550 insn = NEXT_INSN (insn))
1551 print_rtl_single (outf, insn);
1555 df_dump_bottom (bb, outf);
1561 /* Like print_rtl, but also print out live information for the start of each
/* Dump the whole insn chain starting at RTX_FIRST, annotating each insn
   with the basic block structure (start/end markers, pred/succ edges,
   and whether an insn sits outside or inside multiple blocks).  */
1565 print_rtl_with_bb (FILE *outf, const_rtx rtx_first)
1569 fprintf (outf, "(nil)\n");
1572 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1573 int max_uid = get_max_uid ();
/* Per-UID maps: which block starts/ends at each insn, and each insn's
   membership state.  XCNEWVEC zero-initializes, so NOT_IN_BB/NULL is the
   default.  */
1574 basic_block *start = XCNEWVEC (basic_block, max_uid);
1575 basic_block *end = XCNEWVEC (basic_block, max_uid);
1576 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1581 df_dump_start (outf);
/* First pass: record block boundaries and membership per insn UID.  */
1583 FOR_EACH_BB_REVERSE (bb)
1587 start[INSN_UID (BB_HEAD (bb))] = bb;
1588 end[INSN_UID (BB_END (bb))] = bb;
1589 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1591 enum bb_state state = IN_MULTIPLE_BB;
1593 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1595 in_bb_p[INSN_UID (x)] = state;
1597 if (x == BB_END (bb))
/* Second pass: walk the whole chain, printing block headers/footers
   around the insns they delimit.  */
1602 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1605 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1610 fprintf (outf, ";; Start of basic block (");
1611 FOR_EACH_EDGE (e, ei, bb->preds)
1612 fprintf (outf, " %d", e->src->index);
1613 fprintf (outf, ") -> %d\n", bb->index);
1617 df_dump_top (bb, outf);
1620 FOR_EACH_EDGE (e, ei, bb->preds)
1622 fputs (";; Pred edge ", outf);
1623 dump_edge_info (outf, e, 0);
/* Notes and barriers legitimately live between blocks; anything else
   outside a block is worth flagging in the dump.  */
1628 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1629 && !NOTE_P (tmp_rtx)
1630 && !BARRIER_P (tmp_rtx))
1631 fprintf (outf, ";; Insn is not within a basic block\n");
1632 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1633 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1635 did_output = print_rtl_single (outf, tmp_rtx);
1637 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1642 fprintf (outf, ";; End of basic block %d -> (", bb->index);
1643 FOR_EACH_EDGE (e, ei, bb->succs)
1644 fprintf (outf, " %d", e->dest->index);
1645 fprintf (outf, ")\n");
1649 df_dump_bottom (bb, outf);
1653 FOR_EACH_EDGE (e, ei, bb->succs)
1655 fputs (";; Succ edge ", outf);
1656 dump_edge_info (outf, e, 1);
/* Finally dump any insns parked on the epilogue delay list, which are
   not part of the normal chain.  */
1669 if (crtl->epilogue_delay_list != 0)
1671 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1672 for (tmp_rtx = crtl->epilogue_delay_list; tmp_rtx != 0;
1673 tmp_rtx = XEXP (tmp_rtx, 1))
1674 print_rtl_single (outf, XEXP (tmp_rtx, 0));
/* Sync the REG_BR_PROB note on BB's ending jump with the probability
   recorded on the CFG's branch edge.  No-op when BB does not end in a
   jump, has no note, or the note already matches.  */
1679 update_br_prob_note (basic_block bb)
1682 if (!JUMP_P (BB_END (bb)))
1684 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1685 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
/* CFG is the authority: rewrite the note operand from the edge.  */
1687 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1690 /* Get the last insn associated with block BB (that includes barriers and
1691 tablejumps after BB).  */
1693 get_last_bb_insn (basic_block bb)
1696 rtx end = BB_END (bb);
1698 /* Include any jump table following the basic block.  */
/* NOTE(review): the statement that advances END to the table insn is
   missing from this gapped extract.  */
1699 if (tablejump_p (end, NULL, &tmp))
1702 /* Include any barriers that may follow the basic block.  */
1703 tmp = next_nonnote_insn (end);
/* Walk forward over consecutive barriers, extending END each time.  */
1704 while (tmp && BARRIER_P (tmp))
1707 tmp = next_nonnote_insn (end);
1713 /* Verify the CFG and RTL consistency common for both underlying RTL and
1716 Currently it does following checks:
1718 - overlapping of basic blocks
1719 - insns with wrong BLOCK_FOR_INSN pointers
1720 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1721 - tails of basic blocks (ensure that boundary is necessary)
1722 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1723 and NOTE_INSN_BASIC_BLOCK
1724 - verify that no fall_thru edge crosses hot/cold partition boundaries
1725 - verify that there are no pending RTL branch predictions
1727 In future it can be extended check a lot of other stuff as well
1728 (reachability of basic blocks, life information, etc. etc.).  */
/* NOTE(review): gapped extract -- braces, err++ statements and some
   declarations between the numbered lines are missing.  */
1731 rtl_verify_flow_info_1 (void)
1737 /* Check the general integrity of the basic blocks.  */
1738 FOR_EACH_BB_REVERSE (bb)
1742 if (!(bb->flags & BB_RTL))
1744 error ("BB_RTL flag not set for block %d", bb->index);
/* Every insn inside the block must point back at it.  */
1748 FOR_BB_INSNS (bb, insn)
1749 if (BLOCK_FOR_INSN (insn) != bb)
1751 error ("insn %d basic block pointer is %d, should be %d",
1753 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
/* Header/footer insns (cfglayout mode) live outside any block, so
   their bb field must be NULL (barriers excepted).  */
1758 for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
1759 if (!BARRIER_P (insn)
1760 && BLOCK_FOR_INSN (insn) != NULL)
1762 error ("insn %d in header of bb %d has non-NULL basic block",
1763 INSN_UID (insn), bb->index);
1766 for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
1767 if (!BARRIER_P (insn)
1768 && BLOCK_FOR_INSN (insn) != NULL)
1770 error ("insn %d in footer of bb %d has non-NULL basic block",
1771 INSN_UID (insn), bb->index);
1776 /* Now check the basic blocks (boundaries etc.) */
1777 FOR_EACH_BB_REVERSE (bb)
1779 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1780 edge e, fallthru = NULL;
/* REG_BR_PROB on a conditional jump must agree with the CFG edge
   probability (unless profile info is absent).  */
1784 if (JUMP_P (BB_END (bb))
1785 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1786 && EDGE_COUNT (bb->succs) >= 2
1787 && any_condjump_p (BB_END (bb)))
1789 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
1790 && profile_status != PROFILE_ABSENT)
1792 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1793 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
/* Classify each outgoing edge and count the categories.  */
1797 FOR_EACH_EDGE (e, ei, bb->succs)
1799 if (e->flags & EDGE_FALLTHRU)
1801 n_fallthru++, fallthru = e;
1802 if ((e->flags & EDGE_CROSSING)
1803 || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
1804 && e->src != ENTRY_BLOCK_PTR
1805 && e->dest != EXIT_BLOCK_PTR))
1807 error ("fallthru edge crosses section boundary (bb %i)",
/* An edge with only "benign" flags counts as a plain branch edge.  */
1813 if ((e->flags & ~(EDGE_DFS_BACK
1815 | EDGE_IRREDUCIBLE_LOOP
1817 | EDGE_CROSSING)) == 0)
1820 if (e->flags & EDGE_ABNORMAL_CALL)
1823 if (e->flags & EDGE_EH)
1825 else if (e->flags & EDGE_ABNORMAL)
/* Cross-check the edge counts against the block's last insn.  */
1829 if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
1830 && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
1832 error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
1836 && (!JUMP_P (BB_END (bb))
1837 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1838 || any_condjump_p (BB_END (bb))))))
1840 error ("too many outgoing branch edges from bb %i", bb->index);
1843 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
1845 error ("fallthru edge after unconditional jump %i", bb->index);
1848 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
1850 error ("wrong amount of branch edges after unconditional jump %i", bb->index);
1853 if (n_branch != 1 && any_condjump_p (BB_END (bb))
1854 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
1856 error ("wrong amount of branch edges after conditional jump %i",
1860 if (n_call && !CALL_P (BB_END (bb)))
1862 error ("call edges for non-call insn in bb %i", bb->index);
1866 && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
1867 && (!JUMP_P (BB_END (bb))
1868 || any_condjump_p (BB_END (bb))
1869 || any_uncondjump_p (BB_END (bb))))
1871 error ("abnormal edges for no purpose in bb %i", bb->index);
/* Per-insn check: everything in [BB_HEAD, BB_END] must claim BB.  */
1875 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
1876 /* We may have a barrier inside a basic block before dead code
1877 elimination.  There is no BLOCK_FOR_INSN field in a barrier.  */
1878 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
1881 if (! BLOCK_FOR_INSN (x))
1883 ("insn %d inside basic block %d but block_for_insn is NULL",
1884 INSN_UID (x), bb->index);
1887 ("insn %d inside basic block %d but block_for_insn is %i",
1888 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
1893 /* OK pointers are correct.  Now check the header of basic
1894 block.  It ought to contain optional CODE_LABEL followed
1895 by NOTE_BASIC_BLOCK.  */
1899 if (BB_END (bb) == x)
1901 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1909 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
1911 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1916 if (BB_END (bb) == x)
1917 /* Do checks for empty blocks here.  */
/* Scan the block body: no stray block notes, and no control-flow insn
   before the recorded end of the block.  */
1920 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
1922 if (NOTE_INSN_BASIC_BLOCK_P (x))
1924 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
1925 INSN_UID (x), bb->index);
1929 if (x == BB_END (bb))
1932 if (control_flow_insn_p (x))
1934 error ("in basic block %d:", bb->index);
1935 fatal_insn ("flow control insn inside a basic block", x);
1944 /* Verify the CFG and RTL consistency common for both underlying RTL and
1947 Currently it does following checks:
1948 - all checks of rtl_verify_flow_info_1
1949 - test head/end pointers
1950 - check that all insns are in the basic blocks
1951 (except the switch handling code, barriers and notes)
1952 - check that all returns are followed by barriers
1953 - check that all fallthru edge points to the adjacent blocks.  */
/* NOTE(review): gapped extract -- braces, err accumulation and a few
   statements between the numbered lines are missing.  */
1956 rtl_verify_flow_info (void)
1959 int err = rtl_verify_flow_info_1 ();
1961 rtx last_head = get_last_insn ();
1962 basic_block *bb_info;
1964 const rtx rtx_first = get_insns ();
1965 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
1966 const int max_uid = get_max_uid ();
1968 bb_info = XCNEWVEC (basic_block, max_uid);
/* Walk blocks last-to-first while walking the insn chain backwards, so
   each block's [head, end] range can be located in the chain.  */
1970 FOR_EACH_BB_REVERSE (bb)
1974 rtx head = BB_HEAD (bb);
1975 rtx end = BB_END (bb);
/* Scan back from LAST_HEAD looking for this block's END insn; insns
   passed over are between blocks and must have a NULL bb field.  */
1977 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1979 /* Verify the end of the basic block is in the INSN chain.  */
1983 /* And that the code outside of basic blocks has NULL bb field.  */
1985 && BLOCK_FOR_INSN (x) != NULL)
1987 error ("insn %d outside of basic blocks has non-NULL bb field",
1995 error ("end insn %d for block %d not found in the insn stream",
1996 INSN_UID (end), bb->index);
2000 /* Work backwards from the end to the head of the basic block
2001 to verify the head is in the RTL chain.  */
2002 for (; x != NULL_RTX; x = PREV_INSN (x))
2004 /* While walking over the insn chain, verify insns appear
2005 in only one basic block.  */
2006 if (bb_info[INSN_UID (x)] != NULL)
2008 error ("insn %d is in multiple basic blocks (%d and %d)",
2009 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2013 bb_info[INSN_UID (x)] = bb;
2020 error ("head insn %d for block %d not found in the insn stream",
2021 INSN_UID (head), bb->index);
/* Continue the next block's search just before this block's head.  */
2025 last_head = PREV_INSN (x);
/* Fallthru-edge checks for this block.  */
2027 FOR_EACH_EDGE (e, ei, bb->succs)
2028 if (e->flags & EDGE_FALLTHRU)
2034 /* Ensure existence of barrier in BB with no fallthru edges.  */
2035 for (insn = BB_END (bb); !insn || !BARRIER_P (insn);
2036 insn = NEXT_INSN (insn))
2038 || NOTE_INSN_BASIC_BLOCK_P (insn))
2040 error ("missing barrier after block %i", bb->index);
2045 else if (e->src != ENTRY_BLOCK_PTR
2046 && e->dest != EXIT_BLOCK_PTR)
/* A fallthru edge requires adjacent blocks with nothing executable
   (insn or barrier) between them.  */
2050 if (e->src->next_bb != e->dest)
2053 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2054 e->src->index, e->dest->index);
2058 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2059 insn = NEXT_INSN (insn))
2060 if (BARRIER_P (insn) || INSN_P (insn))
2062 error ("verify_flow_info: Incorrect fallthru %i->%i",
2063 e->src->index, e->dest->index);
2064 fatal_insn ("wrong insn in the fallthru edge", insn);
/* Whatever precedes the first block must also have a NULL bb field.  */
2070 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2072 /* Check that the code before the first basic block has NULL
2075 && BLOCK_FOR_INSN (x) != NULL)
2077 error ("insn %d outside of basic blocks has non-NULL bb field",
/* Forward pass: block notes must appear in bb-chain order, labels and
   jump tables must be placed legally, returns must be followed by
   barriers, and the block-note count must match n_basic_blocks.  */
2085 last_bb_seen = ENTRY_BLOCK_PTR;
2087 for (x = rtx_first; x; x = NEXT_INSN (x))
2089 if (NOTE_INSN_BASIC_BLOCK_P (x))
2091 bb = NOTE_BASIC_BLOCK (x);
2094 if (bb != last_bb_seen->next_bb)
2095 internal_error ("basic blocks not laid down consecutively");
2097 curr_bb = last_bb_seen = bb;
2102 switch (GET_CODE (x))
2109 /* An addr_vec is placed outside any basic block.  */
2111 && JUMP_P (NEXT_INSN (x))
2112 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2113 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
2116 /* But in any case, non-deletable labels can appear anywhere.  */
2120 fatal_insn ("insn outside basic block", x);
2125 && returnjump_p (x) && ! condjump_p (x)
2126 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2127 fatal_insn ("return not followed by barrier", x);
2128 if (curr_bb && x == BB_END (curr_bb))
2132 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2134 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2135 num_bb_notes, n_basic_blocks);
2140 /* Assume that the preceding pass has possibly eliminated jump instructions
2141 or converted the unconditional jumps.  Eliminate the edges from CFG.
2142 Return true if any edges are eliminated.  */
/* NOTE(review): gapped extract -- braces, remove_edge calls and several
   statements between the numbered lines are missing.  */
2145 purge_dead_edges (basic_block bb)
2148 rtx insn = BB_END (bb), note;
2149 bool purged = false;
2153 /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
2154 if (NONJUMP_INSN_P (insn)
2155 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2159 if (! may_trap_p (PATTERN (insn))
2160 || ((eqnote = find_reg_equal_equiv_note (insn))
2161 && ! may_trap_p (XEXP (eqnote, 0))))
2162 remove_note (insn, note);
2165 /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
2166 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2168 /* There are three types of edges we need to handle correctly here: EH
2169 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
2170 latter can appear when nonlocal gotos are used.  */
2171 if (e->flags & EDGE_EH)
2173 if (can_throw_internal (BB_END (bb))
2174 /* If this is a call edge, verify that this is a call insn.  */
2175 && (! (e->flags & EDGE_ABNORMAL_CALL)
2176 || CALL_P (BB_END (bb))))
2182 else if (e->flags & EDGE_ABNORMAL_CALL)
2184 if (CALL_P (BB_END (bb))
2185 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2186 || INTVAL (XEXP (note, 0)) >= 0))
2199 df_set_bb_dirty (bb);
/* Jump-ending blocks: keep only the edges the jump can still take.  */
2209 /* We do care only about conditional jumps and simplejumps.  */
2210 if (!any_condjump_p (insn)
2211 && !returnjump_p (insn)
2212 && !simplejump_p (insn))
2215 /* Branch probability/prediction notes are defined only for
2216 condjumps.  We've possibly turned condjump into simplejump.  */
2217 if (simplejump_p (insn))
2219 note = find_reg_note (insn, REG_BR_PROB, NULL);
2221 remove_note (insn, note);
2222 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2223 remove_note (insn, note);
2226 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2228 /* Avoid abnormal flags to leak from computed jumps turned
2229 into simplejumps.  */
2231 e->flags &= ~EDGE_ABNORMAL;
2233 /* See if this edge is one we should keep.  */
2234 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2235 /* A conditional jump can fall through into the next
2236 block, so we should keep the edge.  */
2241 else if (e->dest != EXIT_BLOCK_PTR
2242 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2243 /* If the destination block is the target of the jump,
2249 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2250 /* If the destination block is the exit block, and this
2251 instruction is a return, then keep the edge.  */
2256 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2257 /* Keep the edges that correspond to exceptions thrown by
2258 this instruction and rematerialize the EDGE_ABNORMAL
2259 flag we just cleared above.  */
2261 e->flags |= EDGE_ABNORMAL;
2266 /* We do not need this edge.  */
2267 df_set_bb_dirty (bb);
2272 if (EDGE_COUNT (bb->succs) == 0 || !purged)
2276 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2281 /* Redistribute probabilities.  */
2282 if (single_succ_p (bb))
2284 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2285 single_succ_edge (bb)->count = bb->count;
/* Two successors remain: split probability/count between branch and
   fallthru edges according to the REG_BR_PROB note.  */
2289 note = find_reg_note (insn, REG_BR_PROB, NULL);
2293 b = BRANCH_EDGE (bb);
2294 f = FALLTHRU_EDGE (bb);
2295 b->probability = INTVAL (XEXP (note, 0));
2296 f->probability = REG_BR_PROB_BASE - b->probability;
2297 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2298 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
/* Sibling calls must have exactly one (sibcall+abnormal) successor.  */
2303 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2305 /* First, there should not be any EH or ABCALL edges resulting
2306 from non-local gotos and the like.  If there were, we shouldn't
2307 have created the sibcall in the first place.  Second, there
2308 should of course never have been a fallthru edge.  */
2309 gcc_assert (single_succ_p (bb));
2310 gcc_assert (single_succ_edge (bb)->flags
2311 == (EDGE_SIBCALL | EDGE_ABNORMAL));
2316 /* If we don't see a jump insn, we don't know exactly why the block would
2317 have been broken at this point.  Look for a simple, non-fallthru edge,
2318 as these are only created by conditional branches.  If we find such an
2319 edge we know that there used to be a jump here and can then safely
2320 remove all non-fallthru edges.  */
2322 FOR_EACH_EDGE (e, ei, bb->succs)
2323 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2332 /* Remove all but the fake and fallthru edges.  The fake edge may be
2333 the only successor for this block in the case of noreturn
2335 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2337 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2339 df_set_bb_dirty (bb);
2347 gcc_assert (single_succ_p (bb));
2349 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2350 single_succ_edge (bb)->count = bb->count;
2353 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2358 /* Search all basic blocks for potentially dead edges and purge them.  Return
2359 true if some edge has been eliminated.  */
2362 purge_all_dead_edges (void)
/* Accumulate per-block results; PURGED ends up true if any block lost
   an edge.  NOTE(review): the FOR_EACH_BB loop header is missing from
   this gapped extract.  */
2369 bool purged_here = purge_dead_edges (bb);
2371 purged |= purged_here;
2377 /* Same as split_block but update cfg_layout structures.  */
2380 cfg_layout_split_block (basic_block bb, void *insnp)
2382 rtx insn = (rtx) insnp;
2383 basic_block new_bb = rtl_split_block (bb, insn);
/* The footer (barriers/tablejumps kept out-of-line in cfglayout mode)
   logically follows the second half, so move it to NEW_BB.  */
2385 new_bb->il.rtl->footer = bb->il.rtl->footer;
2386 bb->il.rtl->footer = NULL;
2391 /* Redirect Edge to DEST.  */
/* cfglayout-mode edge redirection.  Returns the redirected edge on
   success (per the visible return paths) or refuses abnormal-call/EH
   edges.  NOTE(review): gapped extract -- braces and some early-return
   statements between the numbered lines are missing.  */
2393 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
2395 basic_block src = e->src;
2398 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
2401 if (e->dest == dest)
/* Cheapest path: try to replace the jump entirely.  */
2404 if (e->src != ENTRY_BLOCK_PTR
2405 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
2407 df_set_bb_dirty (src);
/* The entry fallthru edge has no jump to rewrite; just repoint it.  */
2411 if (e->src == ENTRY_BLOCK_PTR
2412 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2415 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
2416 e->src->index, dest->index);
2418 df_set_bb_dirty (e->src);
2419 redirect_edge_succ (e, dest);
2423 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
2424 in the case the basic block appears to be in sequence.  Avoid this
2427 if (e->flags & EDGE_FALLTHRU)
2429 /* Redirect any branch edges unified with the fallthru one.  */
2430 if (JUMP_P (BB_END (src))
2431 && label_is_jump_target_p (BB_HEAD (e->dest),
2437 fprintf (dump_file, "Fallthru edge unified with branch "
2438 "%i->%i redirected to %i\n",
2439 e->src->index, e->dest->index, dest->index);
/* Temporarily strip the fallthru flag so redirect_branch_edge
   rewrites the jump, then restore it.  */
2440 e->flags &= ~EDGE_FALLTHRU;
2441 redirected = redirect_branch_edge (e, dest);
2442 gcc_assert (redirected);
2443 e->flags |= EDGE_FALLTHRU;
2444 df_set_bb_dirty (e->src);
2447 /* In case we are redirecting fallthru edge to the branch edge
2448 of conditional jump, remove it.  */
2449 if (EDGE_COUNT (src->succs) == 2)
2451 /* Find the edge that is different from E.  */
2452 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
2455 && any_condjump_p (BB_END (src))
2456 && onlyjump_p (BB_END (src)))
2457 delete_insn (BB_END (src));
2459 ret = redirect_edge_succ_nodup (e, dest);
2461 fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
2462 e->src->index, e->dest->index, dest->index);
/* Ordinary branch edge: rewrite the jump target.  */
2465 ret = redirect_branch_edge (e, dest);
2467 /* We don't want simplejumps in the insn stream during cfglayout.  */
2468 gcc_assert (!simplejump_p (BB_END (src)));
2470 df_set_bb_dirty (src);
2474 /* Simple wrapper as we always can redirect fallthru edges.  */
2476 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
/* In cfglayout mode redirection cannot fail, so assert success.  */
2478 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2480 gcc_assert (redirected);
2484 /* Same as delete_basic_block but update cfg_layout structures.  */
/* Deleting a block in cfglayout mode first splices its out-of-line
   header/footer insns back into the main chain, deletes the block, then
   moves any leftover insns onto the next block's header (or the function
   footer).  NOTE(review): gapped extract -- braces, else-arms and a few
   statements between the numbered lines are missing.  */
2487 cfg_layout_delete_block (basic_block bb)
2489 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
/* Splice the header sequence in front of BB_HEAD.  */
2491 if (bb->il.rtl->header)
2493 next = BB_HEAD (bb);
2495 NEXT_INSN (prev) = bb->il.rtl->header;
2497 set_first_insn (bb->il.rtl->header);
2498 PREV_INSN (bb->il.rtl->header) = prev;
2499 insn = bb->il.rtl->header;
2500 while (NEXT_INSN (insn))
2501 insn = NEXT_INSN (insn);
2502 NEXT_INSN (insn) = next;
2503 PREV_INSN (next) = insn;
/* Splice the footer sequence after BB_END, dropping any barriers in
   the footer first (they are unlinked in the loop below).  */
2505 next = NEXT_INSN (BB_END (bb));
2506 if (bb->il.rtl->footer)
2508 insn = bb->il.rtl->footer;
2511 if (BARRIER_P (insn))
2513 if (PREV_INSN (insn))
2514 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2516 bb->il.rtl->footer = NEXT_INSN (insn);
2517 if (NEXT_INSN (insn))
2518 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2522 insn = NEXT_INSN (insn);
2524 if (bb->il.rtl->footer)
2527 NEXT_INSN (insn) = bb->il.rtl->footer;
2528 PREV_INSN (bb->il.rtl->footer) = insn;
2529 while (NEXT_INSN (insn))
2530 insn = NEXT_INSN (insn);
2531 NEXT_INSN (insn) = next;
2533 PREV_INSN (next) = insn;
2535 set_last_insn (insn);
/* Leftover insns go to the next block's header, or the function
   footer when BB was the last real block.  */
2538 if (bb->next_bb != EXIT_BLOCK_PTR)
2539 to = &bb->next_bb->il.rtl->header;
2541 to = &cfg_layout_function_footer;
2543 rtl_delete_block (bb);
2546 prev = NEXT_INSN (prev);
2548 prev = get_insns ();
2550 next = PREV_INSN (next);
2552 next = get_last_insn ();
/* If anything remains between PREV and NEXT, unlink it and append it
   to *TO.  */
2554 if (next && NEXT_INSN (next) != prev)
2556 remaints = unlink_insn_chain (prev, next);
2558 while (NEXT_INSN (insn))
2559 insn = NEXT_INSN (insn);
2560 NEXT_INSN (insn) = *to;
2562 PREV_INSN (*to) = insn;
2567 /* Return true when blocks A and B can be safely merged.  */
2570 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2572 /* If we are partitioning hot/cold basic blocks, we don't want to
2573 mess up unconditional or indirect jumps that cross between hot
2576 Basic block partitioning may result in some jumps that appear to
2577 be optimizable (or blocks that appear to be mergeable), but which really
2578 must be left untouched (they are required to make it safely across
2579 partition boundaries).  See the comments at the top of
2580 bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
/* Never merge across a hot/cold section boundary.  */
2582 if (BB_PARTITION (a) != BB_PARTITION (b))
2585 /* There must be exactly one edge in between the blocks.  */
2586 return (single_succ_p (a)
2587 && single_succ (a) == b
2588 && single_pred_p (b) == 1
2590 /* Must be simple edge.  */
2591 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
2592 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2593 /* If the jump insn has side effects, we can't kill the edge.
2594 When not optimizing, try_redirect_by_replacing_jump will
2595 not allow us to redirect an edge by replacing a table jump.  */
2596 && (!JUMP_P (BB_END (a))
2597 || ((!optimize || reload_completed)
2598 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2601 /* Merge block A and B.  The blocks must be mergeable.  */
/* NOTE(review): gapped extract -- braces, else-arms and a few statements
   between the numbered lines are missing.  */
2604 cfg_layout_merge_blocks (basic_block a, basic_block b)
2606 #ifdef ENABLE_CHECKING
2607 gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
2611 fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);
2613 /* If there was a CODE_LABEL beginning B, delete it.  */
2614 if (LABEL_P (BB_HEAD (b)))
2616 /* This might have been an EH label that no longer has incoming
2617 EH edges.  Update data structures to match.  */
2618 maybe_remove_eh_handler (BB_HEAD (b));
2620 delete_insn (BB_HEAD (b));
2623 /* We should have fallthru edge in a, or we can do dummy redirection to get
2625 if (JUMP_P (BB_END (a)))
2626 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
2627 gcc_assert (!JUMP_P (BB_END (a)));
2629 /* When not optimizing and the edge is the only place in RTL which holds
2630 some unique locus, emit a nop with that locus in between.  */
/* Search backwards in A and forwards in B for an insn already carrying
   GOTO_LOCUS; only if neither exists is the locus-preserving nop
   emitted at the end of A.  */
2631 if (!optimize && EDGE_SUCC (a, 0)->goto_locus)
2633 rtx insn = BB_END (a), end = PREV_INSN (BB_HEAD (a));
2634 int goto_locus = EDGE_SUCC (a, 0)->goto_locus;
2636 while (insn != end && (!INSN_P (insn) || INSN_LOCATOR (insn) == 0))
2637 insn = PREV_INSN (insn);
2638 if (insn != end && locator_eq (INSN_LOCATOR (insn), goto_locus))
2643 end = NEXT_INSN (BB_END (b));
2644 while (insn != end && !INSN_P (insn))
2645 insn = NEXT_INSN (insn);
2646 if (insn != end && INSN_LOCATOR (insn) != 0
2647 && locator_eq (INSN_LOCATOR (insn), goto_locus))
2652 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
2653 INSN_LOCATOR (BB_END (a)) = goto_locus;
2657 /* Possible line number notes should appear in between.  */
2658 if (b->il.rtl->header)
2660 rtx first = BB_END (a), last;
2662 last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a), a);
2663 delete_insn_chain (NEXT_INSN (first), last, false);
2664 b->il.rtl->header = NULL;
2667 /* In the case basic blocks are not adjacent, move them around.  */
2668 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2670 rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
2672 emit_insn_after_noloc (first, BB_END (a), a);
2673 /* Skip possible DELETED_LABEL insn.  */
2674 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2675 first = NEXT_INSN (first);
2676 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
2679 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
2680 We need to explicitly call.  */
2681 update_bb_for_insn_chain (NEXT_INSN (first),
2685 delete_insn (first);
2687 /* Otherwise just re-associate the instructions.  */
2692 update_bb_for_insn_chain (BB_HEAD (b), BB_END (b), a);
2695 /* Skip possible DELETED_LABEL insn.  */
2696 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2697 insn = NEXT_INSN (insn);
2698 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
2700 BB_END (a) = BB_END (b);
2704 df_bb_delete (b->index);
2706 /* Possible tablejumps and barriers should appear after the block.  */
/* Append B's footer to A's footer (or adopt it outright).  */
2707 if (b->il.rtl->footer)
2709 if (!a->il.rtl->footer)
2710 a->il.rtl->footer = b->il.rtl->footer;
2713 rtx last = a->il.rtl->footer;
2715 while (NEXT_INSN (last))
2716 last = NEXT_INSN (last);
2717 NEXT_INSN (last) = b->il.rtl->footer;
2718 PREV_INSN (b->il.rtl->footer) = last;
2720 b->il.rtl->footer = NULL;
2724 fprintf (dump_file, "Merged blocks %d and %d.\n",
2725 a->index, b->index);
/* cfglayout-mode edge splitting: create a new block after E's source
   (or at the very start of the insn chain for the entry edge), give it
   a fallthru edge to the old destination, and force-redirect E to it.  */
2731 cfg_layout_split_edge (edge e)
2733 basic_block new_bb =
2734 create_basic_block (e->src != ENTRY_BLOCK_PTR
2735 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
/* Inherit the hot/cold partition from a real block: the source when
   the destination is the artificial exit block, else the dest.  */
2738 if (e->dest == EXIT_BLOCK_PTR)
2739 BB_COPY_PARTITION (new_bb, e->src);
2741 BB_COPY_PARTITION (new_bb, e->dest);
2742 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2743 redirect_edge_and_branch_force (e, new_bb);
2748 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */
/* NOTE(review): the body is not visible in this extract; the
   ATTRIBUTE_UNUSED parameter suggests this RTL hook needs no work --
   confirm against the full file.  */
2751 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
2755 /* Return 1 if BB ends with a call, possibly followed by some
2756 instructions that must stay with the call, 0 otherwise.  */
2759 rtl_block_ends_with_call_p (basic_block bb)
2761 rtx insn = BB_END (bb);
/* Walk backwards over insns that must stay glued to a call until we hit
   either a call insn or the block head.  NOTE(review): part of the loop
   condition (line 2766) is missing from this gapped extract.  */
2763 while (!CALL_P (insn)
2764 && insn != BB_HEAD (bb)
2765 && (keep_with_call_p (insn)
2767 insn = PREV_INSN (insn);
2768 return (CALL_P (insn));
2771 /* Return 1 if BB ends with a conditional branch, 0 otherwise.  */
2774 rtl_block_ends_with_condjump_p (const_basic_block bb)
/* Delegate directly to the jump-analysis predicate on the last insn.  */
2776 return any_condjump_p (BB_END (bb));
2779 /* Return true if we need to add fake edge to exit.
2780 Helper function for rtl_flow_call_edges_add. */
2783 need_fake_edge_p (const_rtx insn)
2789 && !SIBLING_CALL_P (insn)
2790 && !find_reg_note (insn, REG_NORETURN, NULL)
2791 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
2794 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2795 && MEM_VOLATILE_P (PATTERN (insn)))
2796 || (GET_CODE (PATTERN (insn)) == PARALLEL
2797 && asm_noperands (insn) != -1
2798 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2799 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2802 /* Add fake edges to the function exit for any non constant and non noreturn
2803 calls, volatile inline assembly in the bitmap of blocks specified by
2804 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
2807 The goal is to expose cases in which entering a basic block does not imply
2808 that all subsequent instructions must be executed. */
2811 rtl_flow_call_edges_add (sbitmap blocks)
2814 int blocks_split = 0;
2815 int last_bb = last_basic_block;
2816 bool check_last_block = false;
2818 if (n_basic_blocks == NUM_FIXED_BLOCKS)
2822 check_last_block = true;
2824 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
2826 /* In the last basic block, before epilogue generation, there will be
2827 a fallthru edge to EXIT. Special care is required if the last insn
2828 of the last basic block is a call because make_edge folds duplicate
2829 edges, which would result in the fallthru edge also being marked
2830 fake, which would result in the fallthru edge being removed by
2831 remove_fake_edges, which would result in an invalid CFG.
2833 Moreover, we can't elide the outgoing fake edge, since the block
2834 profiler needs to take this into account in order to solve the minimal
2835 spanning tree in the case that the call doesn't return.
2837 Handle this by adding a dummy instruction in a new last basic block. */
2838 if (check_last_block)
2840 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
2841 rtx insn = BB_END (bb);
2843 /* Back up past insns that must be kept in the same block as a call. */
2844 while (insn != BB_HEAD (bb)
2845 && keep_with_call_p (insn))
2846 insn = PREV_INSN (insn);
2848 if (need_fake_edge_p (insn))
2852 e = find_edge (bb, EXIT_BLOCK_PTR);
2855 insert_insn_on_edge (gen_use (const0_rtx), e);
2856 commit_edge_insertions ();
2861 /* Now add fake edges to the function exit for any non constant
2862 calls since there is no way that we can determine if they will
2865 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
2867 basic_block bb = BASIC_BLOCK (i);
2874 if (blocks && !TEST_BIT (blocks, i))
2877 for (insn = BB_END (bb); ; insn = prev_insn)
2879 prev_insn = PREV_INSN (insn);
2880 if (need_fake_edge_p (insn))
2883 rtx split_at_insn = insn;
2885 /* Don't split the block between a call and an insn that should
2886 remain in the same block as the call. */
2888 while (split_at_insn != BB_END (bb)
2889 && keep_with_call_p (NEXT_INSN (split_at_insn)))
2890 split_at_insn = NEXT_INSN (split_at_insn);
2892 /* The handling above of the final block before the epilogue
2893 should be enough to verify that there is no edge to the exit
2894 block in CFG already. Calling make_edge in such case would
2895 cause us to mark that edge as fake and remove it later. */
2897 #ifdef ENABLE_CHECKING
2898 if (split_at_insn == BB_END (bb))
2900 e = find_edge (bb, EXIT_BLOCK_PTR);
2901 gcc_assert (e == NULL);
2905 /* Note that the following may create a new basic block
2906 and renumber the existing basic blocks. */
2907 if (split_at_insn != BB_END (bb))
2909 e = split_block (bb, split_at_insn);
2914 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2917 if (insn == BB_HEAD (bb))
2923 verify_flow_info ();
2925 return blocks_split;
2928 /* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
2929 the conditional branch target, SECOND_HEAD should be the fall-thru
2930 there is no need to handle this here the loop versioning code handles
2931 this. the reason for SECON_HEAD is that it is needed for condition
2932 in trees, and this should be of the same type since it is a hook. */
2934 rtl_lv_add_condition_to_bb (basic_block first_head ,
2935 basic_block second_head ATTRIBUTE_UNUSED,
2936 basic_block cond_bb, void *comp_rtx)
2938 rtx label, seq, jump;
2939 rtx op0 = XEXP ((rtx)comp_rtx, 0);
2940 rtx op1 = XEXP ((rtx)comp_rtx, 1);
2941 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
2942 enum machine_mode mode;
2945 label = block_label (first_head);
2946 mode = GET_MODE (op0);
2947 if (mode == VOIDmode)
2948 mode = GET_MODE (op1);
2951 op0 = force_operand (op0, NULL_RTX);
2952 op1 = force_operand (op1, NULL_RTX);
2953 do_compare_rtx_and_jump (op0, op1, comp, 0,
2954 mode, NULL_RTX, NULL_RTX, label);
2955 jump = get_last_insn ();
2956 JUMP_LABEL (jump) = label;
2957 LABEL_NUSES (label)++;
2961 /* Add the new cond , in the new head. */
2962 emit_insn_after(seq, BB_END(cond_bb));
2966 /* Given a block B with unconditional branch at its end, get the
2967 store the return the branch edge and the fall-thru edge in
2968 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
2970 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
2971 edge *fallthru_edge)
2973 edge e = EDGE_SUCC (b, 0);
2975 if (e->flags & EDGE_FALLTHRU)
2978 *branch_edge = EDGE_SUCC (b, 1);
2983 *fallthru_edge = EDGE_SUCC (b, 1);
2988 init_rtl_bb_info (basic_block bb)
2990 gcc_assert (!bb->il.rtl);
2991 bb->il.rtl = GGC_CNEW (struct rtl_bb_info);
2995 /* Add EXPR to the end of basic block BB. */
2998 insert_insn_end_bb_new (rtx pat, basic_block bb)
3000 rtx insn = BB_END (bb);
3004 while (NEXT_INSN (pat_end) != NULL_RTX)
3005 pat_end = NEXT_INSN (pat_end);
3007 /* If the last insn is a jump, insert EXPR in front [taking care to
3008 handle cc0, etc. properly]. Similarly we need to care trapping
3009 instructions in presence of non-call exceptions. */
3012 || (NONJUMP_INSN_P (insn)
3013 && (!single_succ_p (bb)
3014 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
3019 /* If this is a jump table, then we can't insert stuff here. Since
3020 we know the previous real insn must be the tablejump, we insert
3021 the new instruction just before the tablejump. */
3022 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
3023 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
3024 insn = prev_real_insn (insn);
3027 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
3028 if cc0 isn't set. */
3029 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3031 insn = XEXP (note, 0);
3034 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
3035 if (maybe_cc0_setter
3036 && INSN_P (maybe_cc0_setter)
3037 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
3038 insn = maybe_cc0_setter;
3041 /* FIXME: What if something in cc0/jump uses value set in new
3043 new_insn = emit_insn_before_noloc (pat, insn, bb);
3046 /* Likewise if the last insn is a call, as will happen in the presence
3047 of exception handling. */
3048 else if (CALL_P (insn)
3049 && (!single_succ_p (bb)
3050 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
3052 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
3053 we search backward and place the instructions before the first
3054 parameter is loaded. Do this for everyone for consistency and a
3055 presumption that we'll get better code elsewhere as well. */
3057 /* Since different machines initialize their parameter registers
3058 in different orders, assume nothing. Collect the set of all
3059 parameter registers. */
3060 insn = find_first_parameter_load (insn, BB_HEAD (bb));
3062 /* If we found all the parameter loads, then we want to insert
3063 before the first parameter load.
3065 If we did not find all the parameter loads, then we might have
3066 stopped on the head of the block, which could be a CODE_LABEL.
3067 If we inserted before the CODE_LABEL, then we would be putting
3068 the insn in the wrong basic block. In that case, put the insn
3069 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
3070 while (LABEL_P (insn)
3071 || NOTE_INSN_BASIC_BLOCK_P (insn))
3072 insn = NEXT_INSN (insn);
3074 new_insn = emit_insn_before_noloc (pat, insn, bb);
3077 new_insn = emit_insn_after_noloc (pat, insn, bb);
3082 /* Returns true if it is possible to remove edge E by redirecting
3083 it to the destination of the other edge from E->src. */
3086 rtl_can_remove_branch_p (const_edge e)
3088 const_basic_block src = e->src;
3089 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
3090 const_rtx insn = BB_END (src), set;
3092 /* The conditions are taken from try_redirect_by_replacing_jump. */
3093 if (target == EXIT_BLOCK_PTR)
3096 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3099 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
3100 || BB_PARTITION (src) != BB_PARTITION (target))
3103 if (!onlyjump_p (insn)
3104 || tablejump_p (insn, NULL, NULL))
3107 set = single_set (insn);
3108 if (!set || side_effects_p (set))
3114 /* Implementation of CFG manipulation for linearized RTL. */
/* NOTE(review): this excerpt elides several initializer lines (the hook
   table's name string, dump_bb, delete/split/merge/split_edge entries
   and the closing brace, presumably) — entry order must exactly match
   the field order of struct cfg_hooks in cfghooks.h; do not reorder or
   insert entries without checking that header.  */
3115 struct cfg_hooks rtl_cfg_hooks = {
3117 rtl_verify_flow_info,
3119 rtl_create_basic_block,
3120 rtl_redirect_edge_and_branch,
3121 rtl_redirect_edge_and_branch_force,
3122 rtl_can_remove_branch_p,
3125 rtl_move_block_after,
3126 rtl_can_merge_blocks, /* can_merge_blocks_p */
/* Plain RTL mode does not support block duplication; the cfglayout
   hook table below provides these.  */
3130 NULL, /* can_duplicate_block_p */
3131 NULL, /* duplicate_block */
3133 rtl_make_forwarder_block,
3134 rtl_tidy_fallthru_edge,
3135 rtl_block_ends_with_call_p,
3136 rtl_block_ends_with_condjump_p,
3137 rtl_flow_call_edges_add,
3138 NULL, /* execute_on_growing_pred */
3139 NULL, /* execute_on_shrinking_pred */
3140 NULL, /* duplicate loop for trees */
3141 NULL, /* lv_add_condition_to_bb */
3142 NULL, /* lv_adjust_loop_header_phi*/
3143 NULL, /* extract_cond_bb_edges */
3144 NULL /* flush_pending_stmts */
3147 /* Implementation of CFG manipulation for cfg layout RTL, where
3148 basic block connected via fallthru edges does not have to be adjacent.
3149 This representation will hopefully become the default one in future
3150 version of the compiler. */
3152 /* We do not want to declare these functions in a header file, since they
3153 should only be used through the cfghooks interface, and we do not want to
3154 move them here since it would require also moving quite a lot of related
3155 code. They are in cfglayout.c. */
3156 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
3157 extern basic_block cfg_layout_duplicate_bb (basic_block);
3159 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
3161 rtl_verify_flow_info_1,
3163 cfg_layout_create_basic_block,
3164 cfg_layout_redirect_edge_and_branch,
3165 cfg_layout_redirect_edge_and_branch_force,
3166 rtl_can_remove_branch_p,
3167 cfg_layout_delete_block,
3168 cfg_layout_split_block,
3169 rtl_move_block_after,
3170 cfg_layout_can_merge_blocks_p,
3171 cfg_layout_merge_blocks,
3174 cfg_layout_can_duplicate_bb_p,
3175 cfg_layout_duplicate_bb,
3176 cfg_layout_split_edge,
3177 rtl_make_forwarder_block,
3179 rtl_block_ends_with_call_p,
3180 rtl_block_ends_with_condjump_p,
3181 rtl_flow_call_edges_add,
3182 NULL, /* execute_on_growing_pred */
3183 NULL, /* execute_on_shrinking_pred */
3184 duplicate_loop_to_header_edge, /* duplicate loop for trees */
3185 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
3186 NULL, /* lv_adjust_loop_header_phi*/
3187 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
3188 NULL /* flush_pending_stmts */