1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
42 find_basic_blocks also finds any unreachable loops and deletes them.
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
106 life_analysis fills in certain vectors containing information about
107 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
108 reg_n_calls_crossed and reg_basic_block.
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
119 - pre/post modify transformation
125 #include "basic-block.h"
126 #include "insn-config.h"
128 #include "hard-reg-set.h"
134 #include "insn-flags.h"
137 #define obstack_chunk_alloc xmalloc
138 #define obstack_chunk_free free
141 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
142 the stack pointer does not matter. The value is tested only in
143 functions that have frame pointers.
144 No definition is equivalent to always zero. */
145 #ifndef EXIT_IGNORE_STACK
146 #define EXIT_IGNORE_STACK 0
150 /* The contents of the current function definition are allocated
151 in this obstack, and all are freed at the end of the function.
152 For top-level functions, this is temporary_obstack.
153 Separate obstacks are made for nested functions. */
155 extern struct obstack *function_obstack;
157 /* List of labels that must never be deleted. */
158 extern rtx forced_labels;
160 /* Number of basic blocks in the current function. */
164 /* The basic block array.  Indexed by block number; entries are
   basic_block pointers.  Rebuilt by find_basic_blocks. */
166 varray_type basic_block_info;
168 /* The special entry and exit blocks.  These carry no insns of their
   own; they anchor the edges into and out of the function proper. */
170 struct basic_block_def entry_exit_blocks[2] =
177 NULL, /* local_set */
178 NULL, /* global_live_at_start */
179 NULL, /* global_live_at_end */
181 ENTRY_BLOCK, /* index */
189 NULL, /* local_set */
190 NULL, /* global_live_at_start */
191 NULL, /* global_live_at_end */
193 EXIT_BLOCK, /* index */
198 /* Nonzero if the second flow pass has completed. */
201 /* Maximum register number used in this function, plus one. */
205 /* Indexed by n, giving various register information */
207 varray_type reg_n_info;
209 /* Size of the reg_n_info table. */
211 unsigned int reg_n_max;
213 /* Element N is the next insn that uses (hard or pseudo) register number N
214 within the current basic block; or zero, if there is no such insn.
215 This is valid only during the final backward scan in propagate_block. */
217 static rtx *reg_next_use;
219 /* Size of a regset for the current function,
220 in (1) bytes and (2) elements. */
225 /* Regset of regs live when calls to `setjmp'-like functions happen. */
226 /* ??? Does this exist only for the setjmp-clobbered warning message? */
228 regset regs_live_at_setjmp;
230 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
231 that have to go in the same hard reg.
232 The first two regs in the list are a pair, and the next two
233 are another pair, etc. */
236 /* Depth within loops of basic block being scanned for lifetime analysis,
237 plus one. This is the weight attached to references to registers. */
239 static int loop_depth;
241 /* During propagate_block, this is non-zero if the value of CC0 is live. */
245 /* During propagate_block, this contains a list of all the MEMs we are
246 tracking for dead store elimination.
248 ?!? Note we leak memory by not free-ing items on this list. We need to
249 write some generic routines to operate on memory lists since cse, gcse,
250 loop, sched, flow and possibly other passes all need to do basically the
251 same operations on these lists. */
253 static rtx mem_set_list;
255 /* Set of registers that may be eliminable. These are handled specially
256 in updating regs_ever_live. */
258 static HARD_REG_SET elim_reg_set;
260 /* The basic block structure for every insn, indexed by uid. */
262 varray_type basic_block_for_insn;
264 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
265 /* ??? Should probably be using LABEL_NUSES instead. It would take a
266 bit of surgery to be able to use or co-opt the routines in jump. */
268 static rtx label_value_list;
270 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
272 #define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
273 #define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
274 static bitmap uid_volatile;
276 /* Forward declarations.  All static functions below are defined later
   in this file.  (A redundant duplicate declaration of
   notice_stack_pointer_modification has been removed.) */
277 static int count_basic_blocks PROTO((rtx));
278 static rtx find_basic_blocks_1 PROTO((rtx, rtx*));
279 static void create_basic_block PROTO((int, rtx, rtx, rtx));
280 static void compute_bb_for_insn PROTO((varray_type, int));
281 static void clear_edges PROTO((void));
282 static void make_edges PROTO((rtx, rtx*));
283 static void make_edge PROTO((basic_block, basic_block, int));
284 static void make_label_edge PROTO((basic_block, rtx, int));
285 static void mark_critical_edges PROTO((void));
287 static void commit_one_edge_insertion PROTO((edge));
289 static void delete_unreachable_blocks PROTO((void));
290 static void delete_eh_regions PROTO((void));
291 static int can_delete_note_p PROTO((rtx));
292 static void delete_insn_chain PROTO((rtx, rtx));
293 static int delete_block PROTO((basic_block));
294 static void expunge_block PROTO((basic_block));
295 static rtx flow_delete_insn PROTO((rtx));
296 static int can_delete_label_p PROTO((rtx));
297 static void merge_blocks_nomove PROTO((basic_block, basic_block));
298 static int merge_blocks PROTO((edge,basic_block,basic_block));
299 static void tidy_fallthru_edge PROTO((edge,basic_block,basic_block));
300 static void calculate_loop_depth PROTO((rtx));
302 static int set_noop_p PROTO((rtx));
303 static int noop_move_p PROTO((rtx));
304 static void notice_stack_pointer_modification PROTO ((rtx, rtx));
305 static void record_volatile_insns PROTO((rtx));
306 static void mark_regs_live_at_end PROTO((regset));
307 static void life_analysis_1 PROTO((rtx, int, int));
308 static void init_regset_vector PROTO ((regset *, int,
310 static void propagate_block PROTO((regset, rtx, rtx, int,
312 static int insn_dead_p PROTO((rtx, regset, int, rtx));
313 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
314 static void mark_set_regs PROTO((regset, regset, rtx,
316 static void mark_set_1 PROTO((regset, regset, rtx,
319 static void find_auto_inc PROTO((regset, rtx, rtx));
320 static int try_pre_increment_1 PROTO((rtx));
321 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
323 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
324 void dump_flow_info PROTO((FILE *));
325 static void dump_edge_info PROTO((FILE *, edge, int));
327 static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
328 static int_list_ptr add_int_list_node PROTO ((int_list_block **,
331 static void add_pred_succ PROTO ((int, int, int_list_ptr *,
332 int_list_ptr *, int *, int *));
334 static void count_reg_sets_1 PROTO ((rtx));
335 static void count_reg_sets PROTO ((rtx));
336 static void count_reg_references PROTO ((rtx));
338 static void invalidate_mems_from_autoinc PROTO ((rtx));
339 void verify_flow_info PROTO ((void));
341 /* Find basic blocks of the current function.
342 F is the first insn of the function and NREGS the number of register
   numbers in use.  Top-level driver: rebuilds basic_block_info, the
   insn-to-block map, the CFG edges, and per-block loop depth. */
346 find_basic_blocks (f, nregs, file, do_cleanup)
348 int nregs ATTRIBUTE_UNUSED;
349 FILE *file ATTRIBUTE_UNUSED;
355 /* Flush out existing data. */
356 if (basic_block_info != NULL)
362 /* Clear bb->aux on all extant basic blocks. We'll use this as a
363 tag for reuse during create_basic_block, just in case some pass
364 copies around basic block notes improperly. */
365 for (i = 0; i < n_basic_blocks; ++i)
366 BASIC_BLOCK (i)->aux = NULL;
368 VARRAY_FREE (basic_block_info);
371 n_basic_blocks = count_basic_blocks (f);
373 /* Size the basic block table. The actual structures will be allocated
374 by find_basic_blocks_1, since we want to keep the structure pointers
375 stable across calls to find_basic_blocks. */
376 /* ??? This whole issue would be much simpler if we called find_basic_blocks
377 exactly once, and thereafter we don't have a single long chain of
378 instructions at all until close to the end of compilation when we
379 actually lay them out. */
381 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
383 /* An array to record the active exception region at the end of each
384 basic block. It is filled in by find_basic_blocks_1 for make_edges.
   Note: alloca'd, so it lives only for the duration of this call. */
385 bb_eh_end = (rtx *) alloca (n_basic_blocks * sizeof (rtx));
387 label_value_list = find_basic_blocks_1 (f, bb_eh_end);
389 /* Record the block to which an insn belongs. */
390 /* ??? This should be done another way, by which (perhaps) a label is
391 tagged directly with the basic block that it starts. It is used for
392 more than that currently, but IMO that is the only valid use. */
394 max_uid = get_max_uid ();
396 /* Leave space for insns life_analysis makes in some cases for auto-inc.
397 These cases are rare, so we don't need too much space. */
398 max_uid += max_uid / 10;
401 VARRAY_BB_INIT (basic_block_for_insn, max_uid, "basic_block_for_insn");
402 compute_bb_for_insn (basic_block_for_insn, max_uid);
404 /* Discover the edges of our cfg. */
406 make_edges (label_value_list, bb_eh_end);
408 /* Delete unreachable blocks. */
411 delete_unreachable_blocks ();
413 /* Mark critical edges. */
415 mark_critical_edges ();
417 /* Discover the loop depth at the start of each basic block to aid
418 register allocation. */
419 calculate_loop_depth (f);
421 /* Kill the data we won't maintain. */
422 label_value_list = 0;
424 #ifdef ENABLE_CHECKING
429 /* Count the basic blocks of the function.  Also emits a no-op USE after
   any call that did not create an abnormal edge, so that such a call is
   never the last insn of its block.  Returns the block count. */
432 count_basic_blocks (f)
436 register RTX_CODE prev_code;
437 register int count = 0;
439 int call_had_abnormal_edge = 0;
440 rtx prev_call = NULL_RTX;
442 prev_code = JUMP_INSN;
443 for (insn = f; insn; insn = NEXT_INSN (insn))
445 register RTX_CODE code = GET_CODE (insn);
   /* A new block starts at a label, or at the first real insn after a
      block-ending insn (jump, barrier, abnormal-edge call).  */
447 if (code == CODE_LABEL
448 || (GET_RTX_CLASS (code) == 'i'
449 && (prev_code == JUMP_INSN
450 || prev_code == BARRIER
451 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
455 /* If the previous insn was a call that did not create an
456 abnormal edge, we want to add a nop so that the CALL_INSN
457 itself is not at basic_block_end. This allows us to
458 easily distinguish between normal calls and those which
459 create abnormal edges in the flow graph. */
461 if (count > 0 && prev_call != 0 && !call_had_abnormal_edge)
463 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
464 emit_insn_after (nop, prev_call);
468 /* Record whether this call created an edge. */
469 if (code == CALL_INSN)
471 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
472 int region = (note ? XINT (XEXP (note, 0), 0) : 1);
474 call_had_abnormal_edge = 0;
476 /* If there is a specified EH region, we have an edge. */
477 if (eh_region && region > 0)
478 call_had_abnormal_edge = 1;
481 /* If there is a nonlocal goto label and the specified
482 region number isn't -1, we have an edge. (0 means
483 no throw, but might have a nonlocal goto). */
484 if (nonlocal_goto_handler_labels && region >= 0)
485 call_had_abnormal_edge = 1;
488 else if (code != NOTE)
489 prev_call = NULL_RTX;
493 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
495 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
500 /* The rest of the compiler works a bit smoother when we don't have to
501 check for the edge case of do-nothing functions with no basic blocks. */
504 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
511 /* Find all basic blocks of the function whose first insn is F.
512 Store the correct data in the tables that describe the basic blocks,
513 set up the chains of references for each CODE_LABEL, and
514 delete any entire basic blocks that cannot be reached.
516 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
517 that are otherwise unreachable may be reachable with a non-local goto.
519 BB_EH_END is an array in which we record the list of exception regions
520 active at the end of every basic block.
   Returns the list of labels referenced from non-jump insns, for use by
   make_edges. */
523 find_basic_blocks_1 (f, bb_eh_end)
527 register rtx insn, next;
528 int call_has_abnormal_edge = 0;
530 rtx bb_note = NULL_RTX;
531 rtx eh_list = NULL_RTX;
532 rtx label_value_list = NULL_RTX;
536 /* We process the instructions in a slightly different way than we did
537 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
538 closed out the previous block, so that it gets attached at the proper
539 place. Since this form should be equivalent to the previous,
540 find_basic_blocks_0 continues to use the old form as a check. */
542 for (insn = f; insn; insn = next)
544 enum rtx_code code = GET_CODE (insn);
546 next = NEXT_INSN (insn);
548 if (code == CALL_INSN)
550 /* Record whether this call created an edge. */
551 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
552 int region = (note ? XINT (XEXP (note, 0), 0) : 1);
553 call_has_abnormal_edge = 0;
555 /* If there is an EH region, we have an edge. */
556 if (eh_list && region > 0)
557 call_has_abnormal_edge = 1;
560 /* If there is a nonlocal goto label and the specified
561 region number isn't -1, we have an edge. (0 means
562 no throw, but might have a nonlocal goto). */
563 if (nonlocal_goto_handler_labels && region >= 0)
564 call_has_abnormal_edge = 1;
572 int kind = NOTE_LINE_NUMBER (insn);
574 /* Keep a LIFO list of the currently active exception notes. */
575 if (kind == NOTE_INSN_EH_REGION_BEG)
576 eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list);
577 else if (kind == NOTE_INSN_EH_REGION_END)
578 eh_list = XEXP (eh_list, 1);
580 /* Look for basic block notes with which to keep the
581 basic_block_info pointers stable. Unthread the note now;
582 we'll put it back at the right place in create_basic_block.
583 Or not at all if we've already found a note in this block. */
584 else if (kind == NOTE_INSN_BASIC_BLOCK)
586 if (bb_note == NULL_RTX)
588 next = flow_delete_insn (insn);
595 /* A basic block starts at a label. If we've closed one off due
596 to a barrier or some such, no need to do it again. */
597 if (head != NULL_RTX)
599 /* While we now have edge lists with which other portions of
600 the compiler might determine a call ending a basic block
601 does not imply an abnormal edge, it will be a bit before
602 everything can be updated. So continue to emit a noop at
603 the end of such a block. */
604 if (GET_CODE (end) == CALL_INSN)
606 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
607 end = emit_insn_after (nop, end);
610 bb_eh_end[i] = eh_list;
611 create_basic_block (i++, head, end, bb_note);
618 /* A basic block ends at a jump. */
619 if (head == NULL_RTX)
623 /* ??? Make a special check for table jumps. The way this
624 happens is truly and amazingly gross. We are about to
625 create a basic block that contains just a code label and
626 an addr*vec jump insn. Worse, an addr_diff_vec creates
627 its own natural loop.
629 Prevent this bit of brain damage, pasting things together
630 correctly in make_edges.
632 The correct solution involves emitting the table directly
633 on the tablejump instruction as a note, or JUMP_LABEL. */
635 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
636 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
644 goto new_bb_inclusive;
647 /* A basic block ends at a barrier. It may be that an unconditional
648 jump already closed the basic block -- no need to do it again. */
649 if (head == NULL_RTX)
652 /* While we now have edge lists with which other portions of the
653 compiler might determine a call ending a basic block does not
654 imply an abnormal edge, it will be a bit before everything can
655 be updated. So continue to emit a noop at the end of such a
657 if (GET_CODE (end) == CALL_INSN)
659 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
660 end = emit_insn_after (nop, end);
662 goto new_bb_exclusive;
665 /* A basic block ends at a call that can either throw or
666 do a non-local goto. */
667 if (call_has_abnormal_edge)
670 if (head == NULL_RTX)
675 bb_eh_end[i] = eh_list;
676 create_basic_block (i++, head, end, bb_note);
677 head = end = NULL_RTX;
684 if (GET_RTX_CLASS (code) == 'i')
686 if (head == NULL_RTX)
693 if (GET_RTX_CLASS (code) == 'i')
697 /* Make a list of all labels referred to other than by jumps
698 (which just don't have the REG_LABEL notes).
700 Make a special exception for labels followed by an ADDR*VEC,
701 as this would be a part of the tablejump setup code.
703 Make a special exception for the eh_return_stub_label, which
704 we know isn't part of any otherwise visible control flow. */
706 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
707 if (REG_NOTE_KIND (note) == REG_LABEL)
709 rtx lab = XEXP (note, 0), next;
711 if (lab == eh_return_stub_label)
713 else if ((next = next_nonnote_insn (lab)) != NULL
714 && GET_CODE (next) == JUMP_INSN
715 && (GET_CODE (PATTERN (next)) == ADDR_VEC
716 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
720 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
726 if (head != NULL_RTX)
728 bb_eh_end[i] = eh_list;
729 create_basic_block (i++, head, end, bb_note);
732 if (i != n_basic_blocks)
735 return label_value_list;
738 /* Create a new basic block consisting of the instructions between
739 HEAD and END inclusive. Reuses the note and basic block struct
740 in BB_NOTE, if any.  INDEX is the slot in basic_block_info to fill. */
743 create_basic_block (index, head, end, bb_note)
745 rtx head, end, bb_note;
750 && ! RTX_INTEGRATED_P (bb_note)
751 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
754 /* If we found an existing note, thread it back onto the chain. */
756 if (GET_CODE (head) == CODE_LABEL
757 add_insn_after (bb_note, head);
760 add_insn_before (bb_note, head);
766 /* Otherwise we must create a note and a basic block structure.
767 Since we allow basic block structs in rtl, give the struct
768 the same lifetime by allocating it off the function obstack
769 rather than using malloc. */
771 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
772 memset (bb, 0, sizeof (*bb));
774 if (GET_CODE (head) == CODE_LABEL)
775 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
778 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
781 NOTE_BASIC_BLOCK (bb_note) = bb;
784 /* Always include the bb note in the block. */
785 if (NEXT_INSN (end) == bb_note)
791 BASIC_BLOCK (index) = bb;
793 /* Tag the block so that we know it has been used when considering
794 other basic block notes. */
798 /* Records the basic block struct in BB_FOR_INSN, for every instruction
799 indexed by INSN_UID. MAX is the size of the array.
   Walks each block's insn chain and maps uid -> containing block. */
802 compute_bb_for_insn (bb_for_insn, max)
803 varray_type bb_for_insn;
808 for (i = 0; i < n_basic_blocks; ++i)
810 basic_block bb = BASIC_BLOCK (i);
817 int uid = INSN_UID (insn);
819 VARRAY_BB (bb_for_insn, uid) = bb;
822 insn = NEXT_INSN (insn);
827 /* Free the memory associated with the edge structures.
   Frees every block's successor edges, then the entry block's, and
   finally clears the entry/exit edge list heads. */
835 for (i = 0; i < n_basic_blocks; ++i)
837 basic_block bb = BASIC_BLOCK (i);
839 for (e = bb->succ; e ; e = n)
849 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
855 ENTRY_BLOCK_PTR->succ = 0;
856 EXIT_BLOCK_PTR->pred = 0;
859 /* Identify the edges between basic blocks.
861 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
862 that are otherwise unreachable may be reachable with a non-local goto.
864 BB_EH_END is an array indexed by basic block number in which we record
865 the list of exception regions active at the end of the basic block. */
868 make_edges (label_value_list, bb_eh_end)
869 rtx label_value_list;
874 /* Assume no computed jump; revise as we create edges. */
875 current_function_has_computed_jump = 0;
877 /* By nature of the way these get numbered, block 0 is always the entry. */
878 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
880 for (i = 0; i < n_basic_blocks; ++i)
882 basic_block bb = BASIC_BLOCK (i);
883 rtx insn, x, eh_list;
885 int force_fallthru = 0;
887 /* If we have asynchronous exceptions, scan the notes for all exception
888 regions active in the block. In the normal case, we only need the
889 one active at the end of the block, which is bb_eh_end[i]. */
891 eh_list = bb_eh_end[i];
892 if (asynchronous_exceptions)
894 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
896 if (GET_CODE (insn) == NOTE
897 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
898 eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list);
902 /* Now examine the last instruction of the block, and discover the
903 ways we can leave the block. */
906 code = GET_CODE (insn);
909 if (code == JUMP_INSN)
913 /* ??? Recognize a tablejump and do the right thing. */
914 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
915 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
916 && GET_CODE (tmp) == JUMP_INSN
917 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
918 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
923 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
924 vec = XVEC (PATTERN (tmp), 0);
926 vec = XVEC (PATTERN (tmp), 1);
   /* Add an edge for every label in the dispatch table. */
928 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
929 make_label_edge (bb, XEXP (RTVEC_ELT (vec, j), 0), 0);
931 /* Some targets (eg, ARM) emit a conditional jump that also
932 contains the out-of-range target. Scan for these and
933 add an edge if necessary. */
934 if ((tmp = single_set (insn)) != NULL
935 && SET_DEST (tmp) == pc_rtx
936 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
937 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
938 make_label_edge (bb, XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
940 #ifdef CASE_DROPS_THROUGH
941 /* Silly VAXen. The ADDR_VEC is going to be in the way of
942 us naturally detecting fallthru into the next block. */
947 /* If this is a computed jump, then mark it as reaching
948 everything on the label_value_list and forced_labels list. */
949 else if (computed_jump_p (insn))
951 current_function_has_computed_jump = 1;
953 for (x = label_value_list; x; x = XEXP (x, 1))
954 make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL);
956 for (x = forced_labels; x; x = XEXP (x, 1))
957 make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL);
960 /* A return insn creates an edge to the exit block. */
961 else if (returnjump_p (insn))
962 make_edge (bb, EXIT_BLOCK_PTR, 0);
964 /* Otherwise, we have a plain conditional or unconditional jump. */
967 if (! JUMP_LABEL (insn))
969 make_label_edge (bb, JUMP_LABEL (insn), 0);
973 /* If this is a CALL_INSN, then mark it as reaching the active EH
974 handler for this CALL_INSN. If we're handling asynchronous
975 exceptions then any insn can reach any of the active handlers.
977 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
979 if (code == CALL_INSN || asynchronous_exceptions)
981 int is_call = (code == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
984 /* Use REG_EH_RETHROW and REG_EH_REGION if available. */
985 /* ??? REG_EH_REGION is not generated presently. Is it
986 intended that there be multiple notes for the regions?
987 or is my eh_list collection redundant with handler linking? */
989 x = find_reg_note (insn, REG_EH_RETHROW, 0);
991 x = find_reg_note (insn, REG_EH_REGION, 0);
994 if (XINT (XEXP (x, 0), 0) > 0)
996 ptr = get_first_handler (XINT (XEXP (x, 0), 0));
999 make_label_edge (bb, ptr->handler_label,
1000 EDGE_ABNORMAL | EDGE_EH | is_call);
1007 for (x = eh_list; x; x = XEXP (x, 1))
1009 ptr = get_first_handler (NOTE_BLOCK_NUMBER (XEXP (x, 0)));
1012 make_label_edge (bb, ptr->handler_label,
1013 EDGE_ABNORMAL | EDGE_EH | is_call);
1019 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1021 /* ??? This could be made smarter: in some cases it's possible
1022 to tell that certain calls will not do a nonlocal goto.
1024 For example, if the nested functions that do the nonlocal
1025 gotos do not have their addresses taken, then only calls to
1026 those functions or to other nested functions that use them
1027 could possibly do nonlocal gotos. */
1029 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1030 make_label_edge (bb, XEXP (x, 0),
1031 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1035 /* We know something about the structure of the function __throw in
1036 libgcc2.c. It is the only function that ever contains eh_stub
1037 labels. It modifies its return address so that the last block
1038 returns to one of the eh_stub labels within it. So we have to
1039 make additional edges in the flow graph. */
1040 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1041 make_label_edge (bb, eh_return_stub_label, EDGE_EH);
1043 /* Find out if we can drop through to the next block. */
1044 insn = next_nonnote_insn (insn);
1045 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1046 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1047 else if (i + 1 < n_basic_blocks)
1049 rtx tmp = BLOCK_HEAD (i + 1);
1050 if (GET_CODE (tmp) == NOTE)
1051 tmp = next_nonnote_insn (tmp);
1052 if (force_fallthru || insn == tmp)
1053 make_edge (bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1058 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1059 about the edge that is accumulated between calls.
   If an edge SRC->DST already exists, no duplicate is added; the new
   edge is otherwise pushed on both blocks' edge lists. */
1062 make_edge (src, dst, flags)
1063 basic_block src, dst;
1068 /* Make sure we don't add duplicate edges. */
1070 for (e = src->succ; e ; e = e->succ_next)
1077 e = (edge) xcalloc (1, sizeof (*e));
1079 e->succ_next = src->succ;
1080 e->pred_next = dst->pred;
1089 /* Create an edge from a basic block to a label.
   Silently does nothing if LABEL is not a CODE_LABEL or was never
   emitted (uid 0). */
1092 make_label_edge (src, label, flags)
1097 if (GET_CODE (label) != CODE_LABEL)
1100 /* If the label was never emitted, this insn is junk, but avoid a
1101 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1102 as a result of a syntax error and a diagnostic has already been
1105 if (INSN_UID (label) == 0)
1108 make_edge (src, BLOCK_FOR_INSN (label), flags);
1111 /* Identify critical edges and set the bits appropriately.
   An edge is critical iff its source has multiple successors AND its
   destination has multiple predecessors. */
1113 mark_critical_edges ()
1115 int i, n = n_basic_blocks;
1118 /* We begin with the entry block. This is not terribly important now,
1119 but could be if a front end (Fortran) implemented alternate entry
1121 bb = ENTRY_BLOCK_PTR;
1128 /* (1) Critical edges must have a source with multiple successors. */
1129 if (bb->succ && bb->succ->succ_next)
1131 for (e = bb->succ; e ; e = e->succ_next)
1133 /* (2) Critical edges must have a destination with multiple
1134 predecessors. Note that we know there is at least one
1135 predecessor -- the edge we followed to get here. */
1136 if (e->dest->pred->pred_next)
1137 e->flags |= EDGE_CRITICAL;
1139 e->flags &= ~EDGE_CRITICAL;
1144 for (e = bb->succ; e ; e = e->succ_next)
1145 e->flags &= ~EDGE_CRITICAL;
1150 bb = BASIC_BLOCK (i);
/* NOTE(review): elided listing -- many interior lines are missing; do not
   treat this as compilable.  split_edge inserts a new empty basic block BB
   on EDGE_IN (aborting on abnormal edges), rewires the pred/succ edge
   lists, allocates live-register sets for BB, and for non-fallthru edges
   redirects the predecessor's jump (including tablejump rows) to a fresh
   label at the head of BB.  */
1154 /* Split a (typically critical) edge. Return the new block.
1155 Abort on abnormal edges.
1157 ??? The code generally expects to be called on critical edges.
1158 The case of a block ending in an unconditional jump to a
1159 block with multiple predecessors is not handled optimally. */
1162 split_edge (edge_in)
1165 basic_block old_pred, bb, old_succ;
1170 /* Abnormal edges cannot be split. */
1171 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1174 old_pred = edge_in->src;
1175 old_succ = edge_in->dest;
1177 /* Remove the existing edge from the destination's pred list. */
1180 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1182 *pp = edge_in->pred_next;
1183 edge_in->pred_next = NULL;
1186 /* Create the new structures. */
1187 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1188 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1190 memset (bb, 0, sizeof (*bb));
1191 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
1192 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1193 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1195 /* ??? This info is likely going to be out of date very soon. */
1196 CLEAR_REG_SET (bb->local_set);
1197 if (old_succ->global_live_at_start)
/* The new empty block inherits the successor's live-at-start set at both
   ends, since nothing is live-born inside it yet.  */
1199 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1200 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1204 CLEAR_REG_SET (bb->global_live_at_start);
1205 CLEAR_REG_SET (bb->global_live_at_end);
1210 bb->succ = edge_out;
/* Splitting destroys criticality: BB now has a single predecessor.  */
1213 edge_in->flags &= ~EDGE_CRITICAL;
1215 edge_out->pred_next = old_succ->pred;
1216 edge_out->succ_next = NULL;
1218 edge_out->dest = old_succ;
1219 edge_out->flags = EDGE_FALLTHRU;
1220 edge_out->probability = REG_BR_PROB_BASE;
1222 old_succ->pred = edge_out;
1224 /* Tricky case -- if there existed a fallthru into the successor
1225 (and we're not it) we must add a new unconditional jump around
1226 the new block we're actually interested in.
1228 Further, if that edge is critical, this means a second new basic
1229 block must be created to hold it. In order to simplify correct
1230 insn placement, do this before we touch the existing basic block
1231 ordering for the block we were really wanting. */
1232 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1235 for (e = edge_out->pred_next; e ; e = e->pred_next)
1236 if (e->flags & EDGE_FALLTHRU)
1241 basic_block jump_block;
1244 if ((e->flags & EDGE_CRITICAL) == 0)
1246 /* Non critical -- we can simply add a jump to the end
1247 of the existing predecessor. */
1248 jump_block = e->src;
1252 /* We need a new block to hold the jump. The simplest
1253 way to do the bulk of the work here is to recursively
1255 jump_block = split_edge (e);
1256 e = jump_block->succ;
1259 /* Now add the jump insn ... */
1260 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1262 jump_block->end = pos;
1263 emit_barrier_after (pos);
1265 /* ... let jump know that label is in use, ... */
1266 ++LABEL_NUSES (old_succ->head);
1268 /* ... and clear fallthru on the outgoing edge. */
1269 e->flags &= ~EDGE_FALLTHRU;
1271 /* Continue splitting the interesting edge. */
1275 /* Place the new block just in front of the successor. */
1276 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1277 for (i = n_basic_blocks - 1; i > old_succ->index; --i)
1279 basic_block tmp = BASIC_BLOCK (i - 1);
1280 BASIC_BLOCK (i) = tmp;
1283 BASIC_BLOCK (i) = bb;
1286 /* Create the basic block note. */
1287 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1288 NOTE_BASIC_BLOCK (bb_note) = bb;
1289 bb->head = bb->end = bb_note;
1291 /* Not quite simple -- for non-fallthru edges, we must adjust the
1292 predecessor's jump instruction to target our new block. */
1293 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1295 rtx tmp, insn = old_pred->end;
1296 rtx old_label = old_succ->head;
1297 rtx new_label = gen_label_rtx ();
1299 if (GET_CODE (insn) != JUMP_INSN)
1302 /* ??? Recognize a tablejump and adjust all matching cases. */
1303 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1304 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1305 && GET_CODE (tmp) == JUMP_INSN
1306 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1307 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
/* ADDR_DIFF_VEC keeps its label vector in operand 1, ADDR_VEC in 0.  */
1312 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1313 vec = XVEC (PATTERN (tmp), 0);
1315 vec = XVEC (PATTERN (tmp), 1);
1317 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1318 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1320 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1321 --LABEL_NUSES (old_label);
1322 ++LABEL_NUSES (new_label);
1327 /* This would have indicated an abnormal edge. */
1328 if (computed_jump_p (insn))
1331 /* A return instruction can't be redirected. */
1332 if (returnjump_p (insn))
1335 /* If the insn doesn't go where we think, we're confused. */
1336 if (JUMP_LABEL (insn) != old_label)
1339 redirect_jump (insn, new_label);
1342 emit_label_before (new_label, bb_note);
1343 bb->head = new_label;
/* NOTE(review): elided listing -- interior lines missing.  Appends
   PATTERN to the pending-insn sequence hanging off edge E (e->insns);
   the insns only enter the insn stream when commit_edge_insertions
   runs.  Dies early on abnormal critical edges, where insertion is
   impossible.  */
1349 /* Queue instructions for insertion on an edge between two basic blocks.
1350 The new instructions and basic blocks (if any) will not appear in the
1351 CFG until commit_edge_insertions is called. */
1354 insert_insn_on_edge (pattern, e)
1358 /* We cannot insert instructions on an abnormal critical edge.
1359 It will be easier to find the culprit if we die now. */
1360 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1361 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1364 if (e->insns == NULL_RTX)
/* Continue an existing queued sequence so multiple insertions on the
   same edge accumulate in order.  */
1367 push_to_sequence (e->insns);
1369 emit_insn (pattern);
1371 e->insns = get_insns ();
/* NOTE(review): elided listing -- interior lines missing.  Materializes
   the insns queued on edge E: if the destination has a single
   predecessor (and isn't the exit block) insert at its head; else if
   the source has a single non-abnormal successor (and isn't the entry
   block) insert at its end; otherwise split the edge and insert into
   the new block.  */
1375 /* Update the CFG for the instructions queued on edge E. */
1378 commit_one_edge_insertion (e)
1381 rtx before = NULL_RTX, after = NULL_RTX, tmp;
1384 /* Figure out where to put these things. If the destination has
1385 one predecessor, insert there. Except for the exit block. */
1386 if (e->dest->pred->pred_next == NULL
1387 && e->dest != EXIT_BLOCK_PTR)
1391 /* Get the location correct wrt a code label, and "nice" wrt
1392 a basic block note, and before everything else. */
1394 if (GET_CODE (tmp) == CODE_LABEL)
1395 tmp = NEXT_INSN (tmp)
1396 if (GET_CODE (tmp) == NOTE
1397 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1398 tmp = NEXT_INSN (tmp);
1399 if (tmp == bb->head)
1402 after = PREV_INSN (tmp);
1405 /* If the source has one successor and the edge is not abnormal,
1406 insert there. Except for the entry block. */
1407 else if ((e->flags & EDGE_ABNORMAL) == 0
1408 && e->src->succ->succ_next == NULL
1409 && e->src != ENTRY_BLOCK_PTR)
1412 if (GET_CODE (bb->end) == JUMP_INSN)
1414 /* ??? Is it possible to wind up with non-simple jumps? Perhaps
1415 a jump with delay slots already filled? */
1416 if (! simplejump_p (bb->end))
1423 /* We'd better be fallthru, or we've lost track of what's what. */
1424 if ((e->flags & EDGE_FALLTHRU) == 0)
1431 /* Otherwise we must split the edge. */
1434 bb = split_edge (e);
1438 /* Now that we've found the spot, do the insertion. */
/* Clear the queue before emitting so the edge no longer claims the
   insns once they live in the stream.  */
1440 e->insns = NULL_RTX;
1443 emit_insns_before (tmp, before);
1444 if (before == bb->head)
1449 tmp = emit_insns_after (tmp, after);
1450 if (after == bb->end)
/* NOTE(review): elided listing -- interior lines missing.  Iterates
   every block from the entry block through BASIC_BLOCK(n-1) and commits
   each outgoing edge's queued insns; NEXT is cached because
   commit_one_edge_insertion may rewrite the succ list.  */
1455 /* Update the CFG for all queued instructions. */
1458 commit_edge_insertions ()
1464 bb = ENTRY_BLOCK_PTR;
1469 for (e = bb->succ; e ; e = next)
1471 next = e->succ_next;
1473 commit_one_edge_insertion (e);
1476 if (++i >= n_basic_blocks)
1478 bb = BASIC_BLOCK (i);
/* NOTE(review): elided listing -- interior lines missing.  Performs a
   worklist DFS from the entry block's successors marking reachable
   blocks via bb->aux, deletes every unmarked block (counting down so
   renumbering in delete_block doesn't disturb the walk), then tidies
   fallthru edges between now-adjacent blocks and merges single-succ /
   single-pred block pairs.  Finally removes EH regions whose handlers
   were deleted.  */
1482 /* Delete all unreachable basic blocks. */
1485 delete_unreachable_blocks ()
1487 basic_block *worklist, *tos;
1488 int deleted_handler;
1493 tos = worklist = (basic_block *) alloca (sizeof (basic_block) * n);
1495 /* Use basic_block->aux as a marker. Clear them all. */
1497 for (i = 0; i < n; ++i)
1498 BASIC_BLOCK (i)->aux = NULL;
1500 /* Add our starting points to the worklist. Almost always there will
1501 be only one. It isn't inconcievable that we might one day directly
1502 support Fortran alternate entry points. */
1504 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1508 /* Mark the block with a handy non-null value. */
1512 /* Iterate: find everything reachable from what we've already seen. */
1514 while (tos != worklist)
1516 basic_block b = *--tos;
1518 for (e = b->succ; e ; e = e->succ_next)
1526 /* Delete all unreachable basic blocks. Count down so that we don't
1527 interfere with the block renumbering that happens in delete_block. */
1529 deleted_handler = 0;
1531 for (i = n - 1; i >= 0; --i)
1533 basic_block b = BASIC_BLOCK (i);
1536 /* This block was found. Tidy up the mark. */
1539 deleted_handler |= delete_block (b);
1542 /* Fix up edges that now fall through, or rather should now fall through
1543 but previously required a jump around now deleted blocks. Simplify
1544 the search by only examining blocks numerically adjacent, since this
1545 is how find_basic_blocks created them. */
1547 for (i = 1; i < n_basic_blocks; ++i)
1549 basic_block b = BASIC_BLOCK (i - 1);
1550 basic_block c = BASIC_BLOCK (i);
1553 /* We care about simple conditional or unconditional jumps with
1556 If we had a conditional branch to the next instruction when
1557 find_basic_blocks was called, then there will only be one
1558 out edge for the block which ended with the conditional
1559 branch (since we do not create duplicate edges).
1561 Furthermore, the edge will be marked as a fallthru because we
1562 merge the flags for the duplicate edges. So we do not want to
1563 check that the edge is not a FALLTHRU edge. */
1564 if ((s = b->succ) != NULL
1565 && s->succ_next == NULL
1567 /* If the last insn is not a normal conditional jump
1568 (or an unconditional jump), then we can not tidy the
1569 fallthru edge because we can not delete the jump. */
1570 && GET_CODE (b->end) == JUMP_INSN
1571 && condjump_p (b->end))
1572 tidy_fallthru_edge (s, b, c);
1575 /* Attempt to merge blocks as made possible by edge removal. If a block
1576 has only one successor, and the successor has only one predecessor,
1577 they may be combined. */
1579 for (i = 0; i < n_basic_blocks; )
1581 basic_block c, b = BASIC_BLOCK (i);
1584 /* A loop because chains of blocks might be combineable. */
1585 while ((s = b->succ) != NULL
1586 && s->succ_next == NULL
1587 && (s->flags & EDGE_EH) == 0
1588 && (c = s->dest) != EXIT_BLOCK_PTR
1589 && c->pred->pred_next == NULL
1590 /* If the last insn is not a normal conditional jump
1591 (or an unconditional jump), then we can not merge
1592 the blocks because we can not delete the jump. */
1593 && GET_CODE (b->end) == JUMP_INSN
1594 && condjump_p (b->end)
1595 && merge_blocks (s, b, c))
1598 /* Don't get confused by the index shift caused by deleting blocks. */
1602 /* If we deleted an exception handler, we may have EH region begin/end
1603 blocks to remove as well. */
1604 if (deleted_handler)
1605 delete_eh_regions ();
/* NOTE(review): elided listing -- interior lines missing.  Scans the
   whole insn stream for EH_REGION_BEG/END notes and turns those whose
   region no longer has a handler into deleted notes.  */
1608 /* Find EH regions for which there is no longer a handler, and delete them. */
1611 delete_eh_regions ()
1615 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1616 if (GET_CODE (insn) == NOTE)
1618 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1619 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
/* NOTE(review): CODE_LABEL_NUMBER applied to a NOTE here reads the
   region number stored in the note -- presumably an overlapping rtx
   field in this rtl version; verify against rtl.h of this snapshot.  */
1621 int num = CODE_LABEL_NUMBER (insn);
1622 /* A NULL handler indicates a region is no longer needed */
1623 if (get_first_handler (num) == NULL)
1625 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1626 NOTE_SOURCE_FILE (insn) = 0;
/* NOTE(review): elided listing.  Predicate: only already-deleted notes
   and basic-block notes are freely deletable; paired notes (loop,
   EH region, block begin/end) must be preserved.  */
1632 /* Return true if NOTE is not one of the ones that must be kept paired,
1633 so that we may simply delete them. */
1636 can_delete_note_p (note)
1639 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1640 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
/* NOTE(review): elided listing -- interior lines missing.  Deletes the
   insns from START to FINISH inclusive, one at a time, skipping
   must-keep notes and undeletable labels.  */
1643 /* Unlink a chain of insns between START and FINISH, leaving notes
1644 that must be paired. */
1647 delete_insn_chain (start, finish)
1650 /* Unchain the insns one by one. It would be quicker to delete all
1651 of these with a single unchaining, rather than one at a time, but
1652 we need to keep the NOTE's. */
1658 next = NEXT_INSN (start);
1659 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1661 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1664 next = flow_delete_insn (start);
1666 if (start == finish)
/* NOTE(review): elided listing -- the function's definition line itself
   is among the missing lines; callers at the delete_unreachable_blocks
   site name it delete_block.  Deletes a dead block's insns (including a
   trailing jump table and barrier), splices its label out of the EH
   handler list if present, unlinks all in/out edges, and compacts the
   block array.  Returns nonzero when an exception handler was removed.  */
1672 /* Delete the insns in a (non-live) block. We physically delete every
1673 non-deleted-note insn, and update the flow graph appropriately.
1675 Return nonzero if we deleted an exception handler. */
1677 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1678 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1684 int deleted_handler = 0;
1687 /* If the head of this block is a CODE_LABEL, then it might be the
1688 label for an exception handler which can't be reached.
1690 We need to remove the label from the exception_handler_label list
1691 and remove the associated NOTE_EH_REGION_BEG and NOTE_EH_REGION_END
1696 if (GET_CODE (insn) == CODE_LABEL)
1698 rtx x, *prev = &exception_handler_labels;
1700 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1702 if (XEXP (x, 0) == insn)
1704 /* Found a match, splice this label out of the EH label list. */
1705 *prev = XEXP (x, 1);
1706 XEXP (x, 1) = NULL_RTX;
1707 XEXP (x, 0) = NULL_RTX;
1709 /* Remove the handler from all regions */
1710 remove_handler (insn);
1711 deleted_handler = 1;
1714 prev = &XEXP (x, 1);
1717 /* This label may be referenced by code solely for its value, or
1718 referenced by static data, or something. We have determined
1719 that it is not reachable, but cannot delete the label itself.
1720 Save code space and continue to delete the balance of the block,
1721 along with properly updating the cfg. */
1722 if (!can_delete_label_p (insn))
1724 /* If we've only got one of these, skip the whole deleting
1727 goto no_delete_insns;
1728 insn = NEXT_INSN (insn);
1732 /* Include any jump table following the basic block. */
1734 if (GET_CODE (end) == JUMP_INSN
1735 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1736 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1737 && GET_CODE (tmp) == JUMP_INSN
1738 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1739 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1742 /* Include any barrier that may follow the basic block. */
1743 tmp = next_nonnote_insn (b->end);
1744 if (tmp && GET_CODE (tmp) == BARRIER)
1747 delete_insn_chain (insn, end);
1751 /* Remove the edges into and out of this block. Note that there may
1752 indeed be edges in, if we are removing an unreachable loop. */
1756 for (e = b->pred; e ; e = next)
1758 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1761 next = e->pred_next;
1764 for (e = b->succ; e ; e = next)
1766 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1769 next = e->succ_next;
1777 /* Remove the basic block from the array, and compact behind it. */
1780 return deleted_handler;
/* NOTE(review): elided listing -- the definition line is missing; the
   comment says it removes block B from the basic-block array and shifts
   the following blocks down, then shrinks the varray by one.  (Name
   guessed from later GCC versions; confirm against the full file.)  */
1783 /* Remove block B from the basic block array and compact behind it. */
1789 int i, n = n_basic_blocks;
1791 for (i = b->index; i + 1 < n; ++i)
1793 basic_block x = BASIC_BLOCK (i + 1);
1794 BASIC_BLOCK (i) = x;
1798 basic_block_info->num_elements--;
/* NOTE(review): elided listing -- interior lines missing.  Unlinks INSN
   from the doubly-linked insn chain (fixing up last_insn when INSN was
   the tail), drops it from the nonlocal-goto handler label list if it
   is a CODE_LABEL, and decrements label use counts for jump targets or
   REG_LABEL notes.  Returns the next insn.  */
1802 /* Delete INSN by patching it out. Return the next insn. */
1805 flow_delete_insn (insn)
1808 rtx prev = PREV_INSN (insn);
1809 rtx next = NEXT_INSN (insn);
1812 PREV_INSN (insn) = NULL_RTX;
1813 NEXT_INSN (insn) = NULL_RTX;
1816 NEXT_INSN (prev) = next;
1818 PREV_INSN (next) = prev;
1820 set_last_insn (prev);
1822 if (GET_CODE (insn) == CODE_LABEL)
1823 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1825 /* If deleting a jump, decrement the use count of the label. Deleting
1826 the label itself should happen in the normal course of block merging. */
1827 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1828 LABEL_NUSES (JUMP_LABEL (insn))--;
1830 /* Also if deleting an insn that references a label. */
1831 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
1832 LABEL_NUSES (XEXP (note, 0))--;
/* NOTE(review): elided listing -- interior lines (including the returns)
   are missing.  A label is NOT deletable if it is preserved, forced,
   value-referenced, an EH handler label, or user-declared (has a
   LABEL_NAME).  */
1837 /* True if a given label can be deleted. */
1840 can_delete_label_p (label)
1845 if (LABEL_PRESERVE_P (label))
1848 for (x = forced_labels; x ; x = XEXP (x, 1))
1849 if (label == XEXP (x, 0))
1851 for (x = label_value_list; x ; x = XEXP (x, 1))
1852 if (label == XEXP (x, 0))
1854 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
1855 if (label == XEXP (x, 0))
1858 /* User declared labels must be preserved. */
1859 if (LABEL_NAME (label) != 0)
/* NOTE(review): elided listing -- interior lines missing.  Fuses
   physically adjacent blocks A and B: deletes B's leading label and
   basic-block note, deletes A's trailing jump (and, on cc0 machines,
   the insn that set cc0 for a conditional jump), retargets B's outgoing
   edges and insns to A, and compacts the block array.  */
1865 /* Blocks A and B are to be merged into a single block. The insns
1866 are already contiguous, hence `nomove'. */
1869 merge_blocks_nomove (a, b)
1873 rtx b_head, b_end, a_end;
1876 /* If there was a CODE_LABEL beginning B, delete it. */
1879 if (GET_CODE (b_head) == CODE_LABEL)
1881 /* Detect basic blocks with nothing but a label. This can happen
1882 in particular at the end of a function. */
1883 if (b_head == b_end)
1885 b_head = flow_delete_insn (b_head);
1888 /* Delete the basic block note. */
1889 if (GET_CODE (b_head) == NOTE
1890 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
1892 if (b_head == b_end)
1894 b_head = flow_delete_insn (b_head);
1897 /* If there was a jump out of A, delete it. */
1899 if (GET_CODE (a_end) == JUMP_INSN)
1903 prev = prev_nonnote_insn (a_end);
1908 /* If this was a conditional jump, we need to also delete
1909 the insn that set cc0. */
1911 if (prev && sets_cc0_p (prev))
1914 prev = prev_nonnote_insn (prev);
1917 flow_delete_insn (tmp);
1921 /* Note that a->head != a->end, since we should have at least a
1922 bb note plus the jump, so prev != insn. */
1923 flow_delete_insn (a_end);
1927 /* By definition, there should only be one successor of A, and that is
1928 B. Free that edge struct. */
1931 /* Adjust the edges out of B for the new owner. */
1932 for (e = b->succ; e ; e = e->succ_next)
1936 /* Reassociate the insns of B with A. */
1939 BLOCK_FOR_INSN (b_head) = a;
1940 while (b_head != b_end)
1942 b_head = NEXT_INSN (b_head);
1943 BLOCK_FOR_INSN (b_head) = a;
1949 /* Compact the basic block array. */
/* NOTE(review): elided listing -- interior lines missing.  Attempts to
   merge B into C across edge E.  Only the fallthru case is implemented
   here (non-fallthru cases are described but their code is elided);
   merging is refused if an undeletable label lies between B's end and
   C's head.  Returns true iff the merge happened.  */
1953 /* Attempt to merge basic blocks that are potentially non-adjacent.
1954 Return true iff the attempt succeeded. */
1957 merge_blocks (e, b, c)
1961 /* If B has a fallthru edge to C, no need to move anything. */
1962 if (!(e->flags & EDGE_FALLTHRU))
1964 /* ??? From here on out we must make sure to not munge nesting
1965 of exception regions and lexical blocks. Need to think about
1966 these cases before this gets implemented. */
1969 /* If C has an outgoing fallthru, and B does not have an incoming
1970 fallthru, move B before C. The later clause is somewhat arbitrary,
1971 but avoids modifying blocks other than the two we've been given. */
1973 /* Otherwise, move C after B. If C had a fallthru, which doesn't
1974 happen to be the physical successor to B, insert an unconditional
1975 branch. If C already ended with a conditional branch, the new
1976 jump must go in a new basic block D. */
1979 /* If a label still appears somewhere and we cannot delete the label,
1980 then we cannot merge the blocks. The edge was tidied already. */
1982 rtx insn, stop = NEXT_INSN (c->head);
1983 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
1984 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
1988 merge_blocks_nomove (b, c);
/* NOTE(review): elided listing -- interior lines missing.  Given edge E
   from B to C where B's jump targets the physically next block, deletes
   the now-redundant jump (turning it into a deleted note, plus the cc0
   setter for conditional jumps), unlinks intervening barriers/labels/
   notes, and marks E as a fallthru edge.  */
1992 /* The given edge should potentially a fallthru edge. If that is in
1993 fact true, delete the unconditional jump and barriers that are in
1997 tidy_fallthru_edge (e, b, c)
2003 /* ??? In a late-running flow pass, other folks may have deleted basic
2004 blocks by nopping out blocks, leaving multiple BARRIERs between here
2005 and the target label. They ought to be chastized and fixed.
2007 We can also wind up with a sequence of undeletable labels between
2008 one block and the next.
2010 So search through a sequence of barriers, labels, and notes for
2011 the head of block C and assert that we really do fall through. */
2013 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2016 /* Remove what will soon cease being the jump insn from the source block.
2017 If block B consisted only of this single jump, turn it into a deleted
2020 if (GET_CODE (q) == JUMP_INSN)
2023 /* If this was a conditional jump, we need to also delete
2024 the insn that set cc0. */
2025 if (! simplejump_p (q) && condjump_p (q))
2032 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2033 NOTE_SOURCE_FILE (q) = 0;
2036 b->end = q = PREV_INSN (q);
2039 /* Selectively unlink the sequence. */
2040 if (q != PREV_INSN (c->head))
2041 delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2043 e->flags |= EDGE_FALLTHRU;
/* NOTE(review): elided listing -- interior lines missing.  Walks the
   insn stream once, incrementing/decrementing DEPTH on LOOP_BEG /
   LOOP_END notes and stamping the current depth onto each block when
   its head insn is reached.  Depth starts at 1 so depth 0 signals a
   bookkeeping error.  */
2046 /* Discover and record the loop depth at the head of each basic block. */
2049 calculate_loop_depth (insns)
2054 int i = 0, depth = 1;
2056 bb = BASIC_BLOCK (i);
2057 for (insn = insns; insn ; insn = NEXT_INSN (insn))
2059 if (insn == bb->head)
2061 bb->loop_depth = depth;
2062 if (++i >= n_basic_blocks)
2064 bb = BASIC_BLOCK (i);
2067 if (GET_CODE (insn) == NOTE)
2069 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2071 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2074 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error. */
/* NOTE(review): elided listing -- interior lines missing.  Entry point
   for liveness: records eliminable registers in elim_reg_set, allocates
   the uid_volatile bitmap, sets up alias analysis, runs
   life_analysis_1, optionally dumps flow info, and frees the basic
   block variables (keeping basic_block_info).  */
2081 /* Perform data flow analysis.
2082 F is the first insn of the function and NREGS the number of register numbers
2086 life_analysis (f, nregs, file, remove_dead_code)
2090 int remove_dead_code;
2092 #ifdef ELIMINABLE_REGS
2094 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2097 /* Record which registers will be eliminated. We use this in
2100 CLEAR_HARD_REG_SET (elim_reg_set);
2102 #ifdef ELIMINABLE_REGS
2103 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
2104 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2106 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2109 /* Allocate a bitmap to be filled in by record_volatile_insns. */
2110 uid_volatile = BITMAP_ALLOCA ();
2112 /* We want alias analysis information for local dead store elimination. */
2113 init_alias_analysis ();
2114 life_analysis_1 (f, nregs, remove_dead_code);
2115 end_alias_analysis ();
2118 dump_flow_info (file);
2120 BITMAP_FREE (uid_volatile);
2121 free_basic_block_vars (1);
/* NOTE(review): elided listing -- interior lines missing.  Releases
   basic_block_for_insn always; releases basic_block_info and clears the
   entry/exit block aux and live sets only when KEEP_HEAD_END_P is
   zero.  */
2124 /* Free the variables allocated by find_basic_blocks.
2126 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2129 free_basic_block_vars (keep_head_end_p)
2130 int keep_head_end_p;
2132 if (basic_block_for_insn)
2134 VARRAY_FREE (basic_block_for_insn);
2135 basic_block_for_insn = NULL;
2138 if (! keep_head_end_p)
2141 VARRAY_FREE (basic_block_info);
2144 ENTRY_BLOCK_PTR->aux = NULL;
2145 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2146 EXIT_BLOCK_PTR->aux = NULL;
2147 EXIT_BLOCK_PTR->global_live_at_start = NULL;
/* NOTE(review): elided listing -- the definition line and returns are
   missing.  Predicate on a SET rtx: true when source and destination
   are the same register, either directly or through matching SUBREGs
   with equal SUBREG_WORDs.  */
2151 /* Return nonzero if the destination of SET equals the source. */
2156 rtx src = SET_SRC (set);
2157 rtx dst = SET_DEST (set);
2158 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2159 && REGNO (src) == REGNO (dst))
2161 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2162 || SUBREG_WORD (src) != SUBREG_WORD (dst))
2164 src = SUBREG_REG (src);
2165 dst = SUBREG_REG (dst);
2166 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2167 && REGNO (src) == REGNO (dst))
/* NOTE(review): elided listing -- the definition line and returns are
   missing.  Predicate on an insn: true when the pattern is a no-op SET
   (or a PARALLEL of no-op SETs / USE / CLOBBER) and the insn carries no
   REG_EQUAL note that later passes would want to keep.  */
2172 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2178 rtx pat = PATTERN (insn);
2180 /* Insns carrying these notes are useful later on. */
2181 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2184 if (GET_CODE (pat) == SET && set_noop_p (pat))
2187 if (GET_CODE (pat) == PARALLEL)
2190 /* If nothing but SETs of registers to themselves,
2191 this insn can also be deleted. */
2192 for (i = 0; i < XVECLEN (pat, 0); i++)
2194 rtx tem = XVECEXP (pat, 0, i);
2196 if (GET_CODE (tem) == USE
2197 || GET_CODE (tem) == CLOBBER)
2200 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
/* NOTE(review): elided listing -- interior lines missing.  note_stores
   callback: clears current_function_sp_is_unchanging when X is the
   stack pointer itself or a MEM whose address auto-modifies the stack
   pointer (PRE/POST INC/DEC).  */
2210 notice_stack_pointer_modification (x, pat)
2212 rtx pat ATTRIBUTE_UNUSED;
2214 if (x == stack_pointer_rtx
2215 /* The stack pointer is only modified indirectly as the result
2216 of a push until later in flow. See the comments in rtl.texi
2217 regarding Embedded Side-Effects on Addresses. */
2218 || (GET_CODE (x) == MEM
2219 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2220 || GET_CODE (XEXP (x, 0)) == PRE_INC
2221 || GET_CODE (XEXP (x, 0)) == POST_DEC
2222 || GET_CODE (XEXP (x, 0)) == POST_INC)
2223 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2224 current_function_sp_is_unchanging = 0;
/* NOTE(review): elided listing -- interior lines missing.  Single pass
   over the insn stream: marks CALL_INSNs, volatile-referencing insns,
   and stack-allocating SETs as volatile (never dead-store-deletable);
   turns obvious no-op moves into deleted notes; and watches every store
   for stack-pointer modification.  */
2227 /* Record which insns refer to any volatile memory
2228 or for any reason can't be deleted just because they are dead stores.
2229 Also, delete any insns that copy a register to itself.
2230 And see if the stack pointer is modified. */
2232 record_volatile_insns (f)
2236 for (insn = f; insn; insn = NEXT_INSN (insn))
2238 enum rtx_code code1 = GET_CODE (insn);
2239 if (code1 == CALL_INSN)
2240 SET_INSN_VOLATILE (insn);
2241 else if (code1 == INSN || code1 == JUMP_INSN)
2243 if (GET_CODE (PATTERN (insn)) != USE
2244 && volatile_refs_p (PATTERN (insn)))
2245 SET_INSN_VOLATILE (insn);
2247 /* A SET that makes space on the stack cannot be dead.
2248 (Such SETs occur only for allocating variable-size data,
2249 so they will always have a PLUS or MINUS according to the
2250 direction of stack growth.)
2251 Even if this function never uses this stack pointer value,
2252 signal handlers do! */
2253 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
2254 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2255 #ifdef STACK_GROWS_DOWNWARD
2256 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
2258 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2260 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
2261 SET_INSN_VOLATILE (insn);
2263 /* Delete (in effect) any obvious no-op moves. */
2264 else if (noop_move_p (insn))
2266 PUT_CODE (insn, NOTE);
2267 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2268 NOTE_SOURCE_FILE (insn) = 0;
2272 /* Check if insn modifies the stack pointer. */
2273 if ( current_function_sp_is_unchanging
2274 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2275 note_stores (PATTERN (insn), notice_stack_pointer_modification);
/* NOTE(review): elided listing -- interior lines missing.  Populates SET
   with registers that must be considered live at function exit: the
   stack pointer (when the exit sequence needs its value), the frame
   pointer(s) before elimination, and all global / epilogue-used hard
   registers.  */
2279 /* Mark those regs which are needed at the end of the function as live
2280 at the end of the last basic block. */
2282 mark_regs_live_at_end (set)
2287 /* If exiting needs the right stack value, consider the stack pointer
2288 live at the end of the function. */
2289 if (! EXIT_IGNORE_STACK
2290 || (! FRAME_POINTER_REQUIRED
2291 && ! current_function_calls_alloca
2292 && flag_omit_frame_pointer)
2293 || current_function_sp_is_unchanging)
2295 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2298 /* Mark the frame pointer if needed at the end of the function. If
2299 we end up eliminating it, it will be removed from the live list
2300 of each basic block by reload. */
2302 if (! reload_completed || frame_pointer_needed)
2304 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2305 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2306 /* If they are different, also mark the hard frame pointer as live */
2307 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2311 /* Mark all global registers, and all registers used by the epilogue
2312 as being live at the end of the function since they may be
2313 referenced by our caller. */
2314 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2316 #ifdef EPILOGUE_USES
2317 || EPILOGUE_USES (i)
2320 SET_REGNO_REG_SET (set, i);
2322 /* ??? Mark function return value here rather than as uses. */
/* NOTE(review): elided listing -- interior lines missing (including the
   fixed-point loop control).  Core liveness solver: allocates per-block
   reg sets and a scratch new_live_at_end vector (hung off bb->aux),
   seeds the exit block via mark_regs_live_at_end, then iterates
   backward dataflow (propagate_block) to a fixed point; afterwards runs
   one final destructive pass to delete dead stores, create
   autoincrements, and gather register usage statistics.  */
2325 /* Determine which registers are live at the start of each
2326 basic block of the function whose first insn is F.
2327 NREGS is the number of registers used in F.
2328 We allocate the vector basic_block_live_at_start
2329 and the regsets that it points to, and fill them with the data.
2330 regset_size and regset_bytes are also set here. */
2333 life_analysis_1 (f, nregs, remove_dead_code)
2336 int remove_dead_code;
2341 char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
2342 regset *new_live_at_end;
2344 struct obstack flow_obstack;
2346 gcc_obstack_init (&flow_obstack);
2350 /* Allocate and zero out many data structures
2351 that will record the data from lifetime analysis. */
2353 allocate_reg_life_data ();
2354 allocate_bb_life_data ();
2356 reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
2357 memset (reg_next_use, 0, nregs * sizeof (rtx));
2359 /* Set up regset-vectors used internally within this function.
2360 Their meanings are documented above, with their declarations. */
2362 new_live_at_end = (regset *) alloca ((n_basic_blocks + 1) * sizeof (regset));
2363 init_regset_vector (new_live_at_end, n_basic_blocks + 1, &flow_obstack);
2365 /* Stick these vectors into the AUX field of the basic block, so that
2366 we don't have to keep going through the index. */
2368 for (i = 0; i < n_basic_blocks; ++i)
2369 BASIC_BLOCK (i)->aux = new_live_at_end[i];
2370 ENTRY_BLOCK_PTR->aux = new_live_at_end[i];
2372 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2373 This will be cleared by record_volatile_insns if it encounters an insn
2374 which modifies the stack pointer. */
2375 current_function_sp_is_unchanging = !current_function_calls_alloca;
2377 record_volatile_insns (f);
2379 if (n_basic_blocks > 0)
2384 theend = EXIT_BLOCK_PTR->global_live_at_start;
2385 mark_regs_live_at_end (theend);
2387 /* Propogate this exit data to each of EXIT's predecessors. */
2388 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
2390 COPY_REG_SET (e->src->global_live_at_end, theend);
2391 COPY_REG_SET ((regset) e->src->aux, theend);
2395 /* The post-reload life analysis have (on a global basis) the same registers
2396 live as was computed by reload itself.
2398 Otherwise elimination offsets and such may be incorrect.
2400 Reload will make some registers as live even though they do not appear
2402 if (reload_completed)
2403 memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
2404 memset (regs_ever_live, 0, sizeof regs_ever_live);
2406 /* Propagate life info through the basic blocks
2407 around the graph of basic blocks.
2409 This is a relaxation process: each time a new register
2410 is live at the end of the basic block, we must scan the block
2411 to determine which registers are, as a consequence, live at the beginning
2412 of that block. These registers must then be marked live at the ends
2413 of all the blocks that can transfer control to that block.
2414 The process continues until it reaches a fixed point. */
2421 for (i = n_basic_blocks - 1; i >= 0; i--)
2423 basic_block bb = BASIC_BLOCK (i);
2424 int consider = first_pass;
2425 int must_rescan = first_pass;
2430 /* Set CONSIDER if this block needs thinking about at all
2431 (that is, if the regs live now at the end of it
2432 are not the same as were live at the end of it when
2433 we last thought about it).
2434 Set must_rescan if it needs to be thought about
2435 instruction by instruction (that is, if any additional
2436 reg that is live at the end now but was not live there before
2437 is one of the significant regs of this basic block). */
2439 EXECUTE_IF_AND_COMPL_IN_REG_SET
2440 ((regset) bb->aux, bb->global_live_at_end, 0, j,
2443 if (REGNO_REG_SET_P (bb->local_set, j))
2454 /* The live_at_start of this block may be changing,
2455 so another pass will be required after this one. */
2460 /* No complete rescan needed;
2461 just record those variables newly known live at end
2462 as live at start as well. */
2463 IOR_AND_COMPL_REG_SET (bb->global_live_at_start,
2465 bb->global_live_at_end);
2467 IOR_AND_COMPL_REG_SET (bb->global_live_at_end,
2469 bb->global_live_at_end);
2473 /* Update the basic_block_live_at_start
2474 by propagation backwards through the block. */
2475 COPY_REG_SET (bb->global_live_at_end, (regset) bb->aux);
2476 COPY_REG_SET (bb->global_live_at_start,
2477 bb->global_live_at_end);
2478 propagate_block (bb->global_live_at_start,
2479 bb->head, bb->end, 0,
2480 first_pass ? bb->local_set : (regset) 0,
2481 i, remove_dead_code);
2484 /* Update the new_live_at_end's of the block's predecessors. */
2488 for (e = bb->pred; e ; e = e->pred_next)
2489 IOR_REG_SET ((regset) e->src->aux, bb->global_live_at_start);
2499 /* The only pseudos that are live at the beginning of the function are
2500 those that were not set anywhere in the function. local-alloc doesn't
2501 know how to handle these correctly, so mark them as not local to any
2504 if (n_basic_blocks > 0)
2505 EXECUTE_IF_SET_IN_REG_SET (BASIC_BLOCK (0)->global_live_at_start,
2506 FIRST_PSEUDO_REGISTER, i,
2508 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2511 /* Now the life information is accurate. Make one more pass over each
2512 basic block to delete dead stores, create autoincrement addressing
2513 and record how many times each register is used, is set, or dies. */
2515 for (i = 0; i < n_basic_blocks; i++)
2517 basic_block bb = BASIC_BLOCK (i);
2519 /* We start with global_live_at_end to determine which stores are
2520 dead. This process is destructive, and we wish to preserve the
2521 contents of global_live_at_end for posterity. Fortunately,
2522 new_live_at_end, due to the way we converged on a solution,
2523 contains a duplicate of global_live_at_end that we can kill. */
2524 propagate_block ((regset) bb->aux, bb->head, bb->end, 1, (regset) 0, i, remove_dead_code);
2531 /* We have a problem with any pseudoreg that lives across the setjmp.
2532 ANSI says that if a user variable does not change in value between
2533 the setjmp and the longjmp, then the longjmp preserves it. This
2534 includes longjmp from a place where the pseudo appears dead.
2535 (In principle, the value still exists if it is in scope.)
2536 If the pseudo goes in a hard reg, some other value may occupy
2537 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2538 Conclusion: such a pseudo must not go in a hard reg. */
2539 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2540 FIRST_PSEUDO_REGISTER, i,
2542 if (regno_reg_rtx[i] != 0)
2544 REG_LIVE_LENGTH (i) = -1;
2545 REG_BASIC_BLOCK (i) = -1;
2549 /* Restore regs_ever_live that was provided by reload. */
2550 if (reload_completed)
2551 memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
2553 free_regset_vector (new_live_at_end, n_basic_blocks);
2554 obstack_free (&flow_obstack, NULL_PTR);
2556 for (i = 0; i < n_basic_blocks; ++i)
2557 BASIC_BLOCK (i)->aux = NULL;
2558 ENTRY_BLOCK_PTR->aux = NULL;
2561 /* Subroutines of life analysis. */
2563 /* Allocate the permanent data structures that represent the results
2564 of life analysis. Not static since used also for stupid life analysis. */
2567 allocate_bb_life_data ()
2571 for (i = 0; i < n_basic_blocks; i++)
2573 basic_block bb = BASIC_BLOCK (i);
2575 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
2576 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
2577 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
2580 ENTRY_BLOCK_PTR->global_live_at_end
2581 = OBSTACK_ALLOC_REG_SET (function_obstack);
2582 EXIT_BLOCK_PTR->global_live_at_start
2583 = OBSTACK_ALLOC_REG_SET (function_obstack);
2585 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
/* (Re)size and reset the per-register life-analysis data for all
   MAX_REGNO registers.  */
2589 allocate_reg_life_data ()
2593 /* Recalculate the register space, in case it has grown. Old style
2594 vector oriented regsets would set regset_{size,bytes} here also. */
2595 allocate_reg_info (max_regno, FALSE, FALSE);
2597 /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
2598 information, explicitly reset it here. The allocation should have
2599 already happened on the previous reg_scan pass. Make sure in case
2600 some more registers were allocated. */
2601 for (i = 0; i < max_regno; i++)
/* NOTE(review): the loop body is not visible in this excerpt; per the
   comment above it presumably clears the per-register counts
   (REG_N_SETS etc.) -- confirm against the complete file.  */
2605 /* Make each element of VECTOR point at a regset. The vector has
2606 NELTS elements, and space is allocated from the ALLOC_OBSTACK
2610 init_regset_vector (vector, nelts, alloc_obstack)
2613 struct obstack *alloc_obstack;
2617 for (i = 0; i < nelts; i++)
2619 vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
2620 CLEAR_REG_SET (vector[i]);
2624 /* Release any additional space allocated for each element of VECTOR point
2625 other than the regset header itself. The vector has NELTS elements. */
2628 free_regset_vector (vector, nelts)
2634 for (i = 0; i < nelts; i++)
2635 FREE_REG_SET (vector[i]);
2638 /* Compute the registers live at the beginning of a basic block
2639 from those live at the end.
2641 When called, OLD contains those live at the end.
2642 On return, it contains those live at the beginning.
2643 FIRST and LAST are the first and last insns of the basic block.
2645 FINAL is nonzero if we are doing the final pass which is not
2646 for computing the life info (since that has already been done)
2647 but for acting on it. On this pass, we delete dead stores,
2648 set up the logical links and dead-variables lists of instructions,
2649 and merge instructions for autoincrement and autodecrement addresses.
2651 SIGNIFICANT is nonzero only the first time for each basic block.
2652 If it is nonzero, it points to a regset in which we store
2653 a 1 for each register that is set within the block.
2655 BNUM is the number of the basic block. */
2658 propagate_block (old, first, last, final, significant, bnum, remove_dead_code)
2659 register regset old;
2665 int remove_dead_code;
/* NOTE(review): this excerpt elides many structural lines (declarations,
   braces, returns, #ifdef/#endif); the comments below describe only the
   code that is visible here.  */
2672 /* Find the loop depth for this block. Ignore loop level changes in the
2673 middle of the basic block -- for register allocation purposes, the
2674 important uses will be in the blocks wholely contained within the loop
2675 not in the loop pre-header or post-trailer. */
2676 loop_depth = BASIC_BLOCK (bnum)->loop_depth;
/* DEAD and LIVE are per-insn scratch sets, freed at the end of the
   function.  */
2678 dead = ALLOCA_REG_SET ();
2679 live = ALLOCA_REG_SET ();
/* Start with no tracked memory stores.  */
2682 mem_set_list = NULL_RTX;
2688 /* Process the regs live at the end of the block.
2689 Mark them as not local to any one basic block. */
2690 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2692 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2696 /* Scan the block an insn at a time from end to beginning. */
/* NOTE(review): the loop's termination test (stopping once FIRST has
   been processed) is not visible in this excerpt.  */
2698 for (insn = last; ; insn = prev)
2700 prev = PREV_INSN (insn);
2702 if (GET_CODE (insn) == NOTE)
2704 /* If this is a call to `setjmp' et al,
2705 warn if any non-volatile datum is live. */
2707 if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
2708 IOR_REG_SET (regs_live_at_setjmp, old);
2711 /* Update the life-status of regs for this insn.
2712 First DEAD gets which regs are set in this insn
2713 then LIVE gets which regs are used in this insn.
2714 Then the regs live before the insn
2715 are those live after, with DEAD regs turned off,
2716 and then LIVE regs turned on. */
2718 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2721 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
2722 int insn_is_dead = 0;
2723 int libcall_is_dead = 0;
2725 if (remove_dead_code)
2727 insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
2728 /* Don't delete something that refers to volatile storage! */
2729 && ! INSN_VOLATILE (insn));
2730 libcall_is_dead = (insn_is_dead && note != 0
2731 && libcall_dead_p (PATTERN (insn), old, note, insn));
2734 /* If an instruction consists of just dead store(s) on final pass,
2735 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
2736 We could really delete it with delete_insn, but that
2737 can cause trouble for first or last insn in a basic block. */
2738 if (final && insn_is_dead)
2741 /* If the insn referred to a label, note that the label is
2743 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
2745 if (REG_NOTE_KIND (inote) == REG_LABEL)
2748 rtx label = XEXP (inote, 0);
2750 LABEL_NUSES (label)--;
2752 /* The label may be forced if it has been put in the
2753 constant pool. We can't delete it in this case, but
2754 we still must discard a jump table following it. */
2756 if (LABEL_PRESERVE_P (label))
2759 /* If this label was attached to an ADDR_VEC, it's
2760 safe to delete the ADDR_VEC. In fact, it's pretty much
2761 mandatory to delete it, because the ADDR_VEC may
2762 be referencing labels that no longer exist. */
2763 if (LABEL_NUSES (label) == n_forced
2764 && (next = next_nonnote_insn (label)) != NULL
2765 && GET_CODE (next) == JUMP_INSN
2766 && (GET_CODE (PATTERN (next)) == ADDR_VEC
2767 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
2769 rtx pat = PATTERN (next);
2770 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
2771 int len = XVECLEN (pat, diff_vec_p);
/* Drop the reference counts of every label the jump table mentions
   before deleting the table itself.  */
2773 for (i = 0; i < len; i++)
2774 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2776 flow_delete_insn (next);
/* "Delete" the dead insn by demoting it to a NOTE_INSN_DELETED note.  */
2781 PUT_CODE (insn, NOTE);
2782 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2783 NOTE_SOURCE_FILE (insn) = 0;
2785 /* CC0 is now known to be dead. Either this insn used it,
2786 in which case it doesn't anymore, or clobbered it,
2787 so the next insn can't use it. */
2790 /* If this insn is copying the return value from a library call,
2791 delete the entire library call. */
2792 if (libcall_is_dead)
2794 rtx first = XEXP (note, 0);
/* Skip over insns of the libcall that were already deleted.  */
2796 while (INSN_DELETED_P (first))
2797 first = NEXT_INSN (first);
2802 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
2803 NOTE_SOURCE_FILE (p) = 0;
/* Reset the per-insn scratch sets before analyzing this insn.  */
2809 CLEAR_REG_SET (dead);
2810 CLEAR_REG_SET (live);
2812 /* See if this is an increment or decrement that can be
2813 merged into a following memory address. */
2816 register rtx x = single_set (insn);
2818 /* Does this instruction increment or decrement a register? */
2819 if (!reload_completed
2821 && GET_CODE (SET_DEST (x)) == REG
2822 && (GET_CODE (SET_SRC (x)) == PLUS
2823 || GET_CODE (SET_SRC (x)) == MINUS)
2824 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
2825 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2826 /* Ok, look for a following memory ref we can combine with.
2827 If one is found, change the memory ref to a PRE_INC
2828 or PRE_DEC, cancel this insn, and return 1.
2829 Return 0 if nothing has been done. */
2830 && try_pre_increment_1 (insn))
2833 #endif /* AUTO_INC_DEC */
2835 /* If this is not the final pass, and this insn is copying the
2836 value of a library call and it's dead, don't scan the
2837 insns that perform the library call, so that the call's
2838 arguments are not marked live. */
2839 if (libcall_is_dead)
2841 /* Mark the dest reg as `significant'. */
2842 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);
/* Jump back over the whole libcall sequence; NOTE's operand is
   presumably its first insn (REG_RETVAL note) -- the next iteration
   resumes scanning just before it.  */
2844 insn = XEXP (note, 0);
2845 prev = PREV_INSN (insn);
2847 else if (GET_CODE (PATTERN (insn)) == SET
2848 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2849 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2850 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
2851 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
2852 /* We have an insn to pop a constant amount off the stack.
2853 (Such insns use PLUS regardless of the direction of the stack,
2854 and any insn to adjust the stack by a constant is always a pop.)
2855 These insns, if not dead stores, have no effect on life. */
2859 /* Any regs live at the time of a call instruction
2860 must not go in a register clobbered by calls.
2861 Find all regs now live and record this for them. */
2863 if (GET_CODE (insn) == CALL_INSN && final)
2864 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2866 REG_N_CALLS_CROSSED (i)++;
2869 /* LIVE gets the regs used in INSN;
2870 DEAD gets those set by it. Dead insns don't make anything
2873 mark_set_regs (old, dead, PATTERN (insn),
2874 final ? insn : NULL_RTX, significant);
2876 /* If an insn doesn't use CC0, it becomes dead since we
2877 assume that every insn clobbers it. So show it dead here;
2878 mark_used_regs will set it live if it is referenced. */
2882 mark_used_regs (old, live, PATTERN (insn), final, insn);
2884 /* Sometimes we may have inserted something before INSN (such as
2885 a move) when we make an auto-inc. So ensure we will scan
2888 prev = PREV_INSN (insn);
2891 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
/* USEs in CALL_INSN_FUNCTION_USAGE are argument registers; mark
   them live across the call.  */
2897 for (note = CALL_INSN_FUNCTION_USAGE (insn);
2899 note = XEXP (note, 1))
2900 if (GET_CODE (XEXP (note, 0)) == USE)
2901 mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
2904 /* Each call clobbers all call-clobbered regs that are not
2905 global or fixed. Note that the function-value reg is a
2906 call-clobbered reg, and mark_set_regs has already had
2907 a chance to handle it. */
2909 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2910 if (call_used_regs[i] && ! global_regs[i]
2912 SET_REGNO_REG_SET (dead, i);
2914 /* The stack ptr is used (honorarily) by a CALL insn. */
2915 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
2917 /* Calls may also reference any of the global registers,
2918 so they are made live. */
2919 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2921 mark_used_regs (old, live,
2922 gen_rtx_REG (reg_raw_mode[i], i),
2925 /* Calls also clobber memory. */
2926 mem_set_list = NULL_RTX;
2929 /* Update OLD for the registers used or set. */
2930 AND_COMPL_REG_SET (old, dead);
2931 IOR_REG_SET (old, live);
2935 /* On final pass, update counts of how many insns each reg is live
2938 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2939 { REG_LIVE_LENGTH (i)++; });
/* Release the per-insn scratch sets.  */
2946 FREE_REG_SET (dead);
2947 FREE_REG_SET (live);
2950 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
2951 (SET expressions whose destinations are registers dead after the insn).
2952 NEEDED is the regset that says which regs are alive after the insn.
2954 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
2956 If X is the entire body of an insn, NOTES contains the reg notes
2957 pertaining to the insn. */
2960 insn_dead_p (x, needed, call_ok, notes)
2964 rtx notes ATTRIBUTE_UNUSED;
/* NOTE(review): structural lines (braces, returns, #ifdef/#endif pairs)
   are elided in this excerpt; comments below describe only the visible
   code.  */
2966 enum rtx_code code = GET_CODE (x);
2969 /* If flow is invoked after reload, we must take existing AUTO_INC
2970 expresions into account. */
2971 if (reload_completed)
2973 for ( ; notes; notes = XEXP (notes, 1))
2975 if (REG_NOTE_KIND (notes) == REG_INC)
2977 int regno = REGNO (XEXP (notes, 0));
2979 /* Don't delete insns to set global regs. */
2980 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2981 || REGNO_REG_SET_P (needed, regno))
2988 /* If setting something that's a reg or part of one,
2989 see if that register's altered value will be live. */
2993 rtx r = SET_DEST (x);
2995 /* A SET that is a subroutine call cannot be dead. */
2996 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
3000 if (GET_CODE (r) == CC0)
3004 if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
3007 /* Walk the set of memory locations we are currently tracking
3008 and see if one is an identical match to this memory location.
3009 If so, this memory write is dead (remember, we're walking
3010 backwards from the end of the block to the start. */
3011 temp = mem_set_list;
3014 if (rtx_equal_p (XEXP (temp, 0), r))
3016 temp = XEXP (temp, 1);
/* Peel off wrappers to reach the register (if any) actually stored.  */
3020 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
3021 || GET_CODE (r) == ZERO_EXTRACT)
3024 if (GET_CODE (r) == REG)
3026 int regno = REGNO (r);
3028 /* Don't delete insns to set global regs. */
3029 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3030 /* Make sure insns to set frame pointer aren't deleted. */
3031 || (regno == FRAME_POINTER_REGNUM
3032 && (! reload_completed || frame_pointer_needed))
3033 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3034 || (regno == HARD_FRAME_POINTER_REGNUM
3035 && (! reload_completed || frame_pointer_needed))
3037 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3038 /* Make sure insns to set arg pointer are never deleted
3039 (if the arg pointer isn't fixed, there will be a USE for
3040 it, so we can treat it normally). */
3041 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3043 || REGNO_REG_SET_P (needed, regno))
3046 /* If this is a hard register, verify that subsequent words are
3048 if (regno < FIRST_PSEUDO_REGISTER)
/* Check liveness of each remaining hard reg of a multi-word value.  */
3050 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3053 if (REGNO_REG_SET_P (needed, regno+n))
3061 /* If performing several activities,
3062 insn is dead if each activity is individually dead.
3063 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3064 that's inside a PARALLEL doesn't make the insn worth keeping. */
3065 else if (code == PARALLEL)
3067 int i = XVECLEN (x, 0);
3069 for (i--; i >= 0; i--)
3070 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3071 && GET_CODE (XVECEXP (x, 0, i)) != USE
3072 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3078 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3079 is not necessarily true for hard registers. */
3080 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3081 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3082 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3085 /* We do not check other CLOBBER or USE here. An insn consisting of just
3086 a CLOBBER or just a USE should not be deleted. */
3090 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3091 return 1 if the entire library call is dead.
3092 This is true if X copies a register (hard or pseudo)
3093 and if the hard return reg of the call insn is dead.
3094 (The caller should have tested the destination of X already for death.)
3096 If this insn doesn't just copy a register, then we don't
3097 have an ordinary libcall. In that case, cse could not have
3098 managed to substitute the source for the dest later on,
3099 so we can assume the libcall is dead.
3101 NEEDED is the bit vector of pseudoregs live before this insn.
3102 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3105 libcall_dead_p (x, needed, note, insn)
/* NOTE(review): parameter declarations, braces and several returns are
   elided in this excerpt; only the visible logic is commented.  */
3111 register RTX_CODE code = GET_CODE (x);
3115 register rtx r = SET_SRC (x);
/* Only a plain register copy can be the tail of an ordinary libcall.  */
3116 if (GET_CODE (r) == REG)
3118 rtx call = XEXP (note, 0);
3122 /* Find the call insn. */
3123 while (call != insn && GET_CODE (call) != CALL_INSN)
3124 call = NEXT_INSN (call);
3126 /* If there is none, do nothing special,
3127 since ordinary death handling can understand these insns. */
3131 /* See if the hard reg holding the value is dead.
3132 If this is a PARALLEL, find the call within it. */
3133 call_pat = PATTERN (call);
3134 if (GET_CODE (call_pat) == PARALLEL)
3136 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3137 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3138 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3141 /* This may be a library call that is returning a value
3142 via invisible pointer. Do nothing special, since
3143 ordinary death handling can understand these insns. */
3147 call_pat = XVECEXP (call_pat, 0, i);
/* The whole libcall is dead iff the call itself is dead; CALL_OK=1
   because a libcall's CALL is expected here.  */
3150 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3156 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3157 live at function entry. Don't count global register variables, variables
3158 in registers that can be used for function arg passing, or variables in
3159 fixed hard registers. */
3162 regno_uninitialized (regno)
3165 if (n_basic_blocks == 0
3166 || (regno < FIRST_PSEUDO_REGISTER
3167 && (global_regs[regno]
3168 || fixed_regs[regno]
3169 || FUNCTION_ARG_REGNO_P (regno))))
3172 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3175 /* 1 if register REGNO was alive at a place where `setjmp' was called
3176 and was set more than once or is an argument.
3177 Such regs may be clobbered by `longjmp'. */
3180 regno_clobbered_at_setjmp (regno)
3183 if (n_basic_blocks == 0)
3186 return ((REG_N_SETS (regno) > 1
3187 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3188 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3191 /* INSN references memory, possibly using autoincrement addressing modes.
3192 Find any entries on the mem_set_list that need to be invalidated due
3193 to an address change. */
3195 invalidate_mems_from_autoinc (insn)
/* NOTE(review): this initializer is dead -- the for loop below
   immediately reassigns NOTE from the same expression.  */
3198 rtx note = REG_NOTES (insn);
/* For each REG_INC note on INSN, drop every tracked memory location
   whose address mentions the auto-incremented register.  */
3199 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3201 if (REG_NOTE_KIND (note) == REG_INC)
3203 rtx temp = mem_set_list;
3204 rtx prev = NULL_RTX;
/* NOTE(review): the while-loop header and the if/else around the splice
   are elided in this excerpt.  */
3208 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3210 /* Splice temp out of list. */
3212 XEXP (prev, 1) = XEXP (temp, 1);
3214 mem_set_list = XEXP (temp, 1);
3218 temp = XEXP (temp, 1);
3224 /* Process the registers that are set within X.
3225 Their bits are set to 1 in the regset DEAD,
3226 because they are dead prior to this insn.
3228 If INSN is nonzero, it is the insn being processed
3229 and the fact that it is nonzero implies this is the FINAL pass
3230 in propagate_block. In this case, various info about register
3231 usage is stored, LOG_LINKS fields of insns are set up. */
3234 mark_set_regs (needed, dead, x, insn, significant)
3241 register RTX_CODE code = GET_CODE (x);
3243 if (code == SET || code == CLOBBER)
3244 mark_set_1 (needed, dead, x, insn, significant);
3245 else if (code == PARALLEL)
3248 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3250 code = GET_CODE (XVECEXP (x, 0, i));
3251 if (code == SET || code == CLOBBER)
3252 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
3257 /* Process a single SET rtx, X. */
3260 mark_set_1 (needed, dead, x, insn, significant)
/* NOTE(review): parameter declarations, braces and some statements are
   elided in this excerpt; comments below cover only the visible code.  */
3268 register rtx reg = SET_DEST (x);
3270 /* Some targets place small structures in registers for
3271 return values of functions. We have to detect this
3272 case specially here to get correct flow information. */
3273 if (GET_CODE (reg) == PARALLEL
3274 && GET_MODE (reg) == BLKmode)
3278 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3279 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn, significant);
3283 /* Modifying just one hardware register of a multi-reg value
3284 or just a byte field of a register
3285 does not mean the value from before this insn is now dead.
3286 But it does mean liveness of that register at the end of the block
3289 Within mark_set_1, however, we treat it as if the register is
3290 indeed modified. mark_used_regs will, however, also treat this
3291 register as being used. Thus, we treat these insns as setting a
3292 new value for the register as a function of its old value. This
3293 cases LOG_LINKS to be made appropriately and this will help combine. */
3295 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3296 || GET_CODE (reg) == SIGN_EXTRACT
3297 || GET_CODE (reg) == STRICT_LOW_PART)
3298 reg = XEXP (reg, 0);
3300 /* If this set is a MEM, then it kills any aliased writes.
3301 If this set is a REG, then it kills any MEMs which use the reg. */
3302 if (GET_CODE (reg) == MEM
3303 || GET_CODE (reg) == REG)
3305 rtx temp = mem_set_list;
3306 rtx prev = NULL_RTX;
/* NOTE(review): the loop header and the if/else around the splice are
   elided here.  */
3310 if ((GET_CODE (reg) == MEM
3311 && output_dependence (XEXP (temp, 0), reg))
3312 || (GET_CODE (reg) == REG
3313 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3315 /* Splice this entry out of the list. */
3317 XEXP (prev, 1) = XEXP (temp, 1);
3319 mem_set_list = XEXP (temp, 1);
3323 temp = XEXP (temp, 1);
3327 /* If the memory reference had embedded side effects (autoincrement
3328 address modes. Then we may need to kill some entries on the
3330 if (insn && GET_CODE (reg) == MEM)
3331 invalidate_mems_from_autoinc (insn);
/* A well-behaved non-BLKmode store becomes a tracked location for
   redundant-store elimination.  */
3333 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3334 /* We do not know the size of a BLKmode store, so we do not track
3335 them for redundant store elimination. */
3336 && GET_MODE (reg) != BLKmode
3337 /* There are no REG_INC notes for SP, so we can't assume we'll see
3338 everything that invalidates it. To be safe, don't eliminate any
3339 stores though SP; none of them should be redundant anyway. */
3340 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3341 mem_set_list = gen_rtx_EXPR_LIST (VOIDmode, reg, mem_set_list);
/* Record liveness info for the register being set -- unless it is a
   (needed) frame pointer, arg pointer, or a global register.  */
3343 if (GET_CODE (reg) == REG
3344 && (regno = REGNO (reg), ! (regno == FRAME_POINTER_REGNUM
3345 && (! reload_completed || frame_pointer_needed)))
3346 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3347 && ! (regno == HARD_FRAME_POINTER_REGNUM
3348 && (! reload_completed || frame_pointer_needed))
3350 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3351 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3353 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3354 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3356 int some_needed = REGNO_REG_SET_P (needed, regno);
3357 int some_not_needed = ! some_needed;
3359 /* Mark it as a significant register for this basic block. */
3361 SET_REGNO_REG_SET (significant, regno);
3363 /* Mark it as dead before this insn. */
3364 SET_REGNO_REG_SET (dead, regno);
3366 /* A hard reg in a wide mode may really be multiple registers.
3367 If so, mark all of them just like the first. */
3368 if (regno < FIRST_PSEUDO_REGISTER)
3372 /* Nothing below is needed for the stack pointer; get out asap.
3373 Eg, log links aren't needed, since combine won't use them. */
3374 if (regno == STACK_POINTER_REGNUM)
3377 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3380 int regno_n = regno + n;
3381 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3383 SET_REGNO_REG_SET (significant, regno_n);
3385 SET_REGNO_REG_SET (dead, regno_n);
3386 some_needed |= needed_regno;
3387 some_not_needed |= ! needed_regno;
3390 /* Additional data to record if this is the final pass. */
3393 register rtx y = reg_next_use[regno];
3394 register int blocknum = BLOCK_NUM (insn);
3396 /* If this is a hard reg, record this function uses the reg. */
3398 if (regno < FIRST_PSEUDO_REGISTER)
3401 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3403 for (i = regno; i < endregno; i++)
3405 /* The next use is no longer "next", since a store
3407 reg_next_use[i] = 0;
3409 regs_ever_live[i] = 1;
3415 /* The next use is no longer "next", since a store
3417 reg_next_use[regno] = 0;
3419 /* Keep track of which basic blocks each reg appears in. */
3421 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3422 REG_BASIC_BLOCK (regno) = blocknum;
3423 else if (REG_BASIC_BLOCK (regno) != blocknum)
3424 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3426 /* Count (weighted) references, stores, etc. This counts a
3427 register twice if it is modified, but that is correct. */
3428 REG_N_SETS (regno)++;
3430 REG_N_REFS (regno) += loop_depth;
3432 /* The insns where a reg is live are normally counted
3433 elsewhere, but we want the count to include the insn
3434 where the reg is set, and the normal counting mechanism
3435 would not count it. */
3436 REG_LIVE_LENGTH (regno)++;
/* Every word of the destination is still needed afterward: build a
   LOG_LINK from the next use back to this setter.  */
3439 if (! some_not_needed)
3441 /* Make a logical link from the next following insn
3442 that uses this register, back to this insn.
3443 The following insns have already been processed.
3445 We don't build a LOG_LINK for hard registers containing
3446 in ASM_OPERANDs. If these registers get replaced,
3447 we might wind up changing the semantics of the insn,
3448 even if reload can make what appear to be valid assignments
3450 if (y && (BLOCK_NUM (y) == blocknum)
3451 && (regno >= FIRST_PSEUDO_REGISTER
3452 || asm_noperands (PATTERN (y)) < 0))
3454 = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
3456 else if (! some_needed)
3458 /* Note that dead stores have already been deleted when possible
3459 If we get here, we have found a dead store that cannot
3460 be eliminated (because the same insn does something useful).
3461 Indicate this by marking the reg being set as dying here. */
3463 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3464 REG_N_DEATHS (REGNO (reg))++;
3468 /* This is a case where we have a multi-word hard register
3469 and some, but not all, of the words of the register are
3470 needed in subsequent insns. Write REG_UNUSED notes
3471 for those parts that were not needed. This case should
3476 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
3478 if (!REGNO_REG_SET_P (needed, regno + i))
3480 = gen_rtx_EXPR_LIST (REG_UNUSED,
3481 gen_rtx_REG (reg_raw_mode[regno + i],
/* Non-final pass: just forget the recorded next use of this reg.  */
3487 else if (GET_CODE (reg) == REG)
3488 reg_next_use[regno] = 0;
3490 /* If this is the last pass and this is a SCRATCH, show it will be dying
3491 here and count it. */
3492 else if (GET_CODE (reg) == SCRATCH && insn != 0)
3495 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3501 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
3505 find_auto_inc (needed, x, insn)
/* NOTE(review): parameter declarations, braces, returns and the opening
   #ifdef AUTO_INC_DEC are elided in this excerpt; only visible code is
   commented.  */
3510 rtx addr = XEXP (x, 0);
3511 HOST_WIDE_INT offset = 0;
3514 /* Here we detect use of an index register which might be good for
3515 postincrement, postdecrement, preincrement, or predecrement. */
/* Split reg+const addresses into base register and constant offset.  */
3517 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3518 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
3520 if (GET_CODE (addr) == REG)
3523 register int size = GET_MODE_SIZE (GET_MODE (x));
3526 int regno = REGNO (addr);
3528 /* Is the next use an increment that might make auto-increment? */
3529 if ((incr = reg_next_use[regno]) != 0
3530 && (set = single_set (incr)) != 0
3531 && GET_CODE (set) == SET
3532 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
3533 /* Can't add side effects to jumps; if reg is spilled and
3534 reloaded, there's no way to store back the altered value. */
3535 && GET_CODE (insn) != JUMP_INSN
3536 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
3537 && XEXP (y, 0) == addr
3538 && GET_CODE (XEXP (y, 1)) == CONST_INT
3539 && ((HAVE_POST_INCREMENT
3540 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
3541 || (HAVE_POST_DECREMENT
3542 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
3543 || (HAVE_PRE_INCREMENT
3544 && (INTVAL (XEXP (y, 1)) == size && offset == size))
3545 || (HAVE_PRE_DECREMENT
3546 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
3547 /* Make sure this reg appears only once in this insn. */
3548 && (use = find_use_as_address (PATTERN (insn), addr, offset),
3549 use != 0 && use != (rtx) 1))
3551 rtx q = SET_DEST (set);
/* Zero offset means post-modify; a matching nonzero offset means
   pre-modify (see the condition above).  */
3552 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
3553 ? (offset ? PRE_INC : POST_INC)
3554 : (offset ? PRE_DEC : POST_DEC));
3556 if (dead_or_set_p (incr, addr))
3558 /* This is the simple case. Try to make the auto-inc. If
3559 we can't, we are done. Otherwise, we will do any
3560 needed updates below. */
3561 if (! validate_change (insn, &XEXP (x, 0),
3562 gen_rtx_fmt_e (inc_code, Pmode, addr),
3566 else if (GET_CODE (q) == REG
3567 /* PREV_INSN used here to check the semi-open interval
3569 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
3570 /* We must also check for sets of q as q may be
3571 a call clobbered hard register and there may
3572 be a call between PREV_INSN (insn) and incr. */
3573 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
3575 /* We have *p followed sometime later by q = p+size.
3576 Both p and q must be live afterward,
3577 and q is not used between INSN and its assignment.
3578 Change it to q = p, ...*q..., q = q+size.
3579 Then fall into the usual case. */
3584 emit_move_insn (q, addr);
3585 insns = get_insns ();
/* Keep the basic-block mapping consistent for the emitted copy.  */
3588 bb = BLOCK_FOR_INSN (insn);
3589 for (temp = insns; temp; temp = NEXT_INSN (temp))
3590 set_block_for_insn (temp, bb);
3592 /* If we can't make the auto-inc, or can't make the
3593 replacement into Y, exit. There's no point in making
3594 the change below if we can't do the auto-inc and doing
3595 so is not correct in the pre-inc case. */
3597 validate_change (insn, &XEXP (x, 0),
3598 gen_rtx_fmt_e (inc_code, Pmode, q),
3600 validate_change (incr, &XEXP (y, 0), q, 1);
3601 if (! apply_change_group ())
3604 /* We now know we'll be doing this change, so emit the
3605 new insn(s) and do the updates. */
3606 emit_insns_before (insns, insn);
3608 if (BLOCK_FOR_INSN (insn)->head == insn)
3609 BLOCK_FOR_INSN (insn)->head = insns;
3611 /* INCR will become a NOTE and INSN won't contain a
3612 use of ADDR. If a use of ADDR was just placed in
3613 the insn before INSN, make that the next use.
3614 Otherwise, invalidate it. */
3615 if (GET_CODE (PREV_INSN (insn)) == INSN
3616 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
3617 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
3618 reg_next_use[regno] = PREV_INSN (insn);
3620 reg_next_use[regno] = 0;
3625 /* REGNO is now used in INCR which is below INSN, but
3626 it previously wasn't live here. If we don't mark
3627 it as needed, we'll put a REG_DEAD note for it
3628 on this insn, which is incorrect. */
3629 SET_REGNO_REG_SET (needed, regno);
3631 /* If there are any calls between INSN and INCR, show
3632 that REGNO now crosses them. */
3633 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
3634 if (GET_CODE (temp) == CALL_INSN)
3635 REG_N_CALLS_CROSSED (regno)++;
3640 /* If we haven't returned, it means we were able to make the
3641 auto-inc, so update the status. First, record that this insn
3642 has an implicit side effect. */
3645 = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
3647 /* Modify the old increment-insn to simply copy
3648 the already-incremented value of our register. */
3649 if (! validate_change (incr, &SET_SRC (set), addr, 0))
3652 /* If that makes it a no-op (copying the register into itself) delete
3653 it so it won't appear to be a "use" and a "set" of this
3655 if (SET_DEST (set) == addr)
3657 PUT_CODE (incr, NOTE);
3658 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
3659 NOTE_SOURCE_FILE (incr) = 0;
3662 if (regno >= FIRST_PSEUDO_REGISTER)
3664 /* Count an extra reference to the reg. When a reg is
3665 incremented, spilling it is worse, so we want to make
3666 that less likely. */
3667 REG_N_REFS (regno) += loop_depth;
3669 /* Count the increment as a setting of the register,
3670 even though it isn't a SET in rtl. */
3671 REG_N_SETS (regno)++;
3676 #endif /* AUTO_INC_DEC */
3678 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
3679 This is done assuming the registers needed from X
3680 are those that have 1-bits in NEEDED.
3682 On the final pass, FINAL is 1. This means try for autoincrement
3683 and count the uses and deaths of each pseudo-reg.
3685 INSN is the containing instruction. If INSN is dead, this function is not
called.  */
3689 mark_used_regs (needed, live, x, final, insn)
3696 register RTX_CODE code;
3701 code = GET_CODE (x);
/* CLOBBER fragment (case label elided in this excerpt): a clobbered
   MEM still counts the registers mentioned in its address as used.  */
3721 /* If we are clobbering a MEM, mark any registers inside the address
as being used.  */
3723 if (GET_CODE (XEXP (x, 0)) == MEM)
3724 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
/* MEM fragment: a memory read may kill mem_set_list entries it
   anti-depends on, and (under AUTO_INC_DEC) is a candidate for
   auto-increment addressing via find_auto_inc.  */
3728 /* Invalidate the data for the last MEM stored, but only if MEM is
3729 something that can be stored into. */
3730 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3731 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3732 ; /* needn't clear the memory set list */
3735 rtx temp = mem_set_list;
3736 rtx prev = NULL_RTX;
3740 if (anti_dependence (XEXP (temp, 0), x))
3742 /* Splice temp out of the list. */
3744 XEXP (prev, 1) = XEXP (temp, 1);
3746 mem_set_list = XEXP (temp, 1);
3750 temp = XEXP (temp, 1);
3754 /* If the memory reference had embedded side effects (autoincrement
3755 address modes. Then we may need to kill some entries on the
memory set list.  */
3758 invalidate_mems_from_autoinc (insn);
3762 find_auto_inc (needed, x, insn);
/* SUBREG fragment: a size-changing SUBREG of a pseudo is recorded in
   REG_CHANGES_SIZE; a SUBREG of a REG then falls through to the REG
   handling below.  */
3767 if (GET_CODE (SUBREG_REG (x)) == REG
3768 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
3769 && (GET_MODE_SIZE (GET_MODE (x))
3770 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3771 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
3773 /* While we're here, optimize this case. */
3776 /* In case the SUBREG is not of a register, don't optimize */
3777 if (GET_CODE (x) != REG)
3779 mark_used_regs (needed, live, x, final, insn);
3783 /* ... fall through ... */
/* REG fragment: mark the register live (for a multi-word hard reg,
   every hard reg it occupies); on the final pass also update usage
   statistics and attach REG_DEAD notes.  */
3786 /* See a register other than being set
3787 => mark it as needed. */
3791 int some_needed = REGNO_REG_SET_P (needed, regno);
3792 int some_not_needed = ! some_needed;
3794 SET_REGNO_REG_SET (live, regno);
3796 /* A hard reg in a wide mode may really be multiple registers.
3797 If so, mark all of them just like the first. */
3798 if (regno < FIRST_PSEUDO_REGISTER)
3802 /* For stack ptr or fixed arg pointer,
3803 nothing below can be necessary, so waste no more time. */
3804 if (regno == STACK_POINTER_REGNUM
3805 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3806 || (regno == HARD_FRAME_POINTER_REGNUM
3807 && (! reload_completed || frame_pointer_needed))
3809 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3810 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3812 || (regno == FRAME_POINTER_REGNUM
3813 && (! reload_completed || frame_pointer_needed)))
3815 /* If this is a register we are going to try to eliminate,
3816 don't mark it live here. If we are successful in
3817 eliminating it, it need not be live unless it is used for
3818 pseudos, in which case it will have been set live when
3819 it was allocated to the pseudos. If the register will not
3820 be eliminated, reload will set it live at that point. */
3822 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
3823 regs_ever_live[regno] = 1;
3826 /* No death notes for global register variables;
3827 their values are live after this function exits. */
3828 if (global_regs[regno])
3831 reg_next_use[regno] = insn;
3835 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3838 int regno_n = regno + n;
3839 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3841 SET_REGNO_REG_SET (live, regno_n);
3842 some_needed |= needed_regno;
3843 some_not_needed |= ! needed_regno;
3848 /* Record where each reg is used, so when the reg
3849 is set we know the next insn that uses it. */
3851 reg_next_use[regno] = insn;
3853 if (regno < FIRST_PSEUDO_REGISTER)
3855 /* If a hard reg is being used,
3856 record that this function does use it. */
3858 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
3862 regs_ever_live[regno + --i] = 1;
3867 /* Keep track of which basic block each reg appears in. */
3869 register int blocknum = BLOCK_NUM (insn);
3871 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3872 REG_BASIC_BLOCK (regno) = blocknum;
3873 else if (REG_BASIC_BLOCK (regno) != blocknum)
3874 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3876 /* Count (weighted) number of uses of each reg. */
3878 REG_N_REFS (regno) += loop_depth;
3881 /* Record and count the insns in which a reg dies.
3882 If it is used in this insn and was dead below the insn
3883 then it dies in this insn. If it was set in this insn,
3884 we do not make a REG_DEAD note; likewise if we already
3885 made such a note. */
3888 && ! dead_or_set_p (insn, x)
3890 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
3894 /* Check for the case where the register dying partially
3895 overlaps the register set by this insn. */
3896 if (regno < FIRST_PSEUDO_REGISTER
3897 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
3899 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3901 some_needed |= dead_or_set_regno_p (insn, regno + n);
3904 /* If none of the words in X is needed, make a REG_DEAD
3905 note. Otherwise, we must make partial REG_DEAD notes. */
3909 = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
3910 REG_N_DEATHS (regno)++;
3916 /* Don't make a REG_DEAD note for a part of a register
3917 that is set in the insn. */
3919 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
3921 if (!REGNO_REG_SET_P (needed, regno + i)
3922 && ! dead_or_set_regno_p (insn, regno + i))
3924 = gen_rtx_EXPR_LIST (REG_DEAD,
3925 gen_rtx_REG (reg_raw_mode[regno + i],
/* SET fragment: examine the destination; a MEM store uses only its
   address, while stores through STRICT_LOW_PART / ZERO_EXTRACT /
   SIGN_EXTRACT / SUBREG may also use the old register value.  */
3936 register rtx testreg = SET_DEST (x);
3939 /* If storing into MEM, don't show it as being used. But do
3940 show the address as being used. */
3941 if (GET_CODE (testreg) == MEM)
3945 find_auto_inc (needed, testreg, insn);
3947 mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
3948 mark_used_regs (needed, live, SET_SRC (x), final, insn);
3952 /* Storing in STRICT_LOW_PART is like storing in a reg
3953 in that this SET might be dead, so ignore it in TESTREG.
3954 but in some other ways it is like using the reg.
3956 Storing in a SUBREG or a bit field is like storing the entire
3957 register in that if the register's value is not used
3958 then this SET is not needed. */
3959 while (GET_CODE (testreg) == STRICT_LOW_PART
3960 || GET_CODE (testreg) == ZERO_EXTRACT
3961 || GET_CODE (testreg) == SIGN_EXTRACT
3962 || GET_CODE (testreg) == SUBREG)
3964 if (GET_CODE (testreg) == SUBREG
3965 && GET_CODE (SUBREG_REG (testreg)) == REG
3966 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
3967 && (GET_MODE_SIZE (GET_MODE (testreg))
3968 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
3969 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
3971 /* Modifying a single register in an alternate mode
3972 does not use any of the old value. But these other
3973 ways of storing in a register do use the old value. */
3974 if (GET_CODE (testreg) == SUBREG
3975 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
3980 testreg = XEXP (testreg, 0);
3983 /* If this is a store into a register,
3984 recursively scan the value being stored. */
3986 if ((GET_CODE (testreg) == PARALLEL
3987 && GET_MODE (testreg) == BLKmode)
3988 || (GET_CODE (testreg) == REG
3989 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
3990 && (! reload_completed || frame_pointer_needed)))
3991 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3992 && ! (regno == HARD_FRAME_POINTER_REGNUM
3993 && (! reload_completed || frame_pointer_needed))
3995 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3996 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3999 /* We used to exclude global_regs here, but that seems wrong.
4000 Storing in them is like storing in mem. */
4002 mark_used_regs (needed, live, SET_SRC (x), final, insn);
4004 mark_used_regs (needed, live, SET_DEST (x), final, insn);
/* RETURN fragment: everything needed at function exit counts as a use.  */
4011 /* If exiting needs the right stack value, consider this insn as
4012 using the stack pointer. In any event, consider it as using
4013 all global registers and all registers used by return. */
4014 if (! EXIT_IGNORE_STACK
4015 || (! FRAME_POINTER_REQUIRED
4016 && ! current_function_calls_alloca
4017 && flag_omit_frame_pointer)
4018 || current_function_sp_is_unchanging)
4019 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
4021 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4023 #ifdef EPILOGUE_USES
4024 || EPILOGUE_USES (i)
4027 SET_REGNO_REG_SET (live, i);
4031 case UNSPEC_VOLATILE:
4035 /* Traditional and volatile asm instructions must be considered to use
4036 and clobber all hard registers, all pseudo-registers and all of
4037 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4039 Consider for instance a volatile asm that changes the fpu rounding
4040 mode. An insn should not be moved across this even if it only uses
4041 pseudo-regs because it might give an incorrectly rounded result.
4043 ?!? Unfortunately, marking all hard registers as live causes massive
4044 problems for the register allocator and marking all pseudos as live
4045 creates mountains of uninitialized variable warnings.
4047 So for now, just clear the memory set list and mark any regs
4048 we can find in ASM_OPERANDS as used. */
4049 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4050 mem_set_list = NULL_RTX;
4052 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4053 We can not just fall through here since then we would be confused
4054 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4055 traditional asms unlike their normal usage. */
4056 if (code == ASM_OPERANDS)
4060 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4061 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4072 /* Recursively scan the operands of this expression. */
4075 register char *fmt = GET_RTX_FORMAT (code);
4078 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4082 /* Tail recursive case: save a function call level. */
4088 mark_used_regs (needed, live, XEXP (x, i), final, insn);
4090 else if (fmt[i] == 'E')
4093 for (j = 0; j < XVECLEN (x, i); j++)
4094 mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
/* INSN adds or subtracts a constant to/from a register (the caller has
   already recognized that shape).  If the register's next use is a
   memory address in the same basic block, fold the increment into that
   use as a pre-increment/pre-decrement and delete INSN.  */
4103 try_pre_increment_1 (insn)
4106 /* Find the next use of this reg. If in same basic block,
4107 make it do pre-increment or pre-decrement if appropriate. */
4108 rtx x = single_set (insn);
4109 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4110 * INTVAL (XEXP (SET_SRC (x), 1)));
4111 int regno = REGNO (SET_DEST (x));
4112 rtx y = reg_next_use[regno];
4114 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4115 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4116 mode would be better. */
4117 && ! dead_or_set_p (y, SET_DEST (x))
4118 && try_pre_increment (y, SET_DEST (x), amount))
4120 /* We have found a suitable auto-increment
4121 and already changed insn Y to do it.
4122 So flush this increment-instruction. */
4123 PUT_CODE (insn, NOTE);
4124 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4125 NOTE_SOURCE_FILE (insn) = 0;
4126 /* Count a reference to this reg for the increment
4127 insn we are deleting. When a reg is incremented,
4128 spilling it is worse, so we want to make that
less likely.  */
4130 if (regno >= FIRST_PSEUDO_REGISTER)
4132 REG_N_REFS (regno) += loop_depth;
4133 REG_N_SETS (regno)++;
4140 /* Try to change INSN so that it does pre-increment or pre-decrement
4141 addressing on register REG in order to add AMOUNT to REG.
4142 AMOUNT is negative for pre-decrement.
4143 Returns 1 if the change could be made.
4144 This checks all about the validity of the result of modifying INSN. */
4147 try_pre_increment (insn, reg, amount)
4149 HOST_WIDE_INT amount;
4153 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4154 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4156 /* Nonzero if we can try to make a post-increment or post-decrement.
4157 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4158 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4159 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4162 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4165 /* From the sign of increment, see which possibilities are conceivable
4166 on this target machine. */
4167 if (HAVE_PRE_INCREMENT && amount > 0)
4169 if (HAVE_POST_INCREMENT && amount > 0)
4172 if (HAVE_PRE_DECREMENT && amount < 0)
4174 if (HAVE_POST_DECREMENT && amount < 0)
4177 if (! (pre_ok || post_ok))
4180 /* It is not safe to add a side effect to a jump insn
4181 because if the incremented register is spilled and must be reloaded
4182 there would be no way to store the incremented value back in memory. */
4184 if (GET_CODE (insn) == JUMP_INSN)
/* A use of REG with displacement 0 can become a post-inc/post-dec;
   a use displaced by -AMOUNT can become a pre-inc/pre-dec.  */
4189 use = find_use_as_address (PATTERN (insn), reg, 0);
4190 if (post_ok && (use == 0 || use == (rtx) 1))
4192 use = find_use_as_address (PATTERN (insn), reg, -amount);
4196 if (use == 0 || use == (rtx) 1)
/* The increment amount must match the size of the accessed mode.  */
4199 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4202 /* See if this combination of instruction and addressing mode exists. */
4203 if (! validate_change (insn, &XEXP (use, 0),
4204 gen_rtx_fmt_e (amount > 0
4205 ? (do_post ? POST_INC : PRE_INC)
4206 : (do_post ? POST_DEC : PRE_DEC),
4210 /* Record that this insn now has an implicit side effect on REG. */
4211 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4215 #endif /* AUTO_INC_DEC */
4217 /* Find the place in the rtx X where REG is used as a memory address.
4218 Return the MEM rtx that so uses it.
4219 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4220 (plus REG (const_int PLUSCONST)).
4222 If such an address does not appear, return 0.
4223 If REG appears more than once, or is used other than in such an address,
return (rtx) 1.  */
4227 find_use_as_address (x, reg, plusconst)
4230 HOST_WIDE_INT plusconst;
4232 enum rtx_code code = GET_CODE (x);
4233 char *fmt = GET_RTX_FORMAT (code);
4235 register rtx value = 0;
/* Direct hit: (mem (reg)) with no displacement wanted.  */
4238 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
/* Displaced hit: (mem (plus (reg) (const_int PLUSCONST))).  */
4241 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4242 && XEXP (XEXP (x, 0), 0) == reg
4243 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4244 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4247 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4249 /* If REG occurs inside a MEM used in a bit-field reference,
4250 that is unacceptable. */
4251 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4252 return (rtx) (HOST_WIDE_INT) 1;
4256 return (rtx) (HOST_WIDE_INT) 1;
/* Recurse over the operands; the sentinel (rtx) 1 rejects X when REG
   is used twice or in any non-address context.  */
4258 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4262 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4266 return (rtx) (HOST_WIDE_INT) 1;
4271 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4273 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4277 return (rtx) (HOST_WIDE_INT) 1;
4285 /* Write information about registers and basic blocks into FILE.
4286 This is part of making a debugging dump. */
4289 dump_flow_info (file)
4293 static char *reg_class_names[] = REG_CLASS_NAMES;
/* First, one line per pseudo register: use/set/death counts, home
   block, size, preferred/alternate classes, pointer-ness.  */
4295 fprintf (file, "%d registers.\n", max_regno);
4296 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4299 enum reg_class class, altclass;
4300 fprintf (file, "\nRegister %d used %d times across %d insns",
4301 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4302 if (REG_BASIC_BLOCK (i) >= 0)
4303 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4305 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4306 (REG_N_SETS (i) == 1) ? "" : "s");
4307 if (REG_USERVAR_P (regno_reg_rtx[i]))
4308 fprintf (file, "; user var");
4309 if (REG_N_DEATHS (i) != 1)
4310 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4311 if (REG_N_CALLS_CROSSED (i) == 1)
4312 fprintf (file, "; crosses 1 call");
4313 else if (REG_N_CALLS_CROSSED (i))
4314 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4315 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4316 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4317 class = reg_preferred_class (i);
4318 altclass = reg_alternate_class (i);
4319 if (class != GENERAL_REGS || altclass != ALL_REGS)
4321 if (altclass == ALL_REGS || class == ALL_REGS)
4322 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4323 else if (altclass == NO_REGS)
4324 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4326 fprintf (file, "; pref %s, else %s",
4327 reg_class_names[(int) class],
4328 reg_class_names[(int) altclass]);
4330 if (REGNO_POINTER_FLAG (i))
4331 fprintf (file, "; pointer");
4332 fprintf (file, ".\n");
/* Then the CFG: each block's insn span, pred/succ edges, and the
   registers live at its start and end.  */
4335 fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
4336 for (i = 0; i < n_basic_blocks; i++)
4338 register basic_block bb = BASIC_BLOCK (i);
4342 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
4343 i, INSN_UID (bb->head), INSN_UID (bb->end));
4345 fprintf (file, "Predecessors: ");
4346 for (e = bb->pred; e ; e = e->pred_next)
4347 dump_edge_info (file, e, 0);
4349 fprintf (file, "\nSuccessors: ");
4350 for (e = bb->succ; e ; e = e->succ_next)
4351 dump_edge_info (file, e, 1);
4353 fprintf (file, "\nRegisters live at start:");
4354 if (bb->global_live_at_start)
4356 for (regno = 0; regno < max_regno; regno++)
4357 if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4358 fprintf (file, " %d", regno);
4361 fprintf (file, " n/a");
4363 fprintf (file, "\nRegisters live at end:");
4364 if (bb->global_live_at_end)
4366 for (regno = 0; regno < max_regno; regno++)
4367 if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
4368 fprintf (file, " %d", regno);
4371 fprintf (file, " n/a");
/* Print to FILE a description of edge E: the block on its far side
   (ENTRY, EXIT, or a block index) followed by the names of any flag
   bits that are set.  DO_SUCC nonzero means E is being printed as a
   successor edge (so show its destination rather than its source).  */
4380 dump_edge_info (file, e, do_succ)
4385 basic_block side = (do_succ ? e->dest : e->src);
4387 if (side == ENTRY_BLOCK_PTR)
4388 fputs (" ENTRY", file);
4389 else if (side == EXIT_BLOCK_PTR)
4390 fputs (" EXIT", file);
4392 fprintf (file, " %d", side->index);
4396 static char * bitnames[] = {
4397 "fallthru", "crit", "ab", "abcall", "eh", "fake"
4400 int i, flags = e->flags;
/* Print the name of each set flag bit, or its bit number when it is
   beyond the names we know about.  */
4404 for (i = 0; flags; i++)
4405 if (flags & (1 << i))
4411 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
4412 fputs (bitnames[i], file);
4414 fprintf (file, "%d", i);
4422 /* Like print_rtl, but also print out live information for the start of each
basic block.  */
4426 print_rtl_with_bb (outf, rtx_first)
4430 register rtx tmp_rtx;
4433 fprintf (outf, "(nil)\n");
4437 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
4438 int max_uid = get_max_uid ();
4439 basic_block *start = (basic_block *)
4440 alloca (max_uid * sizeof (basic_block));
4441 basic_block *end = (basic_block *)
4442 alloca (max_uid * sizeof (basic_block));
4443 enum bb_state *in_bb_p = (enum bb_state *)
4444 alloca (max_uid * sizeof (enum bb_state));
4446 memset (start, 0, max_uid * sizeof (basic_block));
4447 memset (end, 0, max_uid * sizeof (basic_block));
4448 memset (in_bb_p, 0, max_uid * sizeof (enum bb_state));
/* Record, per insn uid, which block (if any) it starts or ends, and
   whether it is claimed by more than one block.  */
4450 for (i = n_basic_blocks - 1; i >= 0; i--)
4452 basic_block bb = BASIC_BLOCK (i);
4455 start[INSN_UID (bb->head)] = bb;
4456 end[INSN_UID (bb->end)] = bb;
4457 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
4459 enum bb_state state = IN_MULTIPLE_BB;
4460 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
4462 in_bb_p[INSN_UID(x)] = state;
/* Walk the whole insn chain, annotating block boundaries and
   anomalies around the normal print_rtl_single output.  */
4469 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
4474 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
4476 fprintf (outf, ";; Start of basic block %d, registers live:",
4479 EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
4481 fprintf (outf, " %d", i);
4482 if (i < FIRST_PSEUDO_REGISTER)
4483 fprintf (outf, " [%s]",
4489 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
4490 && GET_CODE (tmp_rtx) != NOTE
4491 && GET_CODE (tmp_rtx) != BARRIER
4493 fprintf (outf, ";; Insn is not within a basic block\n");
4494 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
4495 fprintf (outf, ";; Insn is in multiple basic blocks\n");
4497 did_output = print_rtl_single (outf, tmp_rtx);
4499 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
4500 fprintf (outf, ";; End of basic block %d\n", bb->index);
4509 /* Integer list support. */
4511 /* Allocate a node from list *HEAD_PTR. */
4514 alloc_int_list_node (head_ptr)
4515 int_list_block **head_ptr;
4517 struct int_list_block *first_blk = *head_ptr;
/* Start a fresh block of nodes when the current one is exhausted.  */
4519 if (first_blk == NULL || first_blk->nodes_left <= 0)
4521 first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
4522 first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
4523 first_blk->next = *head_ptr;
4524 *head_ptr = first_blk;
/* Hand out nodes from the end of the block downward.  */
4527 first_blk->nodes_left--;
4528 return &first_blk->nodes[first_blk->nodes_left];
4531 /* Pointer to head of predecessor/successor block list. */
4532 static int_list_block *pred_int_list_blocks;
4534 /* Add a new node to integer list LIST with value VAL.
4535 LIST is a pointer to a list object to allow for different implementations.
4536 If *LIST is initially NULL, the list is empty.
4537 The caller must not care whether the element is added to the front or
4538 to the end of the list (to allow for different implementations). */
4541 add_int_list_node (blk_list, list, val)
4542 int_list_block **blk_list;
/* Carve a node out of BLK_LIST's block allocator, then fill it in.  */
4546 int_list_ptr p = alloc_int_list_node (blk_list);
4554 /* Free the blocks of lists at BLK_LIST. */
4557 free_int_list (blk_list)
4558 int_list_block **blk_list;
4560 int_list_block *p, *next;
/* Release every block in the chain; NEXT is saved before freeing P.  */
4562 for (p = *blk_list; p != NULL; p = next)
4568 /* Mark list as empty for the next function we compile. */
4572 /* Predecessor/successor computation. */
4574 /* Mark PRED_BB a predecessor of SUCC_BB,
4575 and conversely SUCC_BB a successor of PRED_BB. */
4578 add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
4581 int_list_ptr *s_preds;
4582 int_list_ptr *s_succs;
/* The EXIT block has no slot in the pred arrays and the ENTRY block
   none in the succ arrays, so never index them with those values.  */
4586 if (succ_bb != EXIT_BLOCK)
4588 add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
4589 num_preds[succ_bb]++;
4591 if (pred_bb != ENTRY_BLOCK)
4593 add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
4594 num_succs[pred_bb]++;
4598 /* Convert edge lists into pred/succ lists for backward compatibility. */
4601 compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
4602 int_list_ptr *s_preds;
4603 int_list_ptr *s_succs;
4607 int i, n = n_basic_blocks;
4610 memset (s_preds, 0, n_basic_blocks * sizeof (int_list_ptr));
4611 memset (s_succs, 0, n_basic_blocks * sizeof (int_list_ptr));
4612 memset (num_preds, 0, n_basic_blocks * sizeof (int));
4613 memset (num_succs, 0, n_basic_blocks * sizeof (int));
/* Record an entry in the pred/succ lists for every CFG edge.  */
4615 for (i = 0; i < n; ++i)
4617 basic_block bb = BASIC_BLOCK (i);
4619 for (e = bb->succ; e ; e = e->succ_next)
4620 add_pred_succ (i, e->dest->index, s_preds, s_succs,
4621 num_preds, num_succs);
/* Edges leaving the entry block are not covered by the loop above.  */
4624 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
4625 add_pred_succ (ENTRY_BLOCK, e->dest->index, s_preds, s_succs,
4626 num_preds, num_succs);
/* Dump to FILE, for each basic block, the predecessor and successor
   lists PREDS and SUCCS, and (per LIVE_INFO -- its guard is not
   visible in this excerpt) the registers live at block start.  */
4630 dump_bb_data (file, preds, succs, live_info)
4632 int_list_ptr *preds;
4633 int_list_ptr *succs;
4639 fprintf (file, "BB data\n\n");
4640 for (bb = 0; bb < n_basic_blocks; bb++)
4642 fprintf (file, "BB %d, start %d, end %d\n", bb,
4643 INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
4644 fprintf (file, " preds:");
4645 for (p = preds[bb]; p != NULL; p = p->next)
4647 int pred_bb = INT_LIST_VAL (p);
4648 if (pred_bb == ENTRY_BLOCK)
4649 fprintf (file, " entry");
4651 fprintf (file, " %d", pred_bb);
4653 fprintf (file, "\n");
4654 fprintf (file, " succs:");
4655 for (p = succs[bb]; p != NULL; p = p->next)
4657 int succ_bb = INT_LIST_VAL (p);
4658 if (succ_bb == EXIT_BLOCK)
4659 fprintf (file, " exit");
4661 fprintf (file, " %d", succ_bb);
4666 fprintf (file, "\nRegisters live at start:");
4667 for (regno = 0; regno < max_regno; regno++)
4668 if (REGNO_REG_SET_P (BASIC_BLOCK (bb)->global_live_at_start, regno))
4669 fprintf (file, " %d", regno);
4670 fprintf (file, "\n");
4672 fprintf (file, "\n");
4674 fprintf (file, "\n");
4677 /* Free basic block data storage. */
/* (The function header is elided in this excerpt.)  Releases the node
   blocks backing the predecessor/successor integer lists.  */
4682 free_int_list (&pred_int_list_blocks);
4685 /* Compute dominator relationships. */
4687 compute_dominators (dominators, post_dominators, s_preds, s_succs)
4688 sbitmap *dominators;
4689 sbitmap *post_dominators;
4690 int_list_ptr *s_preds;
4691 int_list_ptr *s_succs;
4693 int bb, changed, passes;
4694 sbitmap *temp_bitmap;
/* Start from "every block dominates every block" and refine.  */
4696 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4697 sbitmap_vector_ones (dominators, n_basic_blocks);
4698 sbitmap_vector_ones (post_dominators, n_basic_blocks);
4699 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
/* Seed the entry block: it is dominated only by itself.  */
4701 sbitmap_zero (dominators[0]);
4702 SET_BIT (dominators[0], 0);
4704 sbitmap_zero (post_dominators[n_basic_blocks - 1]);
4705 SET_BIT (post_dominators[n_basic_blocks - 1], 0);
/* NOTE(review): bit 0 is used to seed the last block's post-dominator
   set; the symmetric choice would be bit n_basic_blocks - 1.
   Confirm this is intended.  */
/* Iterate to a fixed point: dom(bb) = {bb} union the intersection of
   dom over bb's predecessors, and dually for post-dominators over
   successors.  */
4712 for (bb = 1; bb < n_basic_blocks; bb++)
4714 sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
4716 SET_BIT (temp_bitmap[bb], bb);
4717 changed |= sbitmap_a_and_b (dominators[bb],
4720 sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
4722 SET_BIT (temp_bitmap[bb], bb);
4723 changed |= sbitmap_a_and_b (post_dominators[bb],
4724 post_dominators[bb],
4733 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
4736 compute_immediate_dominators (idom, dominators)
4738 sbitmap *dominators;
/* Strategy: start with tmp[b] = dom(b) - {b}; after subtracting each
   remaining dominator's own strict dominators, the single bit left in
   tmp[b] is b's immediate dominator.  */
4743 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4745 /* Begin with tmp(n) = dom(n) - { n }. */
4746 for (b = n_basic_blocks; --b >= 0; )
4748 sbitmap_copy (tmp[b], dominators[b]);
4749 RESET_BIT (tmp[b], b);
4752 /* Subtract out all of our dominator's dominators. */
4753 for (b = n_basic_blocks; --b >= 0; )
4755 sbitmap tmp_b = tmp[b];
4758 for (s = n_basic_blocks; --s >= 0; )
4759 if (TEST_BIT (tmp_b, s))
4760 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
4763 /* Find the one bit set in the bitmap and put it in the output array. */
4764 for (b = n_basic_blocks; --b >= 0; )
4767 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
4770 sbitmap_vector_free (tmp);
4773 /* Count for a single SET rtx, X. */
4776 count_reg_sets_1 (x)
4780 register rtx reg = SET_DEST (x);
4782 /* Find the register that's set/clobbered. */
4783 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4784 || GET_CODE (reg) == SIGN_EXTRACT
4785 || GET_CODE (reg) == STRICT_LOW_PART)
4786 reg = XEXP (reg, 0);
/* A BLKmode PARALLEL destination sets several places at once; count
   each element recursively.  */
4788 if (GET_CODE (reg) == PARALLEL
4789 && GET_MODE (reg) == BLKmode)
4792 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4793 count_reg_sets_1 (XVECEXP (reg, 0, i));
/* Only pseudo registers carry set/ref statistics.  */
4797 if (GET_CODE (reg) == REG)
4799 regno = REGNO (reg);
4800 if (regno >= FIRST_PSEUDO_REGISTER)
4802 /* Count (weighted) references, stores, etc. This counts a
4803 register twice if it is modified, but that is correct. */
4804 REG_N_SETS (regno)++;
4806 REG_N_REFS (regno) += loop_depth;
4811 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
4812 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
/* (Definition line elided in this excerpt.)  X is typically the
   PATTERN of an insn; a PARALLEL is scanned element by element.  */
4818 register RTX_CODE code = GET_CODE (x);
4820 if (code == SET || code == CLOBBER)
4821 count_reg_sets_1 (x);
4822 else if (code == PARALLEL)
4825 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4827 code = GET_CODE (XVECEXP (x, 0, i));
4828 if (code == SET || code == CLOBBER)
4829 count_reg_sets_1 (XVECEXP (x, 0, i));
4834 /* Increment REG_N_REFS by the current loop depth each register reference
found in X.  */
4838 count_reg_references (x)
4841 register RTX_CODE code;
4844 code = GET_CODE (x);
/* CLOBBER of a MEM still references the registers in its address.  */
4864 /* If we are clobbering a MEM, mark any registers inside the address
as being used.  */
4866 if (GET_CODE (XEXP (x, 0)) == MEM)
4867 count_reg_references (XEXP (XEXP (x, 0), 0));
4871 /* While we're here, optimize this case. */
4874 /* In case the SUBREG is not of a register, don't optimize */
4875 if (GET_CODE (x) != REG)
4877 count_reg_references (x);
4881 /* ... fall through ... */
/* Only pseudo registers carry reference statistics.  */
4884 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
4885 REG_N_REFS (REGNO (x)) += loop_depth;
4890 register rtx testreg = SET_DEST (x);
4893 /* If storing into MEM, don't show it as being used. But do
4894 show the address as being used. */
4895 if (GET_CODE (testreg) == MEM)
4897 count_reg_references (XEXP (testreg, 0));
4898 count_reg_references (SET_SRC (x));
4902 /* Storing in STRICT_LOW_PART is like storing in a reg
4903 in that this SET might be dead, so ignore it in TESTREG.
4904 but in some other ways it is like using the reg.
4906 Storing in a SUBREG or a bit field is like storing the entire
4907 register in that if the register's value is not used
4908 then this SET is not needed. */
4909 while (GET_CODE (testreg) == STRICT_LOW_PART
4910 || GET_CODE (testreg) == ZERO_EXTRACT
4911 || GET_CODE (testreg) == SIGN_EXTRACT
4912 || GET_CODE (testreg) == SUBREG)
4914 /* Modifying a single register in an alternate mode
4915 does not use any of the old value. But these other
4916 ways of storing in a register do use the old value. */
4917 if (GET_CODE (testreg) == SUBREG
4918 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4923 testreg = XEXP (testreg, 0);
4926 /* If this is a store into a register,
4927 recursively scan the value being stored. */
4929 if ((GET_CODE (testreg) == PARALLEL
4930 && GET_MODE (testreg) == BLKmode)
4931 || GET_CODE (testreg) == REG)
4933 count_reg_references (SET_SRC (x));
4935 count_reg_references (SET_DEST (x));
4945 /* Recursively scan the operands of this expression. */
4948 register char *fmt = GET_RTX_FORMAT (code);
4951 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4955 /* Tail recursive case: save a function call level. */
4961 count_reg_references (XEXP (x, i));
4963 else if (fmt[i] == 'E')
4966 for (j = 0; j < XVECLEN (x, i); j++)
4967 count_reg_references (XVECEXP (x, i, j));
4973 /* Recompute register set/reference counts immediately prior to register
allocation.
4976 This avoids problems with set/reference counts changing to/from values
4977 which have special meanings to the register allocators.
4979 Additionally, the reference counts are the primary component used by the
4980 register allocators to prioritize pseudos for allocation to hard regs.
4981 More accurate reference counts generally lead to better register allocation.
4983 F is the first insn to be scanned.
4984 LOOP_STEP denotes how much loop_depth should be incremented per
4985 loop nesting level in order to increase the ref count more for references
in a loop.
4988 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4989 possibly other information which is used by the register allocators. */
4992 recompute_reg_usage (f, loop_step)
4999 /* Clear out the old data. */
5000 max_reg = max_reg_num ();
5001 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5007 /* Scan each insn in the chain and count how many times each register is
5010 for (insn = f; insn; insn = NEXT_INSN (insn))
5012 /* Keep track of loop depth. */
5013 if (GET_CODE (insn) == NOTE)
5015 /* Look for loop boundaries. */
5016 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
5017 loop_depth -= loop_step;
5018 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
5019 loop_depth += loop_step;
5021 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
5022 Abort now rather than setting register status incorrectly. */
5023 if (loop_depth == 0)
5026 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5030 /* This call will increment REG_N_SETS for each SET or CLOBBER
5031 of a register in INSN. It will also increment REG_N_REFS
5032 by the loop depth for each set of a register in INSN. */
5033 count_reg_sets (PATTERN (insn));
5035 /* count_reg_sets does not detect autoincrement address modes, so
5036 detect them here by looking at the notes attached to INSN. */
5037 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5039 if (REG_NOTE_KIND (links) == REG_INC)
5040 /* Count (weighted) references, stores, etc. This counts a
5041 register twice if it is modified, but that is correct. */
5042 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5045 /* This call will increment REG_N_REFS by the current loop depth for
5046 each reference to a register in INSN. */
5047 count_reg_references (PATTERN (insn));
5049 /* count_reg_references will not include counts for arguments to
5050 function calls, so detect them here by examining the
5051 CALL_INSN_FUNCTION_USAGE data. */
5052 if (GET_CODE (insn) == CALL_INSN)
5056 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5058 note = XEXP (note, 1))
5059 if (GET_CODE (XEXP (note, 0)) == USE)
5060 count_reg_references (SET_DEST (XEXP (note, 0)));
5066 /* Record INSN's block as BB. */
/* Record that INSN belongs to basic block BB in the global
   basic_block_for_insn varray, growing the array on demand when
   INSN's uid is beyond the current size.  */
5069 set_block_for_insn (insn, bb)
5073 size_t uid = INSN_UID (insn);
/* Grow the map if this uid has never been recorded before.  */
5074 if (uid >= basic_block_for_insn->num_elements)
5078 /* Add one-eighth the size so we don't keep calling xrealloc. */
5079 new_size = uid + (uid + 7) / 8;
5081 VARRAY_GROW (basic_block_for_insn, new_size);
5083 VARRAY_BB (basic_block_for_insn, uid) = bb;
5086 /* Record INSN's block number as BB. */
5087 /* ??? This has got to go. */
/* Record INSN's block given its block NUMBER rather than the block
   pointer: translate the index with BASIC_BLOCK and delegate to
   set_block_for_insn.  */
5090 set_block_num (insn, bb)
5094 set_block_for_insn (insn, BASIC_BLOCK (bb));
5097 /* Verify the CFG consistency. This function checks some CFG invariants and
5098 aborts when something is wrong. Hope that this function will help to
5099 convert many optimization passes to preserve CFG consistency.
5101 Currently it does following checks:
5103 - test head/end pointers
5104 - overlapping of basic blocks
5105 - edge list correctness
5106 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5107 - tails of basic blocks (ensure that boundary is necessary)
5108 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5109 and NOTE_INSN_BASIC_BLOCK
5110 - check that all insns are in the basic blocks
5111 (except the switch handling code, barriers and notes)
5113 In future it can be extended check a lot of other stuff as well
5114 (reachability of basic blocks, life information, etc. etc.). */
5119 const int max_uid = get_max_uid ();
5120 const rtx rtx_first = get_insns ();
5121 basic_block *bb_info;
5125 bb_info = (basic_block *) alloca (max_uid * sizeof (basic_block));
5126 memset (bb_info, 0, max_uid * sizeof (basic_block));
5128 /* First pass check head/end pointers and set bb_info array used by
5130 for (i = n_basic_blocks - 1; i >= 0; i--)
5132 basic_block bb = BASIC_BLOCK (i);
5134 /* Check the head pointer and make sure that it is pointing into
5136 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5141 fatal ("verify_flow_info: Head insn %d for block %d not found in the insn stream.\n",
5142 INSN_UID (bb->head), bb->index);
5145 /* Check the end pointer and make sure that it is pointing into
5147 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5149 if (bb_info[INSN_UID (x)] != NULL)
5151 fatal ("verify_flow_info: Insn %d is in multiple basic blocks (%d and %d)",
5152 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5154 bb_info[INSN_UID (x)] = bb;
5161 fatal ("verify_flow_info: End insn %d for block %d not found in the insn stream.\n",
5162 INSN_UID (bb->end), bb->index);
5166 /* Now check the basic blocks (boundaries etc.) */
5167 for (i = n_basic_blocks - 1; i >= 0; i--)
5169 basic_block bb = BASIC_BLOCK (i);
5170 /* Check correctness of edge lists */
5178 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5180 fprintf (stderr, "Predecessor: ");
5181 dump_edge_info (stderr, e, 0);
5182 fprintf (stderr, "\nSuccessor: ");
5183 dump_edge_info (stderr, e, 1);
5187 if (e->dest != EXIT_BLOCK_PTR)
5189 edge e2 = e->dest->pred;
5190 while (e2 && e2 != e)
5194 fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
5206 fprintf (stderr, "verify_flow_info: Basic block %d pred edge is corrupted\n",
5208 fprintf (stderr, "Predecessor: ");
5209 dump_edge_info (stderr, e, 0);
5210 fprintf (stderr, "\nSuccessor: ");
5211 dump_edge_info (stderr, e, 1);
5215 if (e->src != ENTRY_BLOCK_PTR)
5217 edge e2 = e->src->succ;
5218 while (e2 && e2 != e)
5222 fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
5229 /* OK pointers are correct. Now check the header of basic
5230 block. It ought to contain optional CODE_LABEL followed
5231 by NOTE_BASIC_BLOCK. */
5233 if (GET_CODE (x) == CODE_LABEL)
5237 fatal ("verify_flow_info: Basic block contains only CODE_LABEL and no NOTE_INSN_BASIC_BLOCK note\n");
5241 if (GET_CODE (x) != NOTE
5242 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5243 || NOTE_BASIC_BLOCK (x) != bb)
5245 fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5251 /* Do checks for empty blocks here */
5258 if (GET_CODE (x) == NOTE
5259 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5261 fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d\n",
5262 INSN_UID (x), bb->index);
5268 if (GET_CODE (x) == JUMP_INSN
5269 || GET_CODE (x) == CODE_LABEL
5270 || GET_CODE (x) == BARRIER)
5272 fatal_insn ("verify_flow_info: Incorrect insn in the middle of basic block %d\n",
5284 if (!bb_info[INSN_UID (x)])
5286 switch (GET_CODE (x))
5293 /* An addr_vec is placed outside any basic block. */
5295 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5296 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5297 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5302 /* But in any case, non-deletable labels can appear anywhere. */
5306 fatal_insn ("verify_flow_info: Insn outside basic block\n", x);