1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
58 #include "hard-reg-set.h"
60 #include "insn-config.h"
61 #include "insn-flags.h"
62 #include "insn-attr.h"
69 /* ??? Eventually must record somehow the labels used by jumps
70 from nested functions. */
71 /* Pre-record the next or previous real insn for each label?
72 No, this pass is very fast anyway. */
73 /* Condense consecutive labels?
74 This would make life analysis faster, maybe. */
75 /* Optimize jump y; x: ... y: jumpif... x?
76 Don't know if it is worth bothering with. */
77 /* Optimize two cases of conditional jump to conditional jump?
78 This can never delete any instruction or make anything dead,
79 or even change what is live at any point.
80 So perhaps let combiner do it. */
82 /* Vector indexed by uid.
83 For each CODE_LABEL, index by its uid to get first unconditional jump
84 that jumps to the label.
85 For each JUMP_INSN, index by its uid to get the next unconditional jump
86 that jumps to the same label.
87 Element 0 is the start of a chain of all return insns.
88 (It is safe to use element 0 because insn uid 0 is not used. */
90 static rtx *jump_chain;
92 /* List of labels referred to from initializers.
93 These can never be deleted. */
96 /* Maximum index in jump_chain. */
98 static int max_jump_chain;
100 /* Set nonzero by jump_optimize if control can fall through
101 to the end of the function. */
104 /* Indicates whether death notes are significant in cross jump analysis.
105 Normally they are not significant, because of A and B jump to C,
106 and R dies in A, it must die in B. But this might not be true after
107 stack register conversion, and we must compare death notes in that
110 static int cross_jump_death_matters = 0;
112 static int init_label_info PROTO((rtx));
113 static void delete_barrier_successors PROTO((rtx));
114 static void mark_all_labels PROTO((rtx, int));
115 static rtx delete_unreferenced_labels PROTO((rtx));
116 static void delete_noop_moves PROTO((rtx));
117 static int calculate_can_reach_end PROTO((rtx, int, int));
118 static int duplicate_loop_exit_test PROTO((rtx));
119 static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
120 static void do_cross_jump PROTO((rtx, rtx, rtx));
121 static int jump_back_p PROTO((rtx, rtx));
122 static int tension_vector_labels PROTO((rtx, int));
123 static void mark_jump_label PROTO((rtx, rtx, int));
124 static void delete_computation PROTO((rtx));
125 static void delete_from_jump_chain PROTO((rtx));
126 static int delete_labelref_insn PROTO((rtx, rtx, int));
127 static void mark_modified_reg PROTO((rtx, rtx));
128 static void redirect_tablejump PROTO((rtx, rtx));
129 static void jump_optimize_1 PROTO ((rtx, int, int, int, int));
131 static rtx find_insert_position PROTO((rtx, rtx));
134 /* Main external entry point into the jump optimizer. See comments before
135 jump_optimize_1 for descriptions of the arguments. */
137 jump_optimize (f, cross_jump, noop_moves, after_regscan)
143 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0);
146 /* Alternate entry into the jump optimizer. This entry point only rebuilds
147 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
150 rebuild_jump_labels (f)
153 jump_optimize_1 (f, 0, 0, 0, 1);
157 /* Delete no-op jumps and optimize jumps to jumps
158 and jumps around jumps.
159 Delete unused labels and unreachable code.
161 If CROSS_JUMP is 1, detect matching code
162 before a jump and its destination and unify them.
163 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
165 If NOOP_MOVES is nonzero, delete no-op move insns.
167 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
168 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
170 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
171 and JUMP_LABEL field for jumping insns.
173 If `optimize' is zero, don't change any code,
174 just determine whether control drops off the end of the function.
175 This case occurs when we have -W and not -O.
176 It works because `delete_insn' checks the value of `optimize'
177 and refrains from actually deleting when that is 0. */
180 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
185 int mark_labels_only;
187 register rtx insn, next;
194 cross_jump_death_matters = (cross_jump == 2);
195 max_uid = init_label_info (f) + 1;
197 /* If we are performing cross jump optimizations, then initialize
198 tables mapping UIDs to EH regions to avoid incorrect movement
199 of insns from one EH region to another. */
200 if (flag_exceptions && cross_jump)
201 init_insn_eh_region (f, max_uid);
203 /* Leave some extra room for labels and duplicate exit test insns
205 max_jump_chain = max_uid * 14 / 10;
206 jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
207 bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));
209 mark_all_labels (f, cross_jump);
211 /* Keep track of labels used from static data;
212 they cannot ever be deleted. */
214 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
215 LABEL_NUSES (XEXP (insn, 0))++;
217 check_exception_handler_labels ();
219 /* Keep track of labels used for marking handlers for exception
220 regions; they cannot usually be deleted. */
222 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
223 LABEL_NUSES (XEXP (insn, 0))++;
225 delete_barrier_successors (f);
227 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
228 notes and recompute LABEL_NUSES. */
229 if (mark_labels_only)
232 exception_optimize ();
234 last_insn = delete_unreferenced_labels (f);
238 /* CAN_REACH_END is persistent for each function. Once set it should
239 not be cleared. This is especially true for the case where we
240 delete the NOTE_FUNCTION_END note. CAN_REACH_END is cleared by
241 the front-end before compiling each function. */
242 if (calculate_can_reach_end (last_insn, 1, 0))
245 /* Zero the "deleted" flag of all the "deleted" insns. */
246 for (insn = f; insn; insn = NEXT_INSN (insn))
247 INSN_DELETED_P (insn) = 0;
249 /* Show that the jump chain is not valid. */
257 /* If we fall through to the epilogue, see if we can insert a RETURN insn
258 in front of it. If the machine allows it at this point (we might be
259 after reload for a leaf routine), it will improve optimization for it
261 insn = get_last_insn ();
262 while (insn && GET_CODE (insn) == NOTE)
263 insn = PREV_INSN (insn);
265 if (insn && GET_CODE (insn) != BARRIER)
267 emit_jump_insn (gen_return ());
274 delete_noop_moves (f);
276 /* If we haven't yet gotten to reload and we have just run regscan,
277 delete any insn that sets a register that isn't used elsewhere.
278 This helps some of the optimizations below by having less insns
279 being jumped around. */
281 if (! reload_completed && after_regscan)
282 for (insn = f; insn; insn = next)
284 rtx set = single_set (insn);
286 next = NEXT_INSN (insn);
288 if (set && GET_CODE (SET_DEST (set)) == REG
289 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
290 && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
291 /* We use regno_last_note_uid so as not to delete the setting
292 of a reg that's used in notes. A subsequent optimization
293 might arrange to use that reg for real. */
294 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
295 && ! side_effects_p (SET_SRC (set))
296 && ! find_reg_note (insn, REG_RETVAL, 0))
300 /* Now iterate optimizing jumps until nothing changes over one pass. */
302 old_max_reg = max_reg_num ();
307 for (insn = f; insn; insn = next)
310 rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
312 int this_is_simplejump, this_is_condjump, reversep = 0;
313 int this_is_condjump_in_parallel;
316 /* If NOT the first iteration, if this is the last jump pass
317 (just before final), do the special peephole optimizations.
318 Avoiding the first iteration gives ordinary jump opts
319 a chance to work before peephole opts. */
321 if (reload_completed && !first && !flag_no_peephole)
322 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
326 /* That could have deleted some insns after INSN, so check now
327 what the following insn is. */
329 next = NEXT_INSN (insn);
331 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
332 jump. Try to optimize by duplicating the loop exit test if so.
333 This is only safe immediately after regscan, because it uses
334 the values of regno_first_uid and regno_last_uid. */
335 if (after_regscan && GET_CODE (insn) == NOTE
336 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
337 && (temp1 = next_nonnote_insn (insn)) != 0
338 && simplejump_p (temp1))
340 temp = PREV_INSN (insn);
341 if (duplicate_loop_exit_test (insn))
344 next = NEXT_INSN (temp);
349 if (GET_CODE (insn) != JUMP_INSN)
352 this_is_simplejump = simplejump_p (insn);
353 this_is_condjump = condjump_p (insn);
354 this_is_condjump_in_parallel = condjump_in_parallel_p (insn);
356 /* Tension the labels in dispatch tables. */
358 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
359 changed |= tension_vector_labels (PATTERN (insn), 0);
360 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
361 changed |= tension_vector_labels (PATTERN (insn), 1);
363 /* If a dispatch table always goes to the same place,
364 get rid of it and replace the insn that uses it. */
366 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
367 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
370 rtx pat = PATTERN (insn);
371 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
372 int len = XVECLEN (pat, diff_vec_p);
373 rtx dispatch = prev_real_insn (insn);
376 for (i = 0; i < len; i++)
377 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
378 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
383 && GET_CODE (dispatch) == JUMP_INSN
384 && JUMP_LABEL (dispatch) != 0
385 /* Don't mess with a casesi insn.
386 XXX according to the comment before computed_jump_p(),
387 all casesi insns should be a parallel of the jump
388 and a USE of a LABEL_REF. */
389 && ! ((set = single_set (dispatch)) != NULL
390 && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
391 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
393 redirect_tablejump (dispatch,
394 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
399 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
401 /* If a jump references the end of the function, try to turn
402 it into a RETURN insn, possibly a conditional one. */
403 if (JUMP_LABEL (insn)
404 && (next_active_insn (JUMP_LABEL (insn)) == 0
405 || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
407 changed |= redirect_jump (insn, NULL_RTX);
409 /* Detect jump to following insn. */
410 if (reallabelprev == insn && condjump_p (insn))
412 next = next_real_insn (JUMP_LABEL (insn));
418 /* If we have an unconditional jump preceded by a USE, try to put
419 the USE before the target and jump there. This simplifies many
420 of the optimizations below since we don't have to worry about
421 dealing with these USE insns. We only do this if the label
422 being branch to already has the identical USE or if code
423 never falls through to that label. */
425 if (this_is_simplejump
426 && (temp = prev_nonnote_insn (insn)) != 0
427 && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
428 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
429 && (GET_CODE (temp1) == BARRIER
430 || (GET_CODE (temp1) == INSN
431 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
432 /* Don't do this optimization if we have a loop containing only
433 the USE instruction, and the loop start label has a usage
434 count of 1. This is because we will redo this optimization
435 everytime through the outer loop, and jump opt will never
437 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
438 && temp2 == JUMP_LABEL (insn)
439 && LABEL_NUSES (temp2) == 1))
441 if (GET_CODE (temp1) == BARRIER)
443 emit_insn_after (PATTERN (temp), temp1);
444 temp1 = NEXT_INSN (temp1);
448 redirect_jump (insn, get_label_before (temp1));
449 reallabelprev = prev_real_insn (temp1);
453 /* Simplify if (...) x = a; else x = b; by converting it
454 to x = b; if (...) x = a;
455 if B is sufficiently simple, the test doesn't involve X,
456 and nothing in the test modifies B or X.
458 If we have small register classes, we also can't do this if X
461 If the "x = b;" insn has any REG_NOTES, we don't do this because
462 of the possibility that we are running after CSE and there is a
463 REG_EQUAL note that is only valid if the branch has already been
464 taken. If we move the insn with the REG_EQUAL note, we may
465 fold the comparison to always be false in a later CSE pass.
466 (We could also delete the REG_NOTES when moving the insn, but it
467 seems simpler to not move it.) An exception is that we can move
468 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
469 value is the same as "b".
471 INSN is the branch over the `else' part.
475 TEMP to the jump insn preceding "x = a;"
477 TEMP2 to the insn that sets "x = b;"
478 TEMP3 to the insn that sets "x = a;"
479 TEMP4 to the set of "x = b"; */
481 if (this_is_simplejump
482 && (temp3 = prev_active_insn (insn)) != 0
483 && GET_CODE (temp3) == INSN
484 && (temp4 = single_set (temp3)) != 0
485 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
486 && (! SMALL_REGISTER_CLASSES
487 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
488 && (temp2 = next_active_insn (insn)) != 0
489 && GET_CODE (temp2) == INSN
490 && (temp4 = single_set (temp2)) != 0
491 && rtx_equal_p (SET_DEST (temp4), temp1)
492 && ! side_effects_p (SET_SRC (temp4))
493 && ! may_trap_p (SET_SRC (temp4))
494 && (REG_NOTES (temp2) == 0
495 || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
496 || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
497 && XEXP (REG_NOTES (temp2), 1) == 0
498 && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
500 && (temp = prev_active_insn (temp3)) != 0
501 && condjump_p (temp) && ! simplejump_p (temp)
502 /* TEMP must skip over the "x = a;" insn */
503 && prev_real_insn (JUMP_LABEL (temp)) == insn
504 && no_labels_between_p (insn, JUMP_LABEL (temp))
505 /* There must be no other entries to the "x = b;" insn. */
506 && no_labels_between_p (JUMP_LABEL (temp), temp2)
507 /* INSN must either branch to the insn after TEMP2 or the insn
508 after TEMP2 must branch to the same place as INSN. */
509 && (reallabelprev == temp2
510 || ((temp5 = next_active_insn (temp2)) != 0
511 && simplejump_p (temp5)
512 && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
514 /* The test expression, X, may be a complicated test with
515 multiple branches. See if we can find all the uses of
516 the label that TEMP branches to without hitting a CALL_INSN
517 or a jump to somewhere else. */
518 rtx target = JUMP_LABEL (temp);
519 int nuses = LABEL_NUSES (target);
525 /* Set P to the first jump insn that goes around "x = a;". */
526 for (p = temp; nuses && p; p = prev_nonnote_insn (p))
528 if (GET_CODE (p) == JUMP_INSN)
530 if (condjump_p (p) && ! simplejump_p (p)
531 && JUMP_LABEL (p) == target)
540 else if (GET_CODE (p) == CALL_INSN)
545 /* We cannot insert anything between a set of cc and its use
546 so if P uses cc0, we must back up to the previous insn. */
547 q = prev_nonnote_insn (p);
548 if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
549 && sets_cc0_p (PATTERN (q)))
556 /* If we found all the uses and there was no data conflict, we
557 can move the assignment unless we can branch into the middle
560 && no_labels_between_p (p, insn)
561 && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
562 && ! reg_set_between_p (temp1, p, temp3)
563 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
564 || ! modified_between_p (SET_SRC (temp4), p, temp2))
565 /* Verify that registers used by the jump are not clobbered
566 by the instruction being moved. */
567 && ! regs_set_between_p (PATTERN (temp),
571 emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
574 /* Set NEXT to an insn that we know won't go away. */
575 next = next_active_insn (insn);
577 /* Delete the jump around the set. Note that we must do
578 this before we redirect the test jumps so that it won't
579 delete the code immediately following the assignment
580 we moved (which might be a jump). */
584 /* We either have two consecutive labels or a jump to
585 a jump, so adjust all the JUMP_INSNs to branch to where
587 for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
588 if (GET_CODE (p) == JUMP_INSN)
589 redirect_jump (p, target);
596 /* Simplify if (...) { x = a; goto l; } x = b; by converting it
597 to x = a; if (...) goto l; x = b;
598 if A is sufficiently simple, the test doesn't involve X,
599 and nothing in the test modifies A or X.
601 If we have small register classes, we also can't do this if X
604 If the "x = a;" insn has any REG_NOTES, we don't do this because
605 of the possibility that we are running after CSE and there is a
606 REG_EQUAL note that is only valid if the branch has already been
607 taken. If we move the insn with the REG_EQUAL note, we may
608 fold the comparison to always be false in a later CSE pass.
609 (We could also delete the REG_NOTES when moving the insn, but it
610 seems simpler to not move it.) An exception is that we can move
611 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
612 value is the same as "a".
618 TEMP to the jump insn preceding "x = a;"
620 TEMP2 to the insn that sets "x = b;"
621 TEMP3 to the insn that sets "x = a;"
622 TEMP4 to the set of "x = a"; */
624 if (this_is_simplejump
625 && (temp2 = next_active_insn (insn)) != 0
626 && GET_CODE (temp2) == INSN
627 && (temp4 = single_set (temp2)) != 0
628 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
629 && (! SMALL_REGISTER_CLASSES
630 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
631 && (temp3 = prev_active_insn (insn)) != 0
632 && GET_CODE (temp3) == INSN
633 && (temp4 = single_set (temp3)) != 0
634 && rtx_equal_p (SET_DEST (temp4), temp1)
635 && ! side_effects_p (SET_SRC (temp4))
636 && ! may_trap_p (SET_SRC (temp4))
637 && (REG_NOTES (temp3) == 0
638 || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
639 || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
640 && XEXP (REG_NOTES (temp3), 1) == 0
641 && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
643 && (temp = prev_active_insn (temp3)) != 0
644 && condjump_p (temp) && ! simplejump_p (temp)
645 /* TEMP must skip over the "x = a;" insn */
646 && prev_real_insn (JUMP_LABEL (temp)) == insn
647 && no_labels_between_p (temp, insn))
649 rtx prev_label = JUMP_LABEL (temp);
650 rtx insert_after = prev_nonnote_insn (temp);
653 /* We cannot insert anything between a set of cc and its use. */
654 if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
655 && sets_cc0_p (PATTERN (insert_after)))
656 insert_after = prev_nonnote_insn (insert_after);
658 ++LABEL_NUSES (prev_label);
661 && no_labels_between_p (insert_after, temp)
662 && ! reg_referenced_between_p (temp1, insert_after, temp3)
663 && ! reg_referenced_between_p (temp1, temp3,
665 && ! reg_set_between_p (temp1, insert_after, temp)
666 && ! modified_between_p (SET_SRC (temp4), insert_after, temp)
667 /* Verify that registers used by the jump are not clobbered
668 by the instruction being moved. */
669 && ! regs_set_between_p (PATTERN (temp),
672 && invert_jump (temp, JUMP_LABEL (insn)))
674 emit_insn_after_with_line_notes (PATTERN (temp3),
675 insert_after, temp3);
678 /* Set NEXT to an insn that we know won't go away. */
682 if (prev_label && --LABEL_NUSES (prev_label) == 0)
683 delete_insn (prev_label);
689 /* If we have if (...) x = exp; and branches are expensive,
690 EXP is a single insn, does not have any side effects, cannot
691 trap, and is not too costly, convert this to
692 t = exp; if (...) x = t;
694 Don't do this when we have CC0 because it is unlikely to help
695 and we'd need to worry about where to place the new insn and
696 the potential for conflicts. We also can't do this when we have
697 notes on the insn for the same reason as above.
701 TEMP to the "x = exp;" insn.
702 TEMP1 to the single set in the "x = exp;" insn.
705 if (! reload_completed
706 && this_is_condjump && ! this_is_simplejump
708 && (temp = next_nonnote_insn (insn)) != 0
709 && GET_CODE (temp) == INSN
710 && REG_NOTES (temp) == 0
711 && (reallabelprev == temp
712 || ((temp2 = next_active_insn (temp)) != 0
713 && simplejump_p (temp2)
714 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
715 && (temp1 = single_set (temp)) != 0
716 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
717 && (! SMALL_REGISTER_CLASSES
718 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
719 && GET_CODE (SET_SRC (temp1)) != REG
720 && GET_CODE (SET_SRC (temp1)) != SUBREG
721 && GET_CODE (SET_SRC (temp1)) != CONST_INT
722 && ! side_effects_p (SET_SRC (temp1))
723 && ! may_trap_p (SET_SRC (temp1))
724 && rtx_cost (SET_SRC (temp1), SET) < 10)
726 rtx new = gen_reg_rtx (GET_MODE (temp2));
728 if ((temp3 = find_insert_position (insn, temp))
729 && validate_change (temp, &SET_DEST (temp1), new, 0))
731 next = emit_insn_after (gen_move_insn (temp2, new), insn);
732 emit_insn_after_with_line_notes (PATTERN (temp),
733 PREV_INSN (temp3), temp);
735 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
739 reg_scan_update (temp3, NEXT_INSN (next), old_max_reg);
740 old_max_reg = max_reg_num ();
745 /* Similarly, if it takes two insns to compute EXP but they
746 have the same destination. Here TEMP3 will be the second
747 insn and TEMP4 the SET from that insn. */
749 if (! reload_completed
750 && this_is_condjump && ! this_is_simplejump
752 && (temp = next_nonnote_insn (insn)) != 0
753 && GET_CODE (temp) == INSN
754 && REG_NOTES (temp) == 0
755 && (temp3 = next_nonnote_insn (temp)) != 0
756 && GET_CODE (temp3) == INSN
757 && REG_NOTES (temp3) == 0
758 && (reallabelprev == temp3
759 || ((temp2 = next_active_insn (temp3)) != 0
760 && simplejump_p (temp2)
761 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
762 && (temp1 = single_set (temp)) != 0
763 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
764 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
765 && (! SMALL_REGISTER_CLASSES
766 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
767 && ! side_effects_p (SET_SRC (temp1))
768 && ! may_trap_p (SET_SRC (temp1))
769 && rtx_cost (SET_SRC (temp1), SET) < 10
770 && (temp4 = single_set (temp3)) != 0
771 && rtx_equal_p (SET_DEST (temp4), temp2)
772 && ! side_effects_p (SET_SRC (temp4))
773 && ! may_trap_p (SET_SRC (temp4))
774 && rtx_cost (SET_SRC (temp4), SET) < 10)
776 rtx new = gen_reg_rtx (GET_MODE (temp2));
778 if ((temp5 = find_insert_position (insn, temp))
779 && (temp6 = find_insert_position (insn, temp3))
780 && validate_change (temp, &SET_DEST (temp1), new, 0))
782 /* Use the earliest of temp5 and temp6. */
785 next = emit_insn_after (gen_move_insn (temp2, new), insn);
786 emit_insn_after_with_line_notes (PATTERN (temp),
787 PREV_INSN (temp6), temp);
788 emit_insn_after_with_line_notes
789 (replace_rtx (PATTERN (temp3), temp2, new),
790 PREV_INSN (temp6), temp3);
793 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
797 reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
798 old_max_reg = max_reg_num ();
803 /* Finally, handle the case where two insns are used to
804 compute EXP but a temporary register is used. Here we must
805 ensure that the temporary register is not used anywhere else. */
807 if (! reload_completed
809 && this_is_condjump && ! this_is_simplejump
811 && (temp = next_nonnote_insn (insn)) != 0
812 && GET_CODE (temp) == INSN
813 && REG_NOTES (temp) == 0
814 && (temp3 = next_nonnote_insn (temp)) != 0
815 && GET_CODE (temp3) == INSN
816 && REG_NOTES (temp3) == 0
817 && (reallabelprev == temp3
818 || ((temp2 = next_active_insn (temp3)) != 0
819 && simplejump_p (temp2)
820 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
821 && (temp1 = single_set (temp)) != 0
822 && (temp5 = SET_DEST (temp1),
823 (GET_CODE (temp5) == REG
824 || (GET_CODE (temp5) == SUBREG
825 && (temp5 = SUBREG_REG (temp5),
826 GET_CODE (temp5) == REG))))
827 && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
828 && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
829 && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
830 && ! side_effects_p (SET_SRC (temp1))
831 && ! may_trap_p (SET_SRC (temp1))
832 && rtx_cost (SET_SRC (temp1), SET) < 10
833 && (temp4 = single_set (temp3)) != 0
834 && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
835 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
836 && (! SMALL_REGISTER_CLASSES
837 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
838 && rtx_equal_p (SET_DEST (temp4), temp2)
839 && ! side_effects_p (SET_SRC (temp4))
840 && ! may_trap_p (SET_SRC (temp4))
841 && rtx_cost (SET_SRC (temp4), SET) < 10)
843 rtx new = gen_reg_rtx (GET_MODE (temp2));
845 if ((temp5 = find_insert_position (insn, temp))
846 && (temp6 = find_insert_position (insn, temp3))
847 && validate_change (temp3, &SET_DEST (temp4), new, 0))
849 /* Use the earliest of temp5 and temp6. */
852 next = emit_insn_after (gen_move_insn (temp2, new), insn);
853 emit_insn_after_with_line_notes (PATTERN (temp),
854 PREV_INSN (temp6), temp);
855 emit_insn_after_with_line_notes (PATTERN (temp3),
856 PREV_INSN (temp6), temp3);
859 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
863 reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
864 old_max_reg = max_reg_num ();
868 #endif /* HAVE_cc0 */
870 /* Try to use a conditional move (if the target has them), or a
871 store-flag insn. The general case is:
873 1) x = a; if (...) x = b; and
876 If the jump would be faster, the machine should not have defined
877 the movcc or scc insns!. These cases are often made by the
878 previous optimization.
880 The second case is treated as x = x; if (...) x = b;.
882 INSN here is the jump around the store. We set:
884 TEMP to the "x = b;" insn.
887 TEMP3 to A (X in the second case).
888 TEMP4 to the condition being tested.
889 TEMP5 to the earliest insn used to find the condition. */
891 if (/* We can't do this after reload has completed. */
893 && this_is_condjump && ! this_is_simplejump
894 /* Set TEMP to the "x = b;" insn. */
895 && (temp = next_nonnote_insn (insn)) != 0
896 && GET_CODE (temp) == INSN
897 && GET_CODE (PATTERN (temp)) == SET
898 && GET_CODE (temp1 = SET_DEST (PATTERN (temp))) == REG
899 && (! SMALL_REGISTER_CLASSES
900 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
901 && ! side_effects_p (temp2 = SET_SRC (PATTERN (temp)))
902 && ! may_trap_p (temp2)
903 /* Allow either form, but prefer the former if both apply.
904 There is no point in using the old value of TEMP1 if
905 it is a register, since cse will alias them. It can
906 lose if the old value were a hard register since CSE
907 won't replace hard registers. Avoid using TEMP3 if
908 small register classes and it is a hard register. */
909 && (((temp3 = reg_set_last (temp1, insn)) != 0
910 && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
911 && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
912 /* Make the latter case look like x = x; if (...) x = b; */
913 || (temp3 = temp1, 1))
914 /* INSN must either branch to the insn after TEMP or the insn
915 after TEMP must branch to the same place as INSN. */
916 && (reallabelprev == temp
917 || ((temp4 = next_active_insn (temp)) != 0
918 && simplejump_p (temp4)
919 && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
920 && (temp4 = get_condition (insn, &temp5)) != 0
921 /* We must be comparing objects whose modes imply the size.
922 We could handle BLKmode if (1) emit_store_flag could
923 and (2) we could find the size reliably. */
924 && GET_MODE (XEXP (temp4, 0)) != BLKmode
925 /* Even if branches are cheap, the store_flag optimization
926 can win when the operation to be performed can be
927 expressed directly. */
929 /* If the previous insn sets CC0 and something else, we can't
930 do this since we are going to delete that insn. */
932 && ! ((temp6 = prev_nonnote_insn (insn)) != 0
933 && GET_CODE (temp6) == INSN
934 && (sets_cc0_p (PATTERN (temp6)) == -1
935 || (sets_cc0_p (PATTERN (temp6)) == 1
936 && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
940 #ifdef HAVE_conditional_move
941 /* First try a conditional move. */
943 enum rtx_code code = GET_CODE (temp4);
945 rtx cond0, cond1, aval, bval;
948 /* Copy the compared variables into cond0 and cond1, so that
949 any side effects performed in or after the old comparison,
950 will not affect our compare which will come later. */
951 /* ??? Is it possible to just use the comparison in the jump
952 insn? After all, we're going to delete it. We'd have
953 to modify emit_conditional_move to take a comparison rtx
954 instead or write a new function. */
955 cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
956 /* We want the target to be able to simplify comparisons with
957 zero (and maybe other constants as well), so don't create
958 pseudos for them. There's no need to either. */
959 if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
960 || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
961 cond1 = XEXP (temp4, 1);
963 cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));
969 target = emit_conditional_move (var, code,
970 cond0, cond1, VOIDmode,
971 aval, bval, GET_MODE (var),
972 (code == LTU || code == GEU
973 || code == LEU || code == GTU));
979 /* Save the conditional move sequence but don't emit it
980 yet. On some machines, like the alpha, it is possible
981 that temp5 == insn, so next generate the sequence that
982 saves the compared values and then emit both
983 sequences ensuring seq1 occurs before seq2. */
987 /* Now that we can't fail, generate the copy insns that
988 preserve the compared values. */
990 emit_move_insn (cond0, XEXP (temp4, 0));
991 if (cond1 != XEXP (temp4, 1))
992 emit_move_insn (cond1, XEXP (temp4, 1));
996 emit_insns_before (seq1, temp5);
997 /* Insert conditional move after insn, to be sure that
998 the jump and a possible compare won't be separated */
999 last = emit_insns_after (seq2, insn);
1001 /* ??? We can also delete the insn that sets X to A.
1002 Flow will do it too though. */
1004 next = NEXT_INSN (insn);
1009 reg_scan_update (seq1, NEXT_INSN (last), old_max_reg);
1010 old_max_reg = max_reg_num ();
1021 /* That didn't work, try a store-flag insn.
1023 We further divide the cases into:
1025 1) x = a; if (...) x = b; and either A or B is zero,
1026 2) if (...) x = 0; and jumps are expensive,
1027 3) x = a; if (...) x = b; and A and B are constants where all
1028 the set bits in A are also set in B and jumps are expensive,
1029 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
1031 5) if (...) x = b; if jumps are even more expensive. */
1033 if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
1034 && ((GET_CODE (temp3) == CONST_INT)
1035 /* Make the latter case look like
1036 x = x; if (...) x = 0; */
1039 && temp2 == const0_rtx)
1040 || BRANCH_COST >= 3)))
1041 /* If B is zero, OK; if A is zero, can only do (1) if we
1042 can reverse the condition. See if (3) applies possibly
1043 by reversing the condition. Prefer reversing to (4) when
1044 branches are very expensive. */
1045 && (((BRANCH_COST >= 2
1046 || STORE_FLAG_VALUE == -1
1047 || (STORE_FLAG_VALUE == 1
1048 /* Check that the mask is a power of two,
1049 so that it can probably be generated
1051 && GET_CODE (temp3) == CONST_INT
1052 && exact_log2 (INTVAL (temp3)) >= 0))
1053 && (reversep = 0, temp2 == const0_rtx))
1054 || ((BRANCH_COST >= 2
1055 || STORE_FLAG_VALUE == -1
1056 || (STORE_FLAG_VALUE == 1
1057 && GET_CODE (temp2) == CONST_INT
1058 && exact_log2 (INTVAL (temp2)) >= 0))
1059 && temp3 == const0_rtx
1060 && (reversep = can_reverse_comparison_p (temp4, insn)))
1061 || (BRANCH_COST >= 2
1062 && GET_CODE (temp2) == CONST_INT
1063 && GET_CODE (temp3) == CONST_INT
1064 && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
1065 || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
1066 && (reversep = can_reverse_comparison_p (temp4,
1068 || BRANCH_COST >= 3)
1071 enum rtx_code code = GET_CODE (temp4);
1072 rtx uval, cval, var = temp1;
1076 /* If necessary, reverse the condition. */
1078 code = reverse_condition (code), uval = temp2, cval = temp3;
1080 uval = temp3, cval = temp2;
1082 /* If CVAL is non-zero, normalize to -1. Otherwise, if UVAL
1083 is the constant 1, it is best to just compute the result
1084 directly. If UVAL is constant and STORE_FLAG_VALUE
1085 includes all of its bits, it is best to compute the flag
1086 value unnormalized and `and' it with UVAL. Otherwise,
1087 normalize to -1 and `and' with UVAL. */
1088 normalizep = (cval != const0_rtx ? -1
1089 : (uval == const1_rtx ? 1
1090 : (GET_CODE (uval) == CONST_INT
1091 && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
1094 /* We will be putting the store-flag insn immediately in
1095 front of the comparison that was originally being done,
1096 so we know all the variables in TEMP4 will be valid.
1097 However, this might be in front of the assignment of
1098 A to VAR. If it is, it would clobber the store-flag
1099 we will be emitting.
1101 Therefore, emit into a temporary which will be copied to
1102 VAR immediately after TEMP. */
1105 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
1106 XEXP (temp4, 0), XEXP (temp4, 1),
1108 (code == LTU || code == LEU
1109 || code == GEU || code == GTU),
1119 /* Put the store-flag insns in front of the first insn
1120 used to compute the condition to ensure that we
1121 use the same values of them as the current
1122 comparison. However, the remainder of the insns we
1123 generate will be placed directly in front of the
1124 jump insn, in case any of the pseudos we use
1125 are modified earlier. */
1127 emit_insns_before (seq, temp5);
1131 /* Both CVAL and UVAL are non-zero. */
1132 if (cval != const0_rtx && uval != const0_rtx)
1136 tem1 = expand_and (uval, target, NULL_RTX);
1137 if (GET_CODE (cval) == CONST_INT
1138 && GET_CODE (uval) == CONST_INT
1139 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
1143 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
1144 target, NULL_RTX, 0);
1145 tem2 = expand_and (cval, tem2,
1146 (GET_CODE (tem2) == REG
1150 /* If we usually make new pseudos, do so here. This
1151 turns out to help machines that have conditional
1153 /* ??? Conditional moves have already been handled.
1154 This may be obsolete. */
1156 if (flag_expensive_optimizations)
1159 target = expand_binop (GET_MODE (var), ior_optab,
1163 else if (normalizep != 1)
1165 /* We know that either CVAL or UVAL is zero. If
1166 UVAL is zero, negate TARGET and `and' with CVAL.
1167 Otherwise, `and' with UVAL. */
1168 if (uval == const0_rtx)
1170 target = expand_unop (GET_MODE (var), one_cmpl_optab,
1171 target, NULL_RTX, 0);
1175 target = expand_and (uval, target,
1176 (GET_CODE (target) == REG
1177 && ! preserve_subexpressions_p ()
1178 ? target : NULL_RTX));
1181 emit_move_insn (var, target);
1185 /* If INSN uses CC0, we must not separate it from the
1186 insn that sets cc0. */
1187 if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
1188 before = prev_nonnote_insn (before);
1190 emit_insns_before (seq, before);
1193 next = NEXT_INSN (insn);
1198 reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
1199 old_max_reg = max_reg_num ();
1210 /* If branches are expensive, convert
1211 if (foo) bar++; to bar += (foo != 0);
1212 and similarly for "bar--;"
1214 INSN is the conditional branch around the arithmetic. We set:
1216 TEMP is the arithmetic insn.
1217 TEMP1 is the SET doing the arithmetic.
1218 TEMP2 is the operand being incremented or decremented.
1219 TEMP3 to the condition being tested.
1220 TEMP4 to the earliest insn used to find the condition. */
1222 if ((BRANCH_COST >= 2
1230 && ! reload_completed
1231 && this_is_condjump && ! this_is_simplejump
1232 && (temp = next_nonnote_insn (insn)) != 0
1233 && (temp1 = single_set (temp)) != 0
1234 && (temp2 = SET_DEST (temp1),
1235 GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
1236 && GET_CODE (SET_SRC (temp1)) == PLUS
1237 && (XEXP (SET_SRC (temp1), 1) == const1_rtx
1238 || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
1239 && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
1240 && ! side_effects_p (temp2)
1241 && ! may_trap_p (temp2)
1242 /* INSN must either branch to the insn after TEMP or the insn
1243 after TEMP must branch to the same place as INSN. */
1244 && (reallabelprev == temp
1245 || ((temp3 = next_active_insn (temp)) != 0
1246 && simplejump_p (temp3)
1247 && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
1248 && (temp3 = get_condition (insn, &temp4)) != 0
1249 /* We must be comparing objects whose modes imply the size.
1250 We could handle BLKmode if (1) emit_store_flag could
1251 and (2) we could find the size reliably. */
1252 && GET_MODE (XEXP (temp3, 0)) != BLKmode
1253 && can_reverse_comparison_p (temp3, insn))
1255 rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
1256 enum rtx_code code = reverse_condition (GET_CODE (temp3));
1260 /* It must be the case that TEMP2 is not modified in the range
1261 [TEMP4, INSN). The one exception we make is if the insn
1262 before INSN sets TEMP2 to something which is also unchanged
1263 in that range. In that case, we can move the initialization
1264 into our sequence. */
1266 if ((temp5 = prev_active_insn (insn)) != 0
1267 && no_labels_between_p (temp5, insn)
1268 && GET_CODE (temp5) == INSN
1269 && (temp6 = single_set (temp5)) != 0
1270 && rtx_equal_p (temp2, SET_DEST (temp6))
1271 && (CONSTANT_P (SET_SRC (temp6))
1272 || GET_CODE (SET_SRC (temp6)) == REG
1273 || GET_CODE (SET_SRC (temp6)) == SUBREG))
1275 emit_insn (PATTERN (temp5));
1277 init = SET_SRC (temp6);
1280 if (CONSTANT_P (init)
1281 || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
1282 target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
1283 XEXP (temp3, 0), XEXP (temp3, 1),
1285 (code == LTU || code == LEU
1286 || code == GTU || code == GEU), 1);
1288 /* If we can do the store-flag, do the addition or
1292 target = expand_binop (GET_MODE (temp2),
1293 (XEXP (SET_SRC (temp1), 1) == const1_rtx
1294 ? add_optab : sub_optab),
1295 temp2, target, temp2, 0, OPTAB_WIDEN);
1299 /* Put the result back in temp2 in case it isn't already.
1300 Then replace the jump, possible a CC0-setting insn in
1301 front of the jump, and TEMP, with the sequence we have
1304 if (target != temp2)
1305 emit_move_insn (temp2, target);
1310 emit_insns_before (seq, temp4);
1314 delete_insn (init_insn);
1316 next = NEXT_INSN (insn);
1318 delete_insn (prev_nonnote_insn (insn));
1324 reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
1325 old_max_reg = max_reg_num ();
1335 /* Simplify if (...) x = 1; else {...} if (x) ...
1336 We recognize this case scanning backwards as well.
1338 TEMP is the assignment to x;
1339 TEMP1 is the label at the head of the second if. */
1340 /* ?? This should call get_condition to find the values being
1341 compared, instead of looking for a COMPARE insn when HAVE_cc0
1342 is not defined. This would allow it to work on the m88k. */
1343 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1344 is not defined and the condition is tested by a separate compare
1345 insn. This is because the code below assumes that the result
1346 of the compare dies in the following branch.
1348 Not only that, but there might be other insns between the
1349 compare and branch whose results are live. Those insns need
1352 A way to fix this is to move the insns at JUMP_LABEL (insn)
1353 to before INSN. If we are running before flow, they will
1354 be deleted if they aren't needed. But this doesn't work
1357 This is really a special-case of jump threading, anyway. The
1358 right thing to do is to replace this and jump threading with
1359 much simpler code in cse.
1361 This code has been turned off in the non-cc0 case in the
1365 else if (this_is_simplejump
1366 /* Safe to skip USE and CLOBBER insns here
1367 since they will not be deleted. */
1368 && (temp = prev_active_insn (insn))
1369 && no_labels_between_p (temp, insn)
1370 && GET_CODE (temp) == INSN
1371 && GET_CODE (PATTERN (temp)) == SET
1372 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1373 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1374 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1375 /* If we find that the next value tested is `x'
1376 (TEMP1 is the insn where this happens), win. */
1377 && GET_CODE (temp1) == INSN
1378 && GET_CODE (PATTERN (temp1)) == SET
1380 /* Does temp1 `tst' the value of x? */
1381 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1382 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1383 && (temp1 = next_nonnote_insn (temp1))
1385 /* Does temp1 compare the value of x against zero? */
1386 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1387 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1388 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1389 == SET_DEST (PATTERN (temp)))
1390 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1391 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1393 && condjump_p (temp1))
1395 /* Get the if_then_else from the condjump. */
1396 rtx choice = SET_SRC (PATTERN (temp1));
1397 if (GET_CODE (choice) == IF_THEN_ELSE)
1399 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1400 rtx val = SET_SRC (PATTERN (temp));
1402 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1406 if (cond == const_true_rtx)
1407 ultimate = XEXP (choice, 1);
1408 else if (cond == const0_rtx)
1409 ultimate = XEXP (choice, 2);
1413 if (ultimate == pc_rtx)
1414 ultimate = get_label_after (temp1);
1415 else if (ultimate && GET_CODE (ultimate) != RETURN)
1416 ultimate = XEXP (ultimate, 0);
1418 if (ultimate && JUMP_LABEL(insn) != ultimate)
1419 changed |= redirect_jump (insn, ultimate);
1425 /* @@ This needs a bit of work before it will be right.
1427 Any type of comparison can be accepted for the first and
1428 second compare. When rewriting the first jump, we must
1429 compute the what conditions can reach label3, and use the
1430 appropriate code. We can not simply reverse/swap the code
1431 of the first jump. In some cases, the second jump must be
1435 < == converts to > ==
1436 < != converts to == >
1439 If the code is written to only accept an '==' test for the second
1440 compare, then all that needs to be done is to swap the condition
1441 of the first branch.
1443 It is questionable whether we want this optimization anyways,
1444 since if the user wrote code like this because he/she knew that
1445 the jump to label1 is taken most of the time, then rewriting
1446 this gives slower code. */
1447 /* @@ This should call get_condition to find the values being
1448 compared, instead of looking for a COMPARE insn when HAVE_cc0
1449 is not defined. This would allow it to work on the m88k. */
1450 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1451 is not defined and the condition is tested by a separate compare
1452 insn. This is because the code below assumes that the result
1453 of the compare dies in the following branch. */
1455 /* Simplify test a ~= b
1469 where ~= is an inequality, e.g. >, and ~~= is the swapped
1472 We recognize this case scanning backwards.
1474 TEMP is the conditional jump to `label2';
1475 TEMP1 is the test for `a == b';
1476 TEMP2 is the conditional jump to `label1';
1477 TEMP3 is the test for `a ~= b'. */
1478 else if (this_is_simplejump
1479 && (temp = prev_active_insn (insn))
1480 && no_labels_between_p (temp, insn)
1481 && condjump_p (temp)
1482 && (temp1 = prev_active_insn (temp))
1483 && no_labels_between_p (temp1, temp)
1484 && GET_CODE (temp1) == INSN
1485 && GET_CODE (PATTERN (temp1)) == SET
1487 && sets_cc0_p (PATTERN (temp1)) == 1
1489 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1490 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1491 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1493 && (temp2 = prev_active_insn (temp1))
1494 && no_labels_between_p (temp2, temp1)
1495 && condjump_p (temp2)
1496 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1497 && (temp3 = prev_active_insn (temp2))
1498 && no_labels_between_p (temp3, temp2)
1499 && GET_CODE (PATTERN (temp3)) == SET
1500 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1501 SET_DEST (PATTERN (temp1)))
1502 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1503 SET_SRC (PATTERN (temp3)))
1504 && ! inequality_comparisons_p (PATTERN (temp))
1505 && inequality_comparisons_p (PATTERN (temp2)))
1507 rtx fallthrough_label = JUMP_LABEL (temp2);
1509 ++LABEL_NUSES (fallthrough_label);
1510 if (swap_jump (temp2, JUMP_LABEL (insn)))
1516 if (--LABEL_NUSES (fallthrough_label) == 0)
1517 delete_insn (fallthrough_label);
1520 /* Simplify if (...) {... x = 1;} if (x) ...
1522 We recognize this case backwards.
1524 TEMP is the test of `x';
1525 TEMP1 is the assignment to `x' at the end of the
1526 previous statement. */
1527 /* @@ This should call get_condition to find the values being
1528 compared, instead of looking for a COMPARE insn when HAVE_cc0
1529 is not defined. This would allow it to work on the m88k. */
1530 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1531 is not defined and the condition is tested by a separate compare
1532 insn. This is because the code below assumes that the result
1533 of the compare dies in the following branch. */
1535 /* ??? This has to be turned off. The problem is that the
1536 unconditional jump might indirectly end up branching to the
1537 label between TEMP1 and TEMP. We can't detect this, in general,
1538 since it may become a jump to there after further optimizations.
1539 If that jump is done, it will be deleted, so we will retry
1540 this optimization in the next pass, thus an infinite loop.
1542 The present code prevents this by putting the jump after the
1543 label, but this is not logically correct. */
1545 else if (this_is_condjump
1546 /* Safe to skip USE and CLOBBER insns here
1547 since they will not be deleted. */
1548 && (temp = prev_active_insn (insn))
1549 && no_labels_between_p (temp, insn)
1550 && GET_CODE (temp) == INSN
1551 && GET_CODE (PATTERN (temp)) == SET
1553 && sets_cc0_p (PATTERN (temp)) == 1
1554 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1556 /* Temp must be a compare insn, we can not accept a register
1557 to register move here, since it may not be simply a
1559 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1560 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1561 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1562 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1563 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1565 /* May skip USE or CLOBBER insns here
1566 for checking for opportunity, since we
1567 take care of them later. */
1568 && (temp1 = prev_active_insn (temp))
1569 && GET_CODE (temp1) == INSN
1570 && GET_CODE (PATTERN (temp1)) == SET
1572 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1574 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1575 == SET_DEST (PATTERN (temp1)))
1577 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1578 /* If this isn't true, cse will do the job. */
1579 && ! no_labels_between_p (temp1, temp))
1581 /* Get the if_then_else from the condjump. */
1582 rtx choice = SET_SRC (PATTERN (insn));
1583 if (GET_CODE (choice) == IF_THEN_ELSE
1584 && (GET_CODE (XEXP (choice, 0)) == EQ
1585 || GET_CODE (XEXP (choice, 0)) == NE))
1587 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1592 /* Get the place that condjump will jump to
1593 if it is reached from here. */
1594 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1596 ultimate = XEXP (choice, 1);
1598 ultimate = XEXP (choice, 2);
1599 /* Get it as a CODE_LABEL. */
1600 if (ultimate == pc_rtx)
1601 ultimate = get_label_after (insn);
1603 /* Get the label out of the LABEL_REF. */
1604 ultimate = XEXP (ultimate, 0);
1606 /* Insert the jump immediately before TEMP, specifically
1607 after the label that is between TEMP1 and TEMP. */
1608 last_insn = PREV_INSN (temp);
1610 /* If we would be branching to the next insn, the jump
1611 would immediately be deleted and the re-inserted in
1612 a subsequent pass over the code. So don't do anything
1614 if (next_active_insn (last_insn)
1615 != next_active_insn (ultimate))
1617 emit_barrier_after (last_insn);
1618 p = emit_jump_insn_after (gen_jump (ultimate),
1620 JUMP_LABEL (p) = ultimate;
1621 ++LABEL_NUSES (ultimate);
1622 if (INSN_UID (ultimate) < max_jump_chain
1623 && INSN_CODE (p) < max_jump_chain)
1625 jump_chain[INSN_UID (p)]
1626 = jump_chain[INSN_UID (ultimate)];
1627 jump_chain[INSN_UID (ultimate)] = p;
1635 /* Detect a conditional jump going to the same place
1636 as an immediately following unconditional jump. */
1637 else if (this_is_condjump
1638 && (temp = next_active_insn (insn)) != 0
1639 && simplejump_p (temp)
1640 && (next_active_insn (JUMP_LABEL (insn))
1641 == next_active_insn (JUMP_LABEL (temp))))
1645 /* ??? Optional. Disables some optimizations, but makes
1646 gcov output more accurate with -O. */
1647 if (flag_test_coverage && !reload_completed)
1648 for (tem = insn; tem != temp; tem = NEXT_INSN (tem))
1649 if (GET_CODE (tem) == NOTE && NOTE_LINE_NUMBER (tem) > 0)
1660 /* Detect a conditional jump jumping over an unconditional trap. */
1662 && this_is_condjump && ! this_is_simplejump
1663 && reallabelprev != 0
1664 && GET_CODE (reallabelprev) == INSN
1665 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1666 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1667 && prev_active_insn (reallabelprev) == insn
1668 && no_labels_between_p (insn, reallabelprev)
1669 && (temp2 = get_condition (insn, &temp4))
1670 && can_reverse_comparison_p (temp2, insn))
1672 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1673 XEXP (temp2, 0), XEXP (temp2, 1),
1674 TRAP_CODE (PATTERN (reallabelprev)));
1678 emit_insn_before (new, temp4);
1679 delete_insn (reallabelprev);
1685 /* Detect a jump jumping to an unconditional trap. */
1686 else if (HAVE_trap && this_is_condjump
1687 && (temp = next_active_insn (JUMP_LABEL (insn)))
1688 && GET_CODE (temp) == INSN
1689 && GET_CODE (PATTERN (temp)) == TRAP_IF
1690 && (this_is_simplejump
1691 || (temp2 = get_condition (insn, &temp4))))
1693 rtx tc = TRAP_CONDITION (PATTERN (temp));
1695 if (tc == const_true_rtx
1696 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1699 /* Replace an unconditional jump to a trap with a trap. */
1700 if (this_is_simplejump)
1702 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1707 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1709 TRAP_CODE (PATTERN (temp)));
1712 emit_insn_before (new, temp4);
1718 /* If the trap condition and jump condition are mutually
1719 exclusive, redirect the jump to the following insn. */
1720 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
1721 && ! this_is_simplejump
1722 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
1723 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
1724 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
1725 && redirect_jump (insn, get_label_after (temp)))
1733 /* Detect a conditional jump jumping over an unconditional jump. */
1735 else if ((this_is_condjump || this_is_condjump_in_parallel)
1736 && ! this_is_simplejump
1737 && reallabelprev != 0
1738 && GET_CODE (reallabelprev) == JUMP_INSN
1739 && prev_active_insn (reallabelprev) == insn
1740 && no_labels_between_p (insn, reallabelprev)
1741 && simplejump_p (reallabelprev))
1743 /* When we invert the unconditional jump, we will be
1744 decrementing the usage count of its old label.
1745 Make sure that we don't delete it now because that
1746 might cause the following code to be deleted. */
1747 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1748 rtx prev_label = JUMP_LABEL (insn);
1751 ++LABEL_NUSES (prev_label);
1753 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1755 /* It is very likely that if there are USE insns before
1756 this jump, they hold REG_DEAD notes. These REG_DEAD
1757 notes are no longer valid due to this optimization,
1758 and will cause the life-analysis that following passes
1759 (notably delayed-branch scheduling) to think that
1760 these registers are dead when they are not.
1762 To prevent this trouble, we just remove the USE insns
1763 from the insn chain. */
1765 while (prev_uses && GET_CODE (prev_uses) == INSN
1766 && GET_CODE (PATTERN (prev_uses)) == USE)
1768 rtx useless = prev_uses;
1769 prev_uses = prev_nonnote_insn (prev_uses);
1770 delete_insn (useless);
1773 delete_insn (reallabelprev);
1778 /* We can now safely delete the label if it is unreferenced
1779 since the delete_insn above has deleted the BARRIER. */
1780 if (prev_label && --LABEL_NUSES (prev_label) == 0)
1781 delete_insn (prev_label);
1786 /* Detect a jump to a jump. */
1788 nlabel = follow_jumps (JUMP_LABEL (insn));
1789 if (nlabel != JUMP_LABEL (insn)
1790 && redirect_jump (insn, nlabel))
1796 /* Look for if (foo) bar; else break; */
1797 /* The insns look like this:
1798 insn = condjump label1;
1799 ...range1 (some insns)...
1802 ...range2 (some insns)...
1803 jump somewhere unconditionally
1806 rtx label1 = next_label (insn);
1807 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1808 /* Don't do this optimization on the first round, so that
1809 jump-around-a-jump gets simplified before we ask here
1810 whether a jump is unconditional.
1812 Also don't do it when we are called after reload since
1813 it will confuse reorg. */
1815 && (reload_completed ? ! flag_delayed_branch : 1)
1816 /* Make sure INSN is something we can invert. */
1817 && condjump_p (insn)
1819 && JUMP_LABEL (insn) == label1
1820 && LABEL_NUSES (label1) == 1
1821 && GET_CODE (range1end) == JUMP_INSN
1822 && simplejump_p (range1end))
1824 rtx label2 = next_label (label1);
1825 rtx range2end = label2 ? prev_active_insn (label2) : 0;
1826 if (range1end != range2end
1827 && JUMP_LABEL (range1end) == label2
1828 && GET_CODE (range2end) == JUMP_INSN
1829 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
1830 /* Invert the jump condition, so we
1831 still execute the same insns in each case. */
1832 && invert_jump (insn, label1))
1834 rtx range1beg = next_active_insn (insn);
1835 rtx range2beg = next_active_insn (label1);
1836 rtx range1after, range2after;
1837 rtx range1before, range2before;
1840 /* Include in each range any notes before it, to be
1841 sure that we get the line number note if any, even
1842 if there are other notes here. */
1843 while (PREV_INSN (range1beg)
1844 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
1845 range1beg = PREV_INSN (range1beg);
1847 while (PREV_INSN (range2beg)
1848 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
1849 range2beg = PREV_INSN (range2beg);
1851 /* Don't move NOTEs for blocks or loops; shift them
1852 outside the ranges, where they'll stay put. */
1853 range1beg = squeeze_notes (range1beg, range1end);
1854 range2beg = squeeze_notes (range2beg, range2end);
1856 /* Get current surrounds of the 2 ranges. */
1857 range1before = PREV_INSN (range1beg);
1858 range2before = PREV_INSN (range2beg);
1859 range1after = NEXT_INSN (range1end);
1860 range2after = NEXT_INSN (range2end);
1862 /* Splice range2 where range1 was. */
1863 NEXT_INSN (range1before) = range2beg;
1864 PREV_INSN (range2beg) = range1before;
1865 NEXT_INSN (range2end) = range1after;
1866 PREV_INSN (range1after) = range2end;
1867 /* Splice range1 where range2 was. */
1868 NEXT_INSN (range2before) = range1beg;
1869 PREV_INSN (range1beg) = range2before;
1870 NEXT_INSN (range1end) = range2after;
1871 PREV_INSN (range2after) = range1end;
1873 /* Check for a loop end note between the end of
1874 range2, and the next code label. If there is one,
1875 then what we have really seen is
1876 if (foo) break; end_of_loop;
1877 and moved the break sequence outside the loop.
1878 We must move the LOOP_END note to where the
1879 loop really ends now, or we will confuse loop
1880 optimization. Stop if we find a LOOP_BEG note
1881 first, since we don't want to move the LOOP_END
1882 note in that case. */
1883 for (;range2after != label2; range2after = rangenext)
1885 rangenext = NEXT_INSN (range2after);
1886 if (GET_CODE (range2after) == NOTE)
1888 if (NOTE_LINE_NUMBER (range2after)
1889 == NOTE_INSN_LOOP_END)
1891 NEXT_INSN (PREV_INSN (range2after))
1893 PREV_INSN (rangenext)
1894 = PREV_INSN (range2after);
1895 PREV_INSN (range2after)
1896 = PREV_INSN (range1beg);
1897 NEXT_INSN (range2after) = range1beg;
1898 NEXT_INSN (PREV_INSN (range1beg))
1900 PREV_INSN (range1beg) = range2after;
1902 else if (NOTE_LINE_NUMBER (range2after)
1903 == NOTE_INSN_LOOP_BEG)
1913 /* Now that the jump has been tensioned,
1914 try cross jumping: check for identical code
1915 before the jump and before its target label. */
1917 /* First, cross jumping of conditional jumps: */
1919 if (cross_jump && condjump_p (insn))
1921 rtx newjpos, newlpos;
1922 rtx x = prev_real_insn (JUMP_LABEL (insn));
1924 /* A conditional jump may be crossjumped
1925 only if the place it jumps to follows
1926 an opposing jump that comes back here. */
1928 if (x != 0 && ! jump_back_p (x, insn))
1929 /* We have no opposing jump;
1930 cannot cross jump this insn. */
1934 /* TARGET is nonzero if it is ok to cross jump
1935 to code before TARGET. If so, see if matches. */
1937 find_cross_jump (insn, x, 2,
1938 &newjpos, &newlpos);
1942 do_cross_jump (insn, newjpos, newlpos);
1943 /* Make the old conditional jump
1944 into an unconditional one. */
1945 SET_SRC (PATTERN (insn))
1946 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
1947 INSN_CODE (insn) = -1;
1948 emit_barrier_after (insn);
1949 /* Add to jump_chain unless this is a new label
1950 whose UID is too large. */
1951 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
1953 jump_chain[INSN_UID (insn)]
1954 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1955 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
1962 /* Cross jumping of unconditional jumps:
1963 a few differences. */
1965 if (cross_jump && simplejump_p (insn))
1967 rtx newjpos, newlpos;
1972 /* TARGET is nonzero if it is ok to cross jump
1973 to code before TARGET. If so, see if matches. */
1974 find_cross_jump (insn, JUMP_LABEL (insn), 1,
1975 &newjpos, &newlpos);
1977 /* If cannot cross jump to code before the label,
1978 see if we can cross jump to another jump to
1980 /* Try each other jump to this label. */
1981 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
1982 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1983 target != 0 && newjpos == 0;
1984 target = jump_chain[INSN_UID (target)])
1986 && JUMP_LABEL (target) == JUMP_LABEL (insn)
1987 /* Ignore TARGET if it's deleted. */
1988 && ! INSN_DELETED_P (target))
1989 find_cross_jump (insn, target, 2,
1990 &newjpos, &newlpos);
1994 do_cross_jump (insn, newjpos, newlpos);
2000 /* This code was dead in the previous jump.c! */
2001 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2003 /* Return insns all "jump to the same place"
2004 so we can cross-jump between any two of them. */
2006 rtx newjpos, newlpos, target;
2010 /* If cannot cross jump to code before the label,
2011 see if we can cross jump to another jump to
2013 /* Try each other jump to this label. */
2014 for (target = jump_chain[0];
2015 target != 0 && newjpos == 0;
2016 target = jump_chain[INSN_UID (target)])
2018 && ! INSN_DELETED_P (target)
2019 && GET_CODE (PATTERN (target)) == RETURN)
2020 find_cross_jump (insn, target, 2,
2021 &newjpos, &newlpos);
2025 do_cross_jump (insn, newjpos, newlpos);
2036 /* Delete extraneous line number notes.
2037 Note that two consecutive notes for different lines are not really
2038 extraneous. There should be some indication where that line belonged,
2039 even if it became empty. */
2044 for (insn = f; insn; insn = NEXT_INSN (insn))
2045 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2047 /* Delete this note if it is identical to previous note. */
2049 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2050 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2063 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2064 in front of it. If the machine allows it at this point (we might be
2065 after reload for a leaf routine), it will improve optimization for it
2066 to be there. We do this both here and at the start of this pass since
2067 the RETURN might have been deleted by some of our optimizations. */
2068 insn = get_last_insn ();
2069 while (insn && GET_CODE (insn) == NOTE)
2070 insn = PREV_INSN (insn);
2072 if (insn && GET_CODE (insn) != BARRIER)
2074 emit_jump_insn (gen_return ());
2080 /* CAN_REACH_END is persistent for each function. Once set it should
2081 not be cleared. This is especially true for the case where we
2082 delete the NOTE_FUNCTION_END note. CAN_REACH_END is cleared by
2083 the front-end before compiling each function. */
2084 if (calculate_can_reach_end (last_insn, 0, 1))
2087 /* Show JUMP_CHAIN no longer valid. */
2091 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2092 notes whose labels don't occur in the insn any more. Returns the
2093 largest INSN_UID found. */
2098 int largest_uid = 0;
2101 for (insn = f; insn; insn = NEXT_INSN (insn))
2103 if (GET_CODE (insn) == CODE_LABEL)
2104 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
2105 else if (GET_CODE (insn) == JUMP_INSN)
2106 JUMP_LABEL (insn) = 0;
2107 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2111 for (note = REG_NOTES (insn); note; note = next)
2113 next = XEXP (note, 1);
2114 if (REG_NOTE_KIND (note) == REG_LABEL
2115 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
2116 remove_note (insn, note);
2119 if (INSN_UID (insn) > largest_uid)
2120 largest_uid = INSN_UID (insn);
2126 /* Delete insns following barriers, up to next label.
2128 Also delete no-op jumps created by gcse. */
2130 delete_barrier_successors (f)
2135 for (insn = f; insn;)
2137 if (GET_CODE (insn) == BARRIER)
2139 insn = NEXT_INSN (insn);
2140 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
2142 if (GET_CODE (insn) == JUMP_INSN)
2144 /* Detect when we're deleting a tablejump; get rid of
2145 the jump table as well. */
2146 rtx next1 = next_nonnote_insn (insn);
2147 rtx next2 = next1 ? next_nonnote_insn (next1) : 0;
2148 if (next2 && GET_CODE (next1) == CODE_LABEL
2149 && GET_CODE (next2) == JUMP_INSN
2150 && (GET_CODE (PATTERN (next2)) == ADDR_VEC
2151 || GET_CODE (PATTERN (next2)) == ADDR_DIFF_VEC))
2157 insn = delete_insn (insn);
2159 else if (GET_CODE (insn) == NOTE
2160 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2161 insn = NEXT_INSN (insn);
2163 insn = delete_insn (insn);
2165 /* INSN is now the code_label. */
2167 /* Also remove (set (pc) (pc)) insns which can be created by
2168 gcse. We eliminate such insns now to avoid having them
2169 cause problems later. */
2170 else if (GET_CODE (insn) == JUMP_INSN
2171 && SET_SRC (PATTERN (insn)) == pc_rtx
2172 && SET_DEST (PATTERN (insn)) == pc_rtx)
2173 insn = delete_insn (insn);
2176 insn = NEXT_INSN (insn);
2180 /* Mark the label each jump jumps to.
2181 Combine consecutive labels, and count uses of labels.
2183 For each label, make a chain (using `jump_chain')
2184 of all the *unconditional* jumps that jump to it;
2185 also make a chain of all returns.
2187 CROSS_JUMP indicates whether we are doing cross jumping
2188 and if we are whether we will be paying attention to
2189 death notes or not. */
2192 mark_all_labels (f, cross_jump)
2198 for (insn = f; insn; insn = NEXT_INSN (insn))
2199 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2201 mark_jump_label (PATTERN (insn), insn, cross_jump);
2202 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
2204 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
2206 jump_chain[INSN_UID (insn)]
2207 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2208 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2210 if (GET_CODE (PATTERN (insn)) == RETURN)
2212 jump_chain[INSN_UID (insn)] = jump_chain[0];
2213 jump_chain[0] = insn;
2219 /* Delete all labels already not referenced.
2220 Also find and return the last insn. */
2223 delete_unreferenced_labels (f)
2226 rtx final = NULL_RTX;
2229 for (insn = f; insn; )
2231 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
2232 insn = delete_insn (insn);
2236 insn = NEXT_INSN (insn);
2243 /* Delete various simple forms of moves which have no necessary
2247 delete_noop_moves (f)
2252 for (insn = f; insn; )
2254 next = NEXT_INSN (insn);
2256 if (GET_CODE (insn) == INSN)
2258 register rtx body = PATTERN (insn);
2260 /* Combine stack_adjusts with following push_insns. */
2261 #ifdef PUSH_ROUNDING
2262 if (GET_CODE (body) == SET
2263 && SET_DEST (body) == stack_pointer_rtx
2264 && GET_CODE (SET_SRC (body)) == PLUS
2265 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
2266 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
2267 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
2270 rtx stack_adjust_insn = insn;
2271 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
2272 int total_pushed = 0;
2275 /* Find all successive push insns. */
2277 /* Don't convert more than three pushes;
2278 that starts adding too many displaced addresses
2279 and the whole thing starts becoming a losing
2284 p = next_nonnote_insn (p);
2285 if (p == 0 || GET_CODE (p) != INSN)
2287 pbody = PATTERN (p);
2288 if (GET_CODE (pbody) != SET)
2290 dest = SET_DEST (pbody);
2291 /* Allow a no-op move between the adjust and the push. */
2292 if (GET_CODE (dest) == REG
2293 && GET_CODE (SET_SRC (pbody)) == REG
2294 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2296 if (! (GET_CODE (dest) == MEM
2297 && GET_CODE (XEXP (dest, 0)) == POST_INC
2298 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2301 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
2302 > stack_adjust_amount)
2304 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2307 /* Discard the amount pushed from the stack adjust;
2308 maybe eliminate it entirely. */
2309 if (total_pushed >= stack_adjust_amount)
2311 delete_computation (stack_adjust_insn);
2312 total_pushed = stack_adjust_amount;
2315 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
2316 = GEN_INT (stack_adjust_amount - total_pushed);
2318 /* Change the appropriate push insns to ordinary stores. */
2320 while (total_pushed > 0)
2323 p = next_nonnote_insn (p);
2324 if (GET_CODE (p) != INSN)
2326 pbody = PATTERN (p);
2327 if (GET_CODE (pbody) != SET)
2329 dest = SET_DEST (pbody);
2330 /* Allow a no-op move between the adjust and the push. */
2331 if (GET_CODE (dest) == REG
2332 && GET_CODE (SET_SRC (pbody)) == REG
2333 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2335 if (! (GET_CODE (dest) == MEM
2336 && GET_CODE (XEXP (dest, 0)) == POST_INC
2337 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2339 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2340 /* If this push doesn't fully fit in the space
2341 of the stack adjust that we deleted,
2342 make another stack adjust here for what we
2343 didn't use up. There should be peepholes
2344 to recognize the resulting sequence of insns. */
2345 if (total_pushed < 0)
2347 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
2348 GEN_INT (- total_pushed)),
2353 = plus_constant (stack_pointer_rtx, total_pushed);
2358 /* Detect and delete no-op move instructions
2359 resulting from not allocating a parameter in a register. */
2361 if (GET_CODE (body) == SET
2362 && (SET_DEST (body) == SET_SRC (body)
2363 || (GET_CODE (SET_DEST (body)) == MEM
2364 && GET_CODE (SET_SRC (body)) == MEM
2365 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
2366 && ! (GET_CODE (SET_DEST (body)) == MEM
2367 && MEM_VOLATILE_P (SET_DEST (body)))
2368 && ! (GET_CODE (SET_SRC (body)) == MEM
2369 && MEM_VOLATILE_P (SET_SRC (body))))
2370 delete_computation (insn);
2372 /* Detect and ignore no-op move instructions
2373 resulting from smart or fortuitous register allocation. */
2375 else if (GET_CODE (body) == SET)
2377 int sreg = true_regnum (SET_SRC (body));
2378 int dreg = true_regnum (SET_DEST (body));
2380 if (sreg == dreg && sreg >= 0)
2382 else if (sreg >= 0 && dreg >= 0)
2385 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
2386 sreg, NULL_PTR, dreg,
2387 GET_MODE (SET_SRC (body)));
2390 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
2392 /* DREG may have been the target of a REG_DEAD note in
2393 the insn which makes INSN redundant. If so, reorg
2394 would still think it is dead. So search for such a
2395 note and delete it if we find it. */
2396 if (! find_regno_note (insn, REG_UNUSED, dreg))
2397 for (trial = prev_nonnote_insn (insn);
2398 trial && GET_CODE (trial) != CODE_LABEL;
2399 trial = prev_nonnote_insn (trial))
2400 if (find_regno_note (trial, REG_DEAD, dreg))
2402 remove_death (dreg, trial);
2406 /* Deleting insn could lose a death-note for SREG. */
2407 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
2409 /* Change this into a USE so that we won't emit
2410 code for it, but still can keep the note. */
2412 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
2413 INSN_CODE (insn) = -1;
2414 /* Remove all reg notes but the REG_DEAD one. */
2415 REG_NOTES (insn) = trial;
2416 XEXP (trial, 1) = NULL_RTX;
2422 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
2423 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
2425 GET_MODE (SET_DEST (body))))
2427 /* This handles the case where we have two consecutive
2428 assignments of the same constant to pseudos that didn't
2429 get a hard reg. Each SET from the constant will be
2430 converted into a SET of the spill register and an
2431 output reload will be made following it. This produces
2432 two loads of the same constant into the same spill
2437 /* Look back for a death note for the first reg.
2438 If there is one, it is no longer accurate. */
2439 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
2441 if ((GET_CODE (in_insn) == INSN
2442 || GET_CODE (in_insn) == JUMP_INSN)
2443 && find_regno_note (in_insn, REG_DEAD, dreg))
2445 remove_death (dreg, in_insn);
2448 in_insn = PREV_INSN (in_insn);
2451 /* Delete the second load of the value. */
2455 else if (GET_CODE (body) == PARALLEL)
2457 /* If each part is a set between two identical registers or
2458 a USE or CLOBBER, delete the insn. */
2462 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2464 tem = XVECEXP (body, 0, i);
2465 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
2468 if (GET_CODE (tem) != SET
2469 || (sreg = true_regnum (SET_SRC (tem))) < 0
2470 || (dreg = true_regnum (SET_DEST (tem))) < 0
2478 /* Also delete insns to store bit fields if they are no-ops. */
2479 /* Not worth the hair to detect this in the big-endian case. */
2480 else if (! BYTES_BIG_ENDIAN
2481 && GET_CODE (body) == SET
2482 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
2483 && XEXP (SET_DEST (body), 2) == const0_rtx
2484 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
2485 && ! (GET_CODE (SET_SRC (body)) == MEM
2486 && MEM_VOLATILE_P (SET_SRC (body))))
2493 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2494 If so indicate that this function can drop off the end by returning
2497 CHECK_DELETED indicates whether we must check if the note being
2498 searched for has the deleted flag set.
2500 DELETE_FINAL_NOTE indicates whether we should delete the note
2504 calculate_can_reach_end (last, check_deleted, delete_final_note)
2507 int delete_final_note;
2512 while (insn != NULL_RTX)
2516 /* One label can follow the end-note: the return label. */
2517 if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2519 /* Ordinary insns can follow it if returning a structure. */
2520 else if (GET_CODE (insn) == INSN)
2522 /* If machine uses explicit RETURN insns, no epilogue,
2523 then one of them follows the note. */
2524 else if (GET_CODE (insn) == JUMP_INSN
2525 && GET_CODE (PATTERN (insn)) == RETURN)
2527 /* A barrier can follow the return insn. */
2528 else if (GET_CODE (insn) == BARRIER)
2530 /* Other kinds of notes can follow also. */
2531 else if (GET_CODE (insn) == NOTE
2532 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2538 insn = PREV_INSN (insn);
2541 /* See if we backed up to the appropriate type of note. */
2542 if (insn != NULL_RTX
2543 && GET_CODE (insn) == NOTE
2544 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
2545 && (check_deleted == 0
2546 || ! INSN_DELETED_P (insn)))
2548 if (delete_final_note)
2556 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2557 jump. Assume that this unconditional jump is to the exit test code. If
2558 the code is sufficiently simple, make a copy of it before INSN,
2559 followed by a jump to the exit of the loop. Then delete the unconditional
2562 Return 1 if we made the change, else 0.
2564 This is only safe immediately after a regscan pass because it uses the
2565 values of regno_first_uid and regno_last_uid. */
2568 duplicate_loop_exit_test (loop_start)
2571 rtx insn, set, reg, p, link;
2572 rtx copy = 0, first_copy = 0;
2574 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2576 int max_reg = max_reg_num ();
2579 /* Scan the exit code. We do not perform this optimization if any insn:
2583 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2584 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2585 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2588 We also do not do this if we find an insn with ASM_OPERANDS. While
2589 this restriction should not be necessary, copying an insn with
2590 ASM_OPERANDS can confuse asm_noperands in some cases.
2592 Also, don't do this if the exit code is more than 20 insns. */
2594 for (insn = exitcode;
2596 && ! (GET_CODE (insn) == NOTE
2597 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2598 insn = NEXT_INSN (insn))
2600 switch (GET_CODE (insn))
2606 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2607 a jump immediately after the loop start that branches outside
2608 the loop but within an outer loop, near the exit test.
2609 If we copied this exit test and created a phony
2610 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2611 before the exit test look like these could be safely moved
2612 out of the loop even if they actually may be never executed.
2613 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2615 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2616 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2620 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2621 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2622 /* If we were to duplicate this code, we would not move
2623 the BLOCK notes, and so debugging the moved code would
2624 be difficult. Thus, we only move the code with -O2 or
2631 /* The code below would grossly mishandle REG_WAS_0 notes,
2632 so get rid of them here. */
2633 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
2634 remove_note (insn, p);
2635 if (++num_insns > 20
2636 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2637 || find_reg_note (insn, REG_LIBCALL, NULL_RTX)
2638 || asm_noperands (PATTERN (insn)) > 0)
2646 /* Unless INSN is zero, we can do the optimization. */
2652 /* See if any insn sets a register only used in the loop exit code and
2653 not a user variable. If so, replace it with a new register. */
2654 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2655 if (GET_CODE (insn) == INSN
2656 && (set = single_set (insn)) != 0
2657 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2658 || (GET_CODE (reg) == SUBREG
2659 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2660 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2661 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
2663 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2664 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
2669 /* We can do the replacement. Allocate reg_map if this is the
2670 first replacement we found. */
2673 reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
2674 bzero ((char *) reg_map, max_reg * sizeof (rtx));
2677 REG_LOOP_TEST_P (reg) = 1;
2679 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2683 /* Now copy each insn. */
2684 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2686 switch (GET_CODE (insn))
2689 copy = emit_barrier_before (loop_start);
2692 /* Only copy line-number notes. */
2693 if (NOTE_LINE_NUMBER (insn) >= 0)
2695 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2696 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2701 copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2703 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2705 mark_jump_label (PATTERN (copy), copy, 0);
2707 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2709 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2710 if (REG_NOTE_KIND (link) != REG_LABEL)
2712 = copy_rtx (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
2715 if (reg_map && REG_NOTES (copy))
2716 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2720 copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2722 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2723 mark_jump_label (PATTERN (copy), copy, 0);
2724 if (REG_NOTES (insn))
2726 REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
2728 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2731 /* If this is a simple jump, add it to the jump chain. */
2733 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2734 && simplejump_p (copy))
2736 jump_chain[INSN_UID (copy)]
2737 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2738 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2746 /* Record the first insn we copied. We need it so that we can
2747 scan the copied insns for new pseudo registers. */
2752 /* Now clean up by emitting a jump to the end label and deleting the jump
2753 at the start of the loop. */
2754 if (! copy || GET_CODE (copy) != BARRIER)
2756 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2759 /* Record the first insn we copied. We need it so that we can
2760 scan the copied insns for new pseudo registers. This may not
2761 be strictly necessary since we should have copied at least one
2762 insn above. But I am going to be safe. */
2766 mark_jump_label (PATTERN (copy), copy, 0);
2767 if (INSN_UID (copy) < max_jump_chain
2768 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2770 jump_chain[INSN_UID (copy)]
2771 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2772 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2774 emit_barrier_before (loop_start);
2777 /* Now scan from the first insn we copied to the last insn we copied
2778 (copy) for new pseudo registers. Do this after the code to jump to
2779 the end label since that might create a new pseudo too. */
2780 reg_scan_update (first_copy, copy, max_reg);
2782 /* Mark the exit code as the virtual top of the converted loop. */
2783 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2785 delete_insn (next_nonnote_insn (loop_start));
2790 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2791 loop-end notes between START and END out before START. Assume that
2792 END is not such a note. START may be such a note. Returns the value
2793 of the new starting insn, which may be different if the original start
2797 squeeze_notes (start, end)
2803 for (insn = start; insn != end; insn = next)
2805 next = NEXT_INSN (insn);
2806 if (GET_CODE (insn) == NOTE
2807 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2808 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2809 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2810 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2811 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2812 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2818 rtx prev = PREV_INSN (insn);
2819 PREV_INSN (insn) = PREV_INSN (start);
2820 NEXT_INSN (insn) = start;
2821 NEXT_INSN (PREV_INSN (insn)) = insn;
2822 PREV_INSN (NEXT_INSN (insn)) = insn;
2823 NEXT_INSN (prev) = next;
2824 PREV_INSN (next) = prev;
2832 /* Compare the instructions before insn E1 with those before E2
2833 to find an opportunity for cross jumping.
2834 (This means detecting identical sequences of insns followed by
2835 jumps to the same place, or followed by a label and a jump
2836 to that label, and replacing one with a jump to the other.)
2838 Assume E1 is a jump that jumps to label E2
2839 (that is not always true but it might as well be).
2840 Find the longest possible equivalent sequences
2841 and store the first insns of those sequences into *F1 and *F2.
2842 Store zero there if no equivalent preceding instructions are found.
2844 We give up if we find a label in stream 1.
2845 Actually we could transfer that label into stream 2. */
2848 find_cross_jump (e1, e2, minimum, f1, f2)
2853 register rtx i1 = e1, i2 = e2;
2854 register rtx p1, p2;
2857 rtx last1 = 0, last2 = 0;
2858 rtx afterlast1 = 0, afterlast2 = 0;
2865 i1 = prev_nonnote_insn (i1);
2867 i2 = PREV_INSN (i2);
2868 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2869 i2 = PREV_INSN (i2);
2874 /* Don't allow the range of insns preceding E1 or E2
2875 to include the other (E2 or E1). */
2876 if (i2 == e1 || i1 == e2)
2879 /* If we will get to this code by jumping, those jumps will be
2880 tensioned to go directly to the new label (before I2),
2881 so this cross-jumping won't cost extra. So reduce the minimum. */
2882 if (GET_CODE (i1) == CODE_LABEL)
2888 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2891 /* Avoid moving insns across EH regions if either of the insns
2894 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
2895 && !in_same_eh_region (i1, i2))
2901 /* If this is a CALL_INSN, compare register usage information.
2902 If we don't check this on stack register machines, the two
2903 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2904 numbers of stack registers in the same basic block.
2905 If we don't check this on machines with delay slots, a delay slot may
2906 be filled that clobbers a parameter expected by the subroutine.
2908 ??? We take the simple route for now and assume that if they're
2909 equal, they were constructed identically. */
2911 if (GET_CODE (i1) == CALL_INSN
2912 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2913 CALL_INSN_FUNCTION_USAGE (i2)))
2917 /* If cross_jump_death_matters is not 0, the insn's mode
2918 indicates whether or not the insn contains any stack-like
2921 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
2923 /* If register stack conversion has already been done, then
2924 death notes must also be compared before it is certain that
2925 the two instruction streams match. */
2928 HARD_REG_SET i1_regset, i2_regset;
2930 CLEAR_HARD_REG_SET (i1_regset);
2931 CLEAR_HARD_REG_SET (i2_regset);
2933 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2934 if (REG_NOTE_KIND (note) == REG_DEAD
2935 && STACK_REG_P (XEXP (note, 0)))
2936 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2938 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2939 if (REG_NOTE_KIND (note) == REG_DEAD
2940 && STACK_REG_P (XEXP (note, 0)))
2941 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2943 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2952 /* Don't allow old-style asm or volatile extended asms to be accepted
2953 for cross jumping purposes. It is conceptually correct to allow
2954 them, since cross-jumping preserves the dynamic instruction order
2955 even though it is changing the static instruction order. However,
2956 if an asm is being used to emit an assembler pseudo-op, such as
2957 the MIPS `.set reorder' pseudo-op, then the static instruction order
2958 matters and it must be preserved. */
2959 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
2960 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
2961 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
2964 if (lose || GET_CODE (p1) != GET_CODE (p2)
2965 || ! rtx_renumbered_equal_p (p1, p2))
2967 /* The following code helps take care of G++ cleanups. */
2971 if (!lose && GET_CODE (p1) == GET_CODE (p2)
2972 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
2973 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
2974 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
2975 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
2976 /* If the equivalences are not to a constant, they may
2977 reference pseudos that no longer exist, so we can't
2979 && CONSTANT_P (XEXP (equiv1, 0))
2980 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
2982 rtx s1 = single_set (i1);
2983 rtx s2 = single_set (i2);
2984 if (s1 != 0 && s2 != 0
2985 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
2987 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
2988 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
2989 if (! rtx_renumbered_equal_p (p1, p2))
2991 else if (apply_change_group ())
2996 /* Insns fail to match; cross jumping is limited to the following
3000 /* Don't allow the insn after a compare to be shared by
3001 cross-jumping unless the compare is also shared.
3002 Here, if either of these non-matching insns is a compare,
3003 exclude the following insn from possible cross-jumping. */
3004 if (sets_cc0_p (p1) || sets_cc0_p (p2))
3005 last1 = afterlast1, last2 = afterlast2, ++minimum;
3008 /* If cross-jumping here will feed a jump-around-jump
3009 optimization, this jump won't cost extra, so reduce
3011 if (GET_CODE (i1) == JUMP_INSN
3013 && prev_real_insn (JUMP_LABEL (i1)) == e1)
3019 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3021 /* Ok, this insn is potentially includable in a cross-jump here. */
3022 afterlast1 = last1, afterlast2 = last2;
3023 last1 = i1, last2 = i2, --minimum;
3027 if (minimum <= 0 && last1 != 0 && last1 != e1)
3028 *f1 = last1, *f2 = last2;
3032 do_cross_jump (insn, newjpos, newlpos)
3033 rtx insn, newjpos, newlpos;
3035 /* Find an existing label at this point
3036 or make a new one if there is none. */
3037 register rtx label = get_label_before (newlpos);
3039 /* Make the same jump insn jump to the new point. */
3040 if (GET_CODE (PATTERN (insn)) == RETURN)
3042 /* Remove from jump chain of returns. */
3043 delete_from_jump_chain (insn);
3044 /* Change the insn. */
3045 PATTERN (insn) = gen_jump (label);
3046 INSN_CODE (insn) = -1;
3047 JUMP_LABEL (insn) = label;
3048 LABEL_NUSES (label)++;
3049 /* Add to new the jump chain. */
3050 if (INSN_UID (label) < max_jump_chain
3051 && INSN_UID (insn) < max_jump_chain)
3053 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
3054 jump_chain[INSN_UID (label)] = insn;
3058 redirect_jump (insn, label);
3060 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3061 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3062 the NEWJPOS stream. */
3064 while (newjpos != insn)
3068 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
3069 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
3070 || REG_NOTE_KIND (lnote) == REG_EQUIV)
3071 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
3072 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
3073 remove_note (newlpos, lnote);
3075 delete_insn (newjpos);
3076 newjpos = next_real_insn (newjpos);
3077 newlpos = next_real_insn (newlpos);
3081 /* Return the label before INSN, or put a new label there. */
3084 get_label_before (insn)
3089 /* Find an existing label at this point
3090 or make a new one if there is none. */
3091 label = prev_nonnote_insn (insn);
3093 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3095 rtx prev = PREV_INSN (insn);
3097 label = gen_label_rtx ();
3098 emit_label_after (label, prev);
3099 LABEL_NUSES (label) = 0;
3104 /* Return the label after INSN, or put a new label there. */
3107 get_label_after (insn)
3112 /* Find an existing label at this point
3113 or make a new one if there is none. */
3114 label = next_nonnote_insn (insn);
3116 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3118 label = gen_label_rtx ();
3119 emit_label_after (label, insn);
3120 LABEL_NUSES (label) = 0;
3125 /* Return 1 if INSN is a jump that jumps to right after TARGET
3126 only on the condition that TARGET itself would drop through.
3127 Assumes that TARGET is a conditional jump. */
3130 jump_back_p (insn, target)
3134 enum rtx_code codei, codet;
3136 if (simplejump_p (insn) || ! condjump_p (insn)
3137 || simplejump_p (target)
3138 || target != prev_real_insn (JUMP_LABEL (insn)))
3141 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
3142 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
3144 codei = GET_CODE (cinsn);
3145 codet = GET_CODE (ctarget);
3147 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
3149 if (! can_reverse_comparison_p (cinsn, insn))
3151 codei = reverse_condition (codei);
3154 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
3156 if (! can_reverse_comparison_p (ctarget, target))
3158 codet = reverse_condition (codet);
3161 return (codei == codet
3162 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
3163 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
3166 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3167 return non-zero if it is safe to reverse this comparison. It is if our
3168 floating-point is not IEEE, if this is an NE or EQ comparison, or if
3169 this is known to be an integer comparison. */
3172 can_reverse_comparison_p (comparison, insn)
3178 /* If this is not actually a comparison, we can't reverse it. */
3179 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
3182 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3183 /* If this is an NE comparison, it is safe to reverse it to an EQ
3184 comparison and vice versa, even for floating point. If no operands
3185 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3186 always false and NE is always true, so the reversal is also valid. */
3188 || GET_CODE (comparison) == NE
3189 || GET_CODE (comparison) == EQ)
3192 arg0 = XEXP (comparison, 0);
3194 /* Make sure ARG0 is one of the actual objects being compared. If we
3195 can't do this, we can't be sure the comparison can be reversed.
3197 Handle cc0 and a MODE_CC register. */
3198 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
3204 rtx prev = prev_nonnote_insn (insn);
3207 /* If the comparison itself was a loop invariant, it could have been
3208 hoisted out of the loop. If we proceed to unroll such a loop, then
3209 we may not be able to find the comparison when copying the loop.
3211 Returning zero in that case is the safe thing to do. */
3215 set = single_set (prev);
3216 if (set == 0 || SET_DEST (set) != arg0)
3219 arg0 = SET_SRC (set);
3221 if (GET_CODE (arg0) == COMPARE)
3222 arg0 = XEXP (arg0, 0);
3225 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3226 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3227 return (GET_CODE (arg0) == CONST_INT
3228 || (GET_MODE (arg0) != VOIDmode
3229 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
3230 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
3233 /* Given an rtx-code for a comparison, return the code
3234 for the negated comparison.
3235 WATCH OUT! reverse_condition is not safe to use on a jump
3236 that might be acting on the results of an IEEE floating point comparison,
3237 because of the special treatment of non-signaling nans in comparisons.
3238 Use can_reverse_comparison_p to be sure. */
3241 reverse_condition (code)
3282 /* Similar, but return the code when two operands of a comparison are swapped.
3283 This IS safe for IEEE floating-point. */
3286 swap_condition (code)
3325 /* Given a comparison CODE, return the corresponding unsigned comparison.
3326 If CODE is an equality comparison or already an unsigned comparison,
3327 CODE is returned. */
3330 unsigned_condition (code)
3360 /* Similarly, return the signed version of a comparison. */
3363 signed_condition (code)
3393 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3394 truth of CODE1 implies the truth of CODE2. */
3397 comparison_dominates_p (code1, code2)
3398 enum rtx_code code1, code2;
3406 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
3411 if (code2 == LE || code2 == NE)
3416 if (code2 == GE || code2 == NE)
3421 if (code2 == LEU || code2 == NE)
3426 if (code2 == GEU || code2 == NE)
3437 /* Return 1 if INSN is an unconditional jump and nothing else. */
3443 return (GET_CODE (insn) == JUMP_INSN
3444 && GET_CODE (PATTERN (insn)) == SET
3445 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3446 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3449 /* Return nonzero if INSN is a (possibly) conditional jump
3450 and nothing more. */
3456 register rtx x = PATTERN (insn);
3457 if (GET_CODE (x) != SET)
3459 if (GET_CODE (SET_DEST (x)) != PC)
3461 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3463 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3465 if (XEXP (SET_SRC (x), 2) == pc_rtx
3466 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3467 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3469 if (XEXP (SET_SRC (x), 1) == pc_rtx
3470 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3471 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3476 /* Return nonzero if INSN is a (possibly) conditional jump
3477 and nothing more. */
3480 condjump_in_parallel_p (insn)
3483 register rtx x = PATTERN (insn);
3485 if (GET_CODE (x) != PARALLEL)
3488 x = XVECEXP (x, 0, 0);
3490 if (GET_CODE (x) != SET)
3492 if (GET_CODE (SET_DEST (x)) != PC)
3494 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3496 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3498 if (XEXP (SET_SRC (x), 2) == pc_rtx
3499 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3500 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3502 if (XEXP (SET_SRC (x), 1) == pc_rtx
3503 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3504 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3509 /* Return the label of a conditional jump. */
3512 condjump_label (insn)
3515 register rtx x = PATTERN (insn);
3517 if (GET_CODE (x) == PARALLEL)
3518 x = XVECEXP (x, 0, 0);
3519 if (GET_CODE (x) != SET)
3521 if (GET_CODE (SET_DEST (x)) != PC)
3524 if (GET_CODE (x) == LABEL_REF)
3526 if (GET_CODE (x) != IF_THEN_ELSE)
3528 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
3530 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
3535 /* Return true if INSN is a (possibly conditional) return insn. */
3538 returnjump_p_1 (loc, data)
3540 void *data ATTRIBUTE_UNUSED;
3543 return GET_CODE (x) == RETURN;
3550 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
3555 /* Return 1 if X is an RTX that does nothing but set the condition codes
3556 and CLOBBER or USE registers.
3557 Return -1 if X does explicitly set the condition codes,
3558 but also does other things. */
3562 rtx x ATTRIBUTE_UNUSED;
3564 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3566 if (GET_CODE (x) == PARALLEL)
3570 int other_things = 0;
3571 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3573 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3574 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3576 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3579 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3585 /* Follow any unconditional jump at LABEL;
3586 return the ultimate label reached by any such chain of jumps.
3587 If LABEL is not followed by a jump, return LABEL.
3588 If the chain loops or we can't find end, return LABEL,
3589 since that tells caller to avoid changing the insn.
3591 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3592 a USE or CLOBBER. */
3595 follow_jumps (label)
3600 register rtx value = label;
3605 && (insn = next_active_insn (value)) != 0
3606 && GET_CODE (insn) == JUMP_INSN
3607 && ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
3608 || GET_CODE (PATTERN (insn)) == RETURN)
3609 && (next = NEXT_INSN (insn))
3610 && GET_CODE (next) == BARRIER);
3613 /* Don't chain through the insn that jumps into a loop
3614 from outside the loop,
3615 since that would create multiple loop entry jumps
3616 and prevent loop optimization. */
3618 if (!reload_completed)
3619 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
3620 if (GET_CODE (tem) == NOTE
3621 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
3622 /* ??? Optional. Disables some optimizations, but makes
3623 gcov output more accurate with -O. */
3624 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
3627 /* If we have found a cycle, make the insn jump to itself. */
3628 if (JUMP_LABEL (insn) == label)
3631 tem = next_active_insn (JUMP_LABEL (insn));
3632 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
3633 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
3636 value = JUMP_LABEL (insn);
3643 /* Assuming that field IDX of X is a vector of label_refs,
3644 replace each of them by the ultimate label reached by it.
3645 Return nonzero if a change is made.
3646 If IGNORE_LOOPS is 0, we do not chain across a NOTE_INSN_LOOP_BEG. */
3649 tension_vector_labels (x, idx)
3655 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3657 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3658 register rtx nlabel = follow_jumps (olabel);
3659 if (nlabel && nlabel != olabel)
3661 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3662 ++LABEL_NUSES (nlabel);
3663 if (--LABEL_NUSES (olabel) == 0)
3664 delete_insn (olabel);
3671 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3672 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3673 in INSN, then store one of them in JUMP_LABEL (INSN).
3674 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3675 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3676 Also, when there are consecutive labels, canonicalize on the last of them.
3678 Note that two labels separated by a loop-beginning note
3679 must be kept distinct if we have not yet done loop-optimization,
3680 because the gap between them is where loop-optimize
3681 will want to move invariant code to. CROSS_JUMP tells us
3682 that loop-optimization is done with.
3684 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3685 two labels distinct if they are separated by only USE or CLOBBER insns. */
3688 mark_jump_label (x, insn, cross_jump)
3693 register RTX_CODE code = GET_CODE (x);
3711 /* If this is a constant-pool reference, see if it is a label. */
3712 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3713 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3714 mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
3719 rtx label = XEXP (x, 0);
3724 if (GET_CODE (label) != CODE_LABEL)
3727 /* Ignore references to labels of containing functions. */
3728 if (LABEL_REF_NONLOCAL_P (x))
3731 /* If there are other labels following this one,
3732 replace it with the last of the consecutive labels. */
3733 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3735 if (GET_CODE (next) == CODE_LABEL)
3737 else if (cross_jump && GET_CODE (next) == INSN
3738 && (GET_CODE (PATTERN (next)) == USE
3739 || GET_CODE (PATTERN (next)) == CLOBBER))
3741 else if (GET_CODE (next) != NOTE)
3743 else if (! cross_jump
3744 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
3745 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
3746 /* ??? Optional. Disables some optimizations, but
3747 makes gcov output more accurate with -O. */
3748 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
3752 XEXP (x, 0) = label;
3753 if (! insn || ! INSN_DELETED_P (insn))
3754 ++LABEL_NUSES (label);
3758 if (GET_CODE (insn) == JUMP_INSN)
3759 JUMP_LABEL (insn) = label;
3761 /* If we've changed OLABEL and we had a REG_LABEL note
3762 for it, update it as well. */
3763 else if (label != olabel
3764 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
3765 XEXP (note, 0) = label;
3767 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3769 else if (! find_reg_note (insn, REG_LABEL, label))
3771 /* This code used to ignore labels which refered to dispatch
3772 tables to avoid flow.c generating worse code.
3774 However, in the presense of global optimizations like
3775 gcse which call find_basic_blocks without calling
3776 life_analysis, not recording such labels will lead
3777 to compiler aborts because of inconsistencies in the
3778 flow graph. So we go ahead and record the label.
3780 It may also be the case that the optimization argument
3781 is no longer valid because of the more accurate cfg
3782 we build in find_basic_blocks -- it no longer pessimizes
3783 code when it finds a REG_LABEL note. */
3784 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
3791 /* Do walk the labels in a vector, but not the first operand of an
3792 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3795 if (! INSN_DELETED_P (insn))
3797 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
3799 for (i = 0; i < XVECLEN (x, eltnum); i++)
3800 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
3808 fmt = GET_RTX_FORMAT (code);
3809 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3812 mark_jump_label (XEXP (x, i), insn, cross_jump);
3813 else if (fmt[i] == 'E')
3816 for (j = 0; j < XVECLEN (x, i); j++)
3817 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
3822 /* If all INSN does is set the pc, delete it,
3823 and delete the insn that set the condition codes for it
3824 if that's what the previous thing was. */
3830 register rtx set = single_set (insn);
3832 if (set && GET_CODE (SET_DEST (set)) == PC)
3833 delete_computation (insn);
3836 /* Delete INSN and recursively delete insns that compute values used only
3837 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3838 If we are running before flow.c, we need do nothing since flow.c will
3839 delete dead code. We also can't know if the registers being used are
3840 dead or not at this point.
3842 Otherwise, look at all our REG_DEAD notes. If a previous insn does
3843 nothing other than set a register that dies in this insn, we can delete
3846 On machines with CC0, if CC0 is used in this insn, we may be able to
3847 delete the insn that set it. */
3850 delete_computation (insn)
3856 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3858 rtx prev = prev_nonnote_insn (insn);
3859 /* We assume that at this stage
3860 CC's are always set explicitly
3861 and always immediately before the jump that
3862 will use them. So if the previous insn
3863 exists to set the CC's, delete it
3864 (unless it performs auto-increments, etc.). */
3865 if (prev && GET_CODE (prev) == INSN
3866 && sets_cc0_p (PATTERN (prev)))
3868 if (sets_cc0_p (PATTERN (prev)) > 0
3869 && !FIND_REG_INC_NOTE (prev, NULL_RTX))
3870 delete_computation (prev);
3872 /* Otherwise, show that cc0 won't be used. */
3873 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
3874 cc0_rtx, REG_NOTES (prev));
3879 #ifdef INSN_SCHEDULING
3880 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
3881 reload has completed. The schedulers need to be fixed. Until
3882 they are, we must not rely on the death notes here. */
3883 if (reload_completed && flag_schedule_insns_after_reload)
3890 for (note = REG_NOTES (insn); note; note = next)
3894 next = XEXP (note, 1);
3896 if (REG_NOTE_KIND (note) != REG_DEAD
3897 /* Verify that the REG_NOTE is legitimate. */
3898 || GET_CODE (XEXP (note, 0)) != REG)
3901 for (our_prev = prev_nonnote_insn (insn);
3902 our_prev && GET_CODE (our_prev) == INSN;
3903 our_prev = prev_nonnote_insn (our_prev))
3905 /* If we reach a SEQUENCE, it is too complex to try to
3906 do anything with it, so give up. */
3907 if (GET_CODE (PATTERN (our_prev)) == SEQUENCE)
3910 if (GET_CODE (PATTERN (our_prev)) == USE
3911 && GET_CODE (XEXP (PATTERN (our_prev), 0)) == INSN)
3912 /* reorg creates USEs that look like this. We leave them
3913 alone because reorg needs them for its own purposes. */
3916 if (reg_set_p (XEXP (note, 0), PATTERN (our_prev)))
3918 if (FIND_REG_INC_NOTE (our_prev, NULL_RTX))
3921 if (GET_CODE (PATTERN (our_prev)) == PARALLEL)
3923 /* If we find a SET of something else, we can't
3928 for (i = 0; i < XVECLEN (PATTERN (our_prev), 0); i++)
3930 rtx part = XVECEXP (PATTERN (our_prev), 0, i);
3932 if (GET_CODE (part) == SET
3933 && SET_DEST (part) != XEXP (note, 0))
3937 if (i == XVECLEN (PATTERN (our_prev), 0))
3938 delete_computation (our_prev);
3940 else if (GET_CODE (PATTERN (our_prev)) == SET
3941 && SET_DEST (PATTERN (our_prev)) == XEXP (note, 0))
3942 delete_computation (our_prev);
3947 /* If OUR_PREV references the register that dies here, it is an
3948 additional use. Hence any prior SET isn't dead. However, this
3949 insn becomes the new place for the REG_DEAD note. */
3950 if (reg_overlap_mentioned_p (XEXP (note, 0),
3951 PATTERN (our_prev)))
3953 XEXP (note, 1) = REG_NOTES (our_prev);
3954 REG_NOTES (our_prev) = note;
3963 /* Delete insn INSN from the chain of insns and update label ref counts.
3964 May delete some following insns as a consequence; may even delete
3965 a label elsewhere and insns that follow it.
3967 Returns the first insn after INSN that was not deleted. */
3973 register rtx next = NEXT_INSN (insn);
3974 register rtx prev = PREV_INSN (insn);
3975 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
3976 register int dont_really_delete = 0;
3978 while (next && INSN_DELETED_P (next))
3979 next = NEXT_INSN (next);
3981 /* This insn is already deleted => return first following nondeleted. */
3982 if (INSN_DELETED_P (insn))
3986 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
3988 /* Don't delete user-declared labels. Convert them to special NOTEs
3990 if (was_code_label && LABEL_NAME (insn) != 0
3991 && optimize && ! dont_really_delete)
3993 PUT_CODE (insn, NOTE);
3994 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
3995 NOTE_SOURCE_FILE (insn) = 0;
3996 dont_really_delete = 1;
3999 /* Mark this insn as deleted. */
4000 INSN_DELETED_P (insn) = 1;
4002 /* If this is an unconditional jump, delete it from the jump chain. */
4003 if (simplejump_p (insn))
4004 delete_from_jump_chain (insn);
4006 /* If instruction is followed by a barrier,
4007 delete the barrier too. */
4009 if (next != 0 && GET_CODE (next) == BARRIER)
4011 INSN_DELETED_P (next) = 1;
4012 next = NEXT_INSN (next);
4015 /* Patch out INSN (and the barrier if any) */
4017 if (optimize && ! dont_really_delete)
4021 NEXT_INSN (prev) = next;
4022 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
4023 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
4024 XVECLEN (PATTERN (prev), 0) - 1)) = next;
4029 PREV_INSN (next) = prev;
4030 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
4031 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4034 if (prev && NEXT_INSN (prev) == 0)
4035 set_last_insn (prev);
4038 /* If deleting a jump, decrement the count of the label,
4039 and delete the label if it is now unused. */
4041 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
4043 rtx lab = JUMP_LABEL (insn), lab_next;
4045 if (--LABEL_NUSES (lab) == 0)
4047 /* This can delete NEXT or PREV,
4048 either directly if NEXT is JUMP_LABEL (INSN),
4049 or indirectly through more levels of jumps. */
4052 /* I feel a little doubtful about this loop,
4053 but I see no clean and sure alternative way
4054 to find the first insn after INSN that is not now deleted.
4055 I hope this works. */
4056 while (next && INSN_DELETED_P (next))
4057 next = NEXT_INSN (next);
4060 else if ((lab_next = next_nonnote_insn (lab)) != NULL
4061 && GET_CODE (lab_next) == JUMP_INSN
4062 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
4063 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
4065 /* If we're deleting the tablejump, delete the dispatch table.
4066 We may not be able to kill the label immediately preceeding
4067 just yet, as it might be referenced in code leading up to
4069 delete_insn (lab_next);
4073 /* Likewise if we're deleting a dispatch table. */
4075 if (GET_CODE (insn) == JUMP_INSN
4076 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4077 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4079 rtx pat = PATTERN (insn);
4080 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4081 int len = XVECLEN (pat, diff_vec_p);
4083 for (i = 0; i < len; i++)
4084 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
4085 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
4086 while (next && INSN_DELETED_P (next))
4087 next = NEXT_INSN (next);
4091 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
4092 prev = PREV_INSN (prev);
4094 /* If INSN was a label and a dispatch table follows it,
4095 delete the dispatch table. The tablejump must have gone already.
4096 It isn't useful to fall through into a table. */
4099 && NEXT_INSN (insn) != 0
4100 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4101 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
4102 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
4103 next = delete_insn (NEXT_INSN (insn));
4105 /* If INSN was a label, delete insns following it if now unreachable. */
4107 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
4109 register RTX_CODE code;
4111 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
4112 || code == NOTE || code == BARRIER
4113 || (code == CODE_LABEL && INSN_DELETED_P (next))))
4116 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
4117 next = NEXT_INSN (next);
4118 /* Keep going past other deleted labels to delete what follows. */
4119 else if (code == CODE_LABEL && INSN_DELETED_P (next))
4120 next = NEXT_INSN (next);
4122 /* Note: if this deletes a jump, it can cause more
4123 deletion of unreachable code, after a different label.
4124 As long as the value from this recursive call is correct,
4125 this invocation functions correctly. */
4126 next = delete_insn (next);
4133 /* Advance from INSN till reaching something not deleted
4134 then return that. May return INSN itself. */
4137 next_nondeleted_insn (insn)
4140 while (INSN_DELETED_P (insn))
4141 insn = NEXT_INSN (insn);
4145 /* Delete a range of insns from FROM to TO, inclusive.
4146 This is for the sake of peephole optimization, so assume
4147 that whatever these insns do will still be done by a new
4148 peephole insn that will replace them. */
4151 delete_for_peephole (from, to)
4152 register rtx from, to;
4154 register rtx insn = from;
4158 register rtx next = NEXT_INSN (insn);
4159 register rtx prev = PREV_INSN (insn);
4161 if (GET_CODE (insn) != NOTE)
4163 INSN_DELETED_P (insn) = 1;
4165 /* Patch this insn out of the chain. */
4166 /* We don't do this all at once, because we
4167 must preserve all NOTEs. */
4169 NEXT_INSN (prev) = next;
4172 PREV_INSN (next) = prev;
4180 /* Note that if TO is an unconditional jump
4181 we *do not* delete the BARRIER that follows,
4182 since the peephole that replaces this sequence
4183 is also an unconditional jump in that case. */
4186 /* Invert the condition of the jump JUMP, and make it jump
4187 to label NLABEL instead of where it jumps now. */
4190 invert_jump (jump, nlabel)
4193 /* We have to either invert the condition and change the label or
4194 do neither. Either operation could fail. We first try to invert
4195 the jump. If that succeeds, we try changing the label. If that fails,
4196 we invert the jump back to what it was. */
4198 if (! invert_exp (PATTERN (jump), jump))
4201 if (redirect_jump (jump, nlabel))
4203 if (flag_branch_probabilities)
4205 rtx note = find_reg_note (jump, REG_BR_PROB, 0);
4207 /* An inverted jump means that a probability taken becomes a
4208 probability not taken. Subtract the branch probability from the
4209 probability base to convert it back to a taken probability.
4210 (We don't flip the probability on a branch that's never taken. */
4211 if (note && XINT (XEXP (note, 0), 0) >= 0)
4212 XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
4218 if (! invert_exp (PATTERN (jump), jump))
4219 /* This should just be putting it back the way it was. */
4225 /* Invert the jump condition of rtx X contained in jump insn, INSN.
4227 Return 1 if we can do so, 0 if we cannot find a way to do so that
4228 matches a pattern. */
4231 invert_exp (x, insn)
4235 register RTX_CODE code;
4239 code = GET_CODE (x);
4241 if (code == IF_THEN_ELSE)
4243 register rtx comp = XEXP (x, 0);
4246 /* We can do this in two ways: The preferable way, which can only
4247 be done if this is not an integer comparison, is to reverse
4248 the comparison code. Otherwise, swap the THEN-part and ELSE-part
4249 of the IF_THEN_ELSE. If we can't do either, fail. */
4251 if (can_reverse_comparison_p (comp, insn)
4252 && validate_change (insn, &XEXP (x, 0),
4253 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
4254 GET_MODE (comp), XEXP (comp, 0),
4255 XEXP (comp, 1)), 0))
4259 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
4260 validate_change (insn, &XEXP (x, 2), tem, 1);
4261 return apply_change_group ();
4264 fmt = GET_RTX_FORMAT (code);
4265 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4268 if (! invert_exp (XEXP (x, i), insn))
4273 for (j = 0; j < XVECLEN (x, i); j++)
4274 if (!invert_exp (XVECEXP (x, i, j), insn))
4282 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
4283 If the old jump target label is unused as a result,
4284 it and the code following it may be deleted.
4286 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4289 The return value will be 1 if the change was made, 0 if it wasn't (this
4290 can only occur for NLABEL == 0). */
4293 redirect_jump (jump, nlabel)
4296 register rtx olabel = JUMP_LABEL (jump);
4298 if (nlabel == olabel)
4301 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
4304 /* If this is an unconditional branch, delete it from the jump_chain of
4305 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4306 have UID's in range and JUMP_CHAIN is valid). */
4307 if (jump_chain && (simplejump_p (jump)
4308 || GET_CODE (PATTERN (jump)) == RETURN))
4310 int label_index = nlabel ? INSN_UID (nlabel) : 0;
4312 delete_from_jump_chain (jump);
4313 if (label_index < max_jump_chain
4314 && INSN_UID (jump) < max_jump_chain)
4316 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
4317 jump_chain[label_index] = jump;
4321 JUMP_LABEL (jump) = nlabel;
4323 ++LABEL_NUSES (nlabel);
4325 if (olabel && --LABEL_NUSES (olabel) == 0)
4326 delete_insn (olabel);
4331 /* Delete the instruction JUMP from any jump chain it might be on. */
4334 delete_from_jump_chain (jump)
4338 rtx olabel = JUMP_LABEL (jump);
4340 /* Handle unconditional jumps. */
4341 if (jump_chain && olabel != 0
4342 && INSN_UID (olabel) < max_jump_chain
4343 && simplejump_p (jump))
4344 index = INSN_UID (olabel);
4345 /* Handle return insns. */
4346 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
4350 if (jump_chain[index] == jump)
4351 jump_chain[index] = jump_chain[INSN_UID (jump)];
4356 for (insn = jump_chain[index];
4358 insn = jump_chain[INSN_UID (insn)])
4359 if (jump_chain[INSN_UID (insn)] == jump)
4361 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
4367 /* If NLABEL is nonzero, throughout the rtx at LOC,
4368 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
4369 zero, alter (RETURN) to (LABEL_REF NLABEL).
4371 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
4372 validity with validate_change. Convert (set (pc) (label_ref olabel))
4375 Return 0 if we found a change we would like to make but it is invalid.
4376 Otherwise, return 1. */
4379 redirect_exp (loc, olabel, nlabel, insn)
4384 register rtx x = *loc;
4385 register RTX_CODE code = GET_CODE (x);
4389 if (code == LABEL_REF)
4391 if (XEXP (x, 0) == olabel)
4394 XEXP (x, 0) = nlabel;
4396 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4400 else if (code == RETURN && olabel == 0)
4402 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
4403 if (loc == &PATTERN (insn))
4404 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
4405 return validate_change (insn, loc, x, 0);
4408 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
4409 && GET_CODE (SET_SRC (x)) == LABEL_REF
4410 && XEXP (SET_SRC (x), 0) == olabel)
4411 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4413 fmt = GET_RTX_FORMAT (code);
4414 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4417 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
4422 for (j = 0; j < XVECLEN (x, i); j++)
4423 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
4431 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4433 If the old jump target label (before the dispatch table) becomes unused,
4434 it and the dispatch table may be deleted. In that case, find the insn
4435 before the jump references that label and delete it and logical successors
4439 redirect_tablejump (jump, nlabel)
4442 register rtx olabel = JUMP_LABEL (jump);
4444 /* Add this jump to the jump_chain of NLABEL. */
4445 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
4446 && INSN_UID (jump) < max_jump_chain)
4448 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
4449 jump_chain[INSN_UID (nlabel)] = jump;
4452 PATTERN (jump) = gen_jump (nlabel);
4453 JUMP_LABEL (jump) = nlabel;
4454 ++LABEL_NUSES (nlabel);
4455 INSN_CODE (jump) = -1;
4457 if (--LABEL_NUSES (olabel) == 0)
4459 delete_labelref_insn (jump, olabel, 0);
4460 delete_insn (olabel);
4464 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4465 If we found one, delete it and then delete this insn if DELETE_THIS is
4466 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4469 delete_labelref_insn (insn, label, delete_this)
4476 if (GET_CODE (insn) != NOTE
4477 && reg_mentioned_p (label, PATTERN (insn)))
4488 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4489 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4503 /* Like rtx_equal_p except that it considers two REGs as equal
4504 if they renumber to the same value and considers two commutative
4505 operations to be the same if the order of the operands has been
4508 ??? Addition is not commutative on the PA due to the weird implicit
4509 space register selection rules for memory addresses. Therefore, we
4510 don't consider a + b == b + a.
4512 We could/should make this test a little tighter. Possibly only
4513 disabling it on the PA via some backend macro or only disabling this
4514 case when the PLUS is inside a MEM. */
4517 rtx_renumbered_equal_p (x, y)
4521 register RTX_CODE code = GET_CODE (x);
4527 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4528 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4529 && GET_CODE (SUBREG_REG (y)) == REG)))
4531 int reg_x = -1, reg_y = -1;
4532 int word_x = 0, word_y = 0;
4534 if (GET_MODE (x) != GET_MODE (y))
4537 /* If we haven't done any renumbering, don't
4538 make any assumptions. */
4539 if (reg_renumber == 0)
4540 return rtx_equal_p (x, y);
4544 reg_x = REGNO (SUBREG_REG (x));
4545 word_x = SUBREG_WORD (x);
4547 if (reg_renumber[reg_x] >= 0)
4549 reg_x = reg_renumber[reg_x] + word_x;
4557 if (reg_renumber[reg_x] >= 0)
4558 reg_x = reg_renumber[reg_x];
4561 if (GET_CODE (y) == SUBREG)
4563 reg_y = REGNO (SUBREG_REG (y));
4564 word_y = SUBREG_WORD (y);
4566 if (reg_renumber[reg_y] >= 0)
4568 reg_y = reg_renumber[reg_y];
4576 if (reg_renumber[reg_y] >= 0)
4577 reg_y = reg_renumber[reg_y];
4580 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
4583 /* Now we have disposed of all the cases
4584 in which different rtx codes can match. */
4585 if (code != GET_CODE (y))
4597 return INTVAL (x) == INTVAL (y);
4600 /* We can't assume nonlocal labels have their following insns yet. */
4601 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4602 return XEXP (x, 0) == XEXP (y, 0);
4604 /* Two label-refs are equivalent if they point at labels
4605 in the same position in the instruction stream. */
4606 return (next_real_insn (XEXP (x, 0))
4607 == next_real_insn (XEXP (y, 0)));
4610 return XSTR (x, 0) == XSTR (y, 0);
4613 /* If we didn't match EQ equality above, they aren't the same. */
4620 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4622 if (GET_MODE (x) != GET_MODE (y))
4625 /* For commutative operations, the RTX match if the operand match in any
4626 order. Also handle the simple binary and unary cases without a loop.
4628 ??? Don't consider PLUS a commutative operator; see comments above. */
4629 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4631 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4632 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4633 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4634 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4635 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4636 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4637 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4638 else if (GET_RTX_CLASS (code) == '1')
4639 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4641 /* Compare the elements. If any pair of corresponding elements
4642 fail to match, return 0 for the whole things. */
4644 fmt = GET_RTX_FORMAT (code);
4645 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4651 if (XWINT (x, i) != XWINT (y, i))
4656 if (XINT (x, i) != XINT (y, i))
4661 if (strcmp (XSTR (x, i), XSTR (y, i)))
4666 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4671 if (XEXP (x, i) != XEXP (y, i))
4678 if (XVECLEN (x, i) != XVECLEN (y, i))
4680 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4681 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4692 /* If X is a hard register or equivalent to one or a subregister of one,
4693 return the hard register number. If X is a pseudo register that was not
4694 assigned a hard register, return the pseudo register number. Otherwise,
4695 return -1. Any rtx is valid for X. */
4701 if (GET_CODE (x) == REG)
4703 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4704 return reg_renumber[REGNO (x)];
4707 if (GET_CODE (x) == SUBREG)
4709 int base = true_regnum (SUBREG_REG (x));
4710 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
4711 return SUBREG_WORD (x) + base;
4716 /* Optimize code of the form:
4718 for (x = a[i]; x; ...)
4720 for (x = a[i]; x; ...)
4724 Loop optimize will change the above code into
4728 { ...; if (! (x = ...)) break; }
4731 { ...; if (! (x = ...)) break; }
4734 In general, if the first test fails, the program can branch
4735 directly to `foo' and skip the second try which is doomed to fail.
4736 We run this after loop optimization and before flow analysis. */
4738 /* When comparing the insn patterns, we track the fact that different
4739 pseudo-register numbers may have been used in each computation.
4740 The following array stores an equivalence -- same_regs[I] == J means
4741 that pseudo register I was used in the first set of tests in a context
4742 where J was used in the second set. We also count the number of such
4743 pending equivalences. If nonzero, the expressions really aren't the
4746 static int *same_regs;
4748 static int num_same_regs;
4750 /* Track any registers modified between the target of the first jump and
4751 the second jump. They never compare equal. */
4753 static char *modified_regs;
4755 /* Record if memory was modified. */
4757 static int modified_mem;
4759 /* Called via note_stores on each insn between the target of the first
4760 branch and the second branch. It marks any changed registers. */
4763 mark_modified_reg (dest, x)
4765 rtx x ATTRIBUTE_UNUSED;
4769 if (GET_CODE (dest) == SUBREG)
4770 dest = SUBREG_REG (dest);
4772 if (GET_CODE (dest) == MEM)
4775 if (GET_CODE (dest) != REG)
4778 regno = REGNO (dest);
4779 if (regno >= FIRST_PSEUDO_REGISTER)
4780 modified_regs[regno] = 1;
4782 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
4783 modified_regs[regno + i] = 1;
4786 /* F is the first insn in the chain of insns. */
4789 thread_jumps (f, max_reg, flag_before_loop)
4792 int flag_before_loop;
4794 /* Basic algorithm is to find a conditional branch,
4795 the label it may branch to, and the branch after
4796 that label. If the two branches test the same condition,
4797 walk back from both branch paths until the insn patterns
4798 differ, or code labels are hit. If we make it back to
4799 the target of the first branch, then we know that the first branch
4800 will either always succeed or always fail depending on the relative
4801 senses of the two branches. So adjust the first branch accordingly
4804 rtx label, b1, b2, t1, t2;
4805 enum rtx_code code1, code2;
4806 rtx b1op0, b1op1, b2op0, b2op1;
4811 /* Allocate register tables and quick-reset table. */
4812 modified_regs = (char *) alloca (max_reg * sizeof (char));
4813 same_regs = (int *) alloca (max_reg * sizeof (int));
4814 all_reset = (int *) alloca (max_reg * sizeof (int));
4815 for (i = 0; i < max_reg; i++)
4822 for (b1 = f; b1; b1 = NEXT_INSN (b1))
4824 /* Get to a candidate branch insn. */
4825 if (GET_CODE (b1) != JUMP_INSN
4826 || ! condjump_p (b1) || simplejump_p (b1)
4827 || JUMP_LABEL (b1) == 0)
4830 bzero (modified_regs, max_reg * sizeof (char));
4833 bcopy ((char *) all_reset, (char *) same_regs,
4834 max_reg * sizeof (int));
4837 label = JUMP_LABEL (b1);
4839 /* Look for a branch after the target. Record any registers and
4840 memory modified between the target and the branch. Stop when we
4841 get to a label since we can't know what was changed there. */
4842 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
4844 if (GET_CODE (b2) == CODE_LABEL)
4847 else if (GET_CODE (b2) == JUMP_INSN)
4849 /* If this is an unconditional jump and is the only use of
4850 its target label, we can follow it. */
4851 if (simplejump_p (b2)
4852 && JUMP_LABEL (b2) != 0
4853 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
4855 b2 = JUMP_LABEL (b2);
4862 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
4865 if (GET_CODE (b2) == CALL_INSN)
4868 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4869 if (call_used_regs[i] && ! fixed_regs[i]
4870 && i != STACK_POINTER_REGNUM
4871 && i != FRAME_POINTER_REGNUM
4872 && i != HARD_FRAME_POINTER_REGNUM
4873 && i != ARG_POINTER_REGNUM)
4874 modified_regs[i] = 1;
4877 note_stores (PATTERN (b2), mark_modified_reg);
4880 /* Check the next candidate branch insn from the label
4883 || GET_CODE (b2) != JUMP_INSN
4885 || ! condjump_p (b2)
4886 || simplejump_p (b2))
4889 /* Get the comparison codes and operands, reversing the
4890 codes if appropriate. If we don't have comparison codes,
4891 we can't do anything. */
4892 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
4893 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
4894 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
4895 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
4896 code1 = reverse_condition (code1);
4898 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
4899 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
4900 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
4901 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
4902 code2 = reverse_condition (code2);
4904 /* If they test the same things and knowing that B1 branches
4905 tells us whether or not B2 branches, check if we
4906 can thread the branch. */
4907 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
4908 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
4909 && (comparison_dominates_p (code1, code2)
4910 || (comparison_dominates_p (code1, reverse_condition (code2))
4911 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
4915 t1 = prev_nonnote_insn (b1);
4916 t2 = prev_nonnote_insn (b2);
4918 while (t1 != 0 && t2 != 0)
4922 /* We have reached the target of the first branch.
4923 If there are no pending register equivalents,
4924 we know that this branch will either always
4925 succeed (if the senses of the two branches are
4926 the same) or always fail (if not). */
4929 if (num_same_regs != 0)
4932 if (comparison_dominates_p (code1, code2))
4933 new_label = JUMP_LABEL (b2);
4935 new_label = get_label_after (b2);
4937 if (JUMP_LABEL (b1) != new_label)
4939 rtx prev = PREV_INSN (new_label);
4941 if (flag_before_loop
4942 && GET_CODE (prev) == NOTE
4943 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
4945 /* Don't thread to the loop label. If a loop
4946 label is reused, loop optimization will
4947 be disabled for that loop. */
4948 new_label = gen_label_rtx ();
4949 emit_label_after (new_label, PREV_INSN (prev));
4951 changed |= redirect_jump (b1, new_label);
4956 /* If either of these is not a normal insn (it might be
4957 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
4958 have already been skipped above.) Similarly, fail
4959 if the insns are different. */
4960 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
4961 || recog_memoized (t1) != recog_memoized (t2)
4962 || ! rtx_equal_for_thread_p (PATTERN (t1),
4966 t1 = prev_nonnote_insn (t1);
4967 t2 = prev_nonnote_insn (t2);
4974 /* This is like RTX_EQUAL_P except that it knows about our handling of
4975 possibly equivalent registers and knows to consider volatile and
4976 modified objects as not equal.
4978 YINSN is the insn containing Y. */
4981 rtx_equal_for_thread_p (x, y, yinsn)
4987 register enum rtx_code code;
4990 code = GET_CODE (x);
4991 /* Rtx's of different codes cannot be equal. */
4992 if (code != GET_CODE (y))
4995 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
4996 (REG:SI x) and (REG:HI x) are NOT equivalent. */
4998 if (GET_MODE (x) != GET_MODE (y))
5001 /* For floating-point, consider everything unequal. This is a bit
5002 pessimistic, but this pass would only rarely do anything for FP
5004 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
5005 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
5008 /* For commutative operations, the RTX match if the operand match in any
5009 order. Also handle the simple binary and unary cases without a loop. */
5010 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5011 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5012 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
5013 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
5014 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
5015 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
5016 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5017 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
5018 else if (GET_RTX_CLASS (code) == '1')
5019 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5021 /* Handle special-cases first. */
5025 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
5028 /* If neither is user variable or hard register, check for possible
5030 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
5031 || REGNO (x) < FIRST_PSEUDO_REGISTER
5032 || REGNO (y) < FIRST_PSEUDO_REGISTER)
5035 if (same_regs[REGNO (x)] == -1)
5037 same_regs[REGNO (x)] = REGNO (y);
5040 /* If this is the first time we are seeing a register on the `Y'
5041 side, see if it is the last use. If not, we can't thread the
5042 jump, so mark it as not equivalent. */
5043 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
5049 return (same_regs[REGNO (x)] == REGNO (y));
5054 /* If memory modified or either volatile, not equivalent.
5055 Else, check address. */
5056 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5059 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5062 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5068 /* Cancel a pending `same_regs' if setting equivalenced registers.
5069 Then process source. */
5070 if (GET_CODE (SET_DEST (x)) == REG
5071 && GET_CODE (SET_DEST (y)) == REG)
5073 if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
5075 same_regs[REGNO (SET_DEST (x))] = -1;
5078 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
5082 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
5085 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
5088 return XEXP (x, 0) == XEXP (y, 0);
5091 return XSTR (x, 0) == XSTR (y, 0);
5100 fmt = GET_RTX_FORMAT (code);
5101 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5106 if (XWINT (x, i) != XWINT (y, i))
5112 if (XINT (x, i) != XINT (y, i))
5118 /* Two vectors must have the same length. */
5119 if (XVECLEN (x, i) != XVECLEN (y, i))
5122 /* And the corresponding elements must match. */
5123 for (j = 0; j < XVECLEN (x, i); j++)
5124 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
5125 XVECEXP (y, i, j), yinsn) == 0)
5130 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
5136 if (strcmp (XSTR (x, i), XSTR (y, i)))
5141 /* These are just backpointers, so they don't matter. */
5147 /* It is believed that rtx's at this level will never
5148 contain anything but integers and other rtx's,
5149 except for within LABEL_REFs and SYMBOL_REFs. */
5159 /* Return the insn that NEW can be safely inserted in front of starting at
5160 the jump insn INSN. Return 0 if it is not safe to do this jump
5161 optimization. Note that NEW must contain a single set. */
5164 find_insert_position (insn, new)
5171 /* If NEW does not clobber, it is safe to insert NEW before INSN. */
5172 if (GET_CODE (PATTERN (new)) != PARALLEL)
5175 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5176 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5177 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5184 /* There is a good chance that the previous insn PREV sets the thing
5185 being clobbered (often the CC in a hard reg). If PREV does not
5186 use what NEW sets, we can insert NEW before PREV. */
5188 prev = prev_active_insn (insn);
5189 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5190 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5191 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5193 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5197 return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
5199 #endif /* !HAVE_cc0 */