1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "langhooks.h"
37 #include "basic-block.h"
40 static bool prefer_and_bit_test (enum machine_mode, int);
41 static void do_jump_by_parts_greater (tree, int, rtx, rtx, int);
42 static void do_jump_by_parts_equality (tree, rtx, rtx, int);
43 static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
46 /* Invert probability if there is any. -1 stands for unknown. */
51 return prob == -1 ? -1 : REG_BR_PROB_BASE - prob;
54 /* At the start of a function, record that we have no previously-pushed
55 arguments waiting to be popped. */
58 init_pending_stack_adjust (void)
60 pending_stack_adjust = 0;
63 /* Discard any pending stack adjustment. This avoid relying on the
64 RTL optimizers to remove useless adjustments when we know the
65 stack pointer value is dead. */
67 discard_pending_stack_adjust (void)
69 stack_pointer_delta -= pending_stack_adjust;
70 pending_stack_adjust = 0;
73 /* When exiting from function, if safe, clear out any pending stack adjust
74 so the adjustment won't get done.
76 Note, if the current function calls alloca, then it must have a
77 frame pointer regardless of the value of flag_omit_frame_pointer. */
80 clear_pending_stack_adjust (void)
83 && (! flag_omit_frame_pointer || cfun->calls_alloca)
85 discard_pending_stack_adjust ();
88 /* Pop any previously-pushed arguments that have not been popped yet. */
91 do_pending_stack_adjust (void)
93 if (inhibit_defer_pop == 0)
95 if (pending_stack_adjust != 0)
96 adjust_stack (GEN_INT (pending_stack_adjust));
97 pending_stack_adjust = 0;
101 /* Expand conditional expressions. */
103 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
104 LABEL is an rtx of code CODE_LABEL, in this function and all the
108 jumpifnot (tree exp, rtx label, int prob)
110 do_jump (exp, label, NULL_RTX, inv (prob));
113 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
116 jumpif (tree exp, rtx label, int prob)
118 do_jump (exp, NULL_RTX, label, prob);
121 /* Used internally by prefer_and_bit_test. */
123 static GTY(()) rtx and_reg;
124 static GTY(()) rtx and_test;
125 static GTY(()) rtx shift_test;
127 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
128 where X is an arbitrary register of mode MODE. Return true if the former
132 prefer_and_bit_test (enum machine_mode mode, int bitnum)
136 /* Set up rtxes for the two variations. Use NULL as a placeholder
137 for the BITNUM-based constants. */
138 and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
139 and_test = gen_rtx_AND (mode, and_reg, NULL);
140 shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
145 /* Change the mode of the previously-created rtxes. */
146 PUT_MODE (and_reg, mode);
147 PUT_MODE (and_test, mode);
148 PUT_MODE (shift_test, mode);
149 PUT_MODE (XEXP (shift_test, 0), mode);
152 /* Fill in the integers. */
154 = immed_double_const ((unsigned HOST_WIDE_INT) 1 << bitnum, 0, mode);
155 XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);
157 return (rtx_cost (and_test, IF_THEN_ELSE, optimize_insn_for_speed_p ())
158 <= rtx_cost (shift_test, IF_THEN_ELSE, optimize_insn_for_speed_p ()));
161 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
162 the result is zero, or IF_TRUE_LABEL if the result is one.
163 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
164 meaning fall through in that case.
166 do_jump always does any pending stack adjust except when it does not
167 actually perform a jump. An example where there is no jump
168 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
170 PROB is probability of jump to if_true_label, or -1 if unknown. */
/* NOTE(review): this listing is missing many interior lines (the embedded
   original line numbers are discontinuous), so the function header's
   return type, braces, `switch' statement, most `case' labels, `break's
   and `goto's are absent below.  The surviving code lines are preserved
   byte-for-byte; only comments were added.  Restore the block from the
   upstream file before attempting to compile.  */
173 do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
175 enum tree_code code = TREE_CODE (exp);
179 enum machine_mode mode;
180 rtx drop_through_label = 0;
/* Constant operand: pick the label matching zero/nonzero directly.  */
188 temp = integer_zerop (exp) ? if_false_label : if_true_label;
194 /* This is not true with #pragma weak */
196 /* The address of something can never be zero. */
198 emit_jump (if_true_label);
/* Apparently a conversion case: look through the operand's reference
   forms before deciding how to compare — TODO confirm against upstream.  */
203 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
204 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
205 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
206 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
209 /* If we are narrowing the operand, we have to do the compare in the
211 if ((TYPE_PRECISION (TREE_TYPE (exp))
212 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
214 case NON_LVALUE_EXPR:
219 /* These cannot change zero->nonzero or vice versa. */
220 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
/* Logical negation: swap the true/false labels for the operand.  */
224 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
/* Three-operand conditional: test operand 0, then expand operand 1 or 2
   under the chosen labels, sharing a drop-through label when one side
   falls through.  */
230 rtx label1 = gen_label_rtx ();
231 if (!if_true_label || !if_false_label)
233 drop_through_label = gen_label_rtx ();
235 if_true_label = drop_through_label;
237 if_false_label = drop_through_label;
240 do_pending_stack_adjust ();
241 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
242 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
244 do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
249 /* Lowered by gimplify.c. */
/* Bit-field-like references: retry the jump in the smallest type that
   holds the referenced bits, when a compare exists for it.  */
255 case ARRAY_RANGE_REF:
257 HOST_WIDE_INT bitsize, bitpos;
259 enum machine_mode mode;
264 /* Get description of this reference. We don't actually care
265 about the underlying object here. */
266 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
267 &unsignedp, &volatilep, false);
269 type = lang_hooks.types.type_for_size (bitsize, unsignedp);
270 if (! SLOW_BYTE_ACCESS
271 && type != 0 && bitsize >= 0
272 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
273 && (optab_handler (cmp_optab, TYPE_MODE (type))->insn_code
274 != CODE_FOR_nothing))
276 do_jump (fold_convert (type, exp), if_false_label, if_true_label,
/* Equality comparison (EQ): complex modes are asserted away; compare
   against zero directly, by parts for wide integer modes, or with a
   full compare-and-jump otherwise.  */
285 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
287 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
288 != MODE_COMPLEX_FLOAT);
289 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
290 != MODE_COMPLEX_INT);
292 if (integer_zerop (TREE_OPERAND (exp, 1)))
293 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
295 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
296 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
297 do_jump_by_parts_equality (exp, if_false_label, if_true_label, prob);
299 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label,
305 /* Nonzero iff operands of minus differ. */
306 exp = build2 (NE_EXPR, TREE_TYPE (exp),
307 TREE_OPERAND (exp, 0),
308 TREE_OPERAND (exp, 1));
/* Inequality comparison (NE): mirror of the EQ case above.  */
312 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
314 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
315 != MODE_COMPLEX_FLOAT);
316 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
317 != MODE_COMPLEX_INT);
319 if (integer_zerop (TREE_OPERAND (exp, 1)))
320 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
321 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
322 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
323 do_jump_by_parts_equality (exp, if_true_label, if_false_label,
326 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label,
/* Ordering comparisons LT/LE/GT/GE: fall back to word-by-word jumps
   when the mode has no direct compare-and-branch support; note how the
   label pair and the SWAP argument encode each relation in terms of
   "greater".  */
332 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
333 if (GET_MODE_CLASS (mode) == MODE_INT
334 && ! can_compare_p (LT, mode, ccp_jump))
335 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label, prob);
337 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label,
342 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
343 if (GET_MODE_CLASS (mode) == MODE_INT
344 && ! can_compare_p (LE, mode, ccp_jump))
345 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label,
348 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label,
353 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
354 if (GET_MODE_CLASS (mode) == MODE_INT
355 && ! can_compare_p (GT, mode, ccp_jump))
356 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label,
359 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label,
364 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
365 if (GET_MODE_CLASS (mode) == MODE_INT
366 && ! can_compare_p (GE, mode, ccp_jump))
367 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label,
370 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label,
/* ORDERED/UNORDERED: use the reverse comparison (with swapped labels)
   if the target only supports that one, or canonicalize on UNORDERED
   for the library call.  */
377 enum rtx_code cmp, rcmp;
380 if (code == UNORDERED_EXPR)
381 cmp = UNORDERED, rcmp = ORDERED;
383 cmp = ORDERED, rcmp = UNORDERED;
384 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
387 if (! can_compare_p (cmp, mode, ccp_jump)
388 && (can_compare_p (rcmp, mode, ccp_jump)
389 /* If the target doesn't provide either UNORDERED or ORDERED
390 comparisons, canonicalize on UNORDERED for the library. */
391 || rcmp == UNORDERED))
395 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label, prob);
397 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label,
/* Unordered relationals (UNLT, UNLE, ... LTGT): each case sets rcode1
   and the tcode1/tcode2 decomposition pair — the case labels themselves
   were lost in extraction.  */
403 enum rtx_code rcode1;
404 enum tree_code tcode1, tcode2;
408 tcode1 = UNORDERED_EXPR;
413 tcode1 = UNORDERED_EXPR;
418 tcode1 = UNORDERED_EXPR;
423 tcode1 = UNORDERED_EXPR;
428 tcode1 = UNORDERED_EXPR;
432 /* It is ok for LTGT_EXPR to trap when the result is unordered,
433 so expand to (a < b) || (a > b). */
440 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
441 if (can_compare_p (rcode1, mode, ccp_jump))
442 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
443 if_true_label, prob);
/* Operands are wrapped in save_expr because each is evaluated twice
   by the decomposed pair of comparisons below.  */
446 tree op0 = save_expr (TREE_OPERAND (exp, 0));
447 tree op1 = save_expr (TREE_OPERAND (exp, 1));
450 /* If the target doesn't support combined unordered
451 compares, decompose into two comparisons. */
452 if (if_true_label == 0)
453 drop_through_label = if_true_label = gen_label_rtx ();
455 cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
456 cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
457 do_jump (cmp0, 0, if_true_label, prob);
458 do_jump (cmp1, if_false_label, if_true_label, prob);
/* BIT_AND_EXPR with constant 1: undo fold_single_bit_test when the
   (X & (1 << C)) form is cheaper for a jump.  */
464 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
465 See if the former is preferred for jump tests and restore it
467 if (integer_onep (TREE_OPERAND (exp, 1)))
469 tree exp0 = TREE_OPERAND (exp, 0);
470 rtx set_label, clr_label;
471 int setclr_prob = prob;
473 /* Strip narrowing integral type conversions. */
474 while (CONVERT_EXPR_P (exp0)
475 && TREE_OPERAND (exp0, 0) != error_mark_node
476 && TYPE_PRECISION (TREE_TYPE (exp0))
477 <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
478 exp0 = TREE_OPERAND (exp0, 0);
480 /* "exp0 ^ 1" inverts the sense of the single bit test. */
481 if (TREE_CODE (exp0) == BIT_XOR_EXPR
482 && integer_onep (TREE_OPERAND (exp0, 1)))
484 exp0 = TREE_OPERAND (exp0, 0);
485 clr_label = if_true_label;
486 set_label = if_false_label;
487 setclr_prob = inv (prob);
491 clr_label = if_false_label;
492 set_label = if_true_label;
495 if (TREE_CODE (exp0) == RSHIFT_EXPR)
497 tree arg = TREE_OPERAND (exp0, 0);
498 tree shift = TREE_OPERAND (exp0, 1);
499 tree argtype = TREE_TYPE (arg);
500 if (TREE_CODE (shift) == INTEGER_CST
501 && compare_tree_int (shift, 0) >= 0
502 && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
503 && prefer_and_bit_test (TYPE_MODE (argtype),
504 TREE_INT_CST_LOW (shift)))
506 unsigned HOST_WIDE_INT mask
507 = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
508 do_jump (build2 (BIT_AND_EXPR, argtype, arg,
509 build_int_cst_wide_type (argtype, mask, 0)),
510 clr_label, set_label, setclr_prob);
516 /* If we are AND'ing with a small constant, do this comparison in the
517 smallest type that fits. If the machine doesn't have comparisons
518 that small, it will be converted back to the wider comparison.
519 This helps if we are testing the sign bit of a narrower object.
520 combine can't do this for us because it can't know whether a
521 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
523 if (! SLOW_BYTE_ACCESS
524 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
525 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
526 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
527 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
528 && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
529 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
530 && (optab_handler (cmp_optab, TYPE_MODE (type))->insn_code
531 != CODE_FOR_nothing))
533 do_jump (fold_convert (type, exp), if_false_label, if_true_label,
538 if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
539 || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
542 /* Boolean comparisons can be compiled as TRUTH_AND_EXPR. */
/* TRUTH_AND(IF)_EXPR: short-circuit via a drop-through label unless a
   high branch cost or RHS side effects force the *IF form.  */
545 /* High branch cost, expand as the bitwise AND of the conditions.
546 Do the same if the RHS has side effects, because we're effectively
547 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR. */
548 if (BRANCH_COST (optimize_insn_for_speed_p (),
550 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
553 case TRUTH_ANDIF_EXPR:
554 if (if_false_label == NULL_RTX)
556 drop_through_label = gen_label_rtx ();
557 do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX, prob);
558 do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label, prob);
562 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX, prob);
563 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
/* TRUTH_OR(IF)_EXPR: symmetric with the AND case above.  */
569 /* High branch cost, expand as the bitwise OR of the conditions.
570 Do the same if the RHS has side effects, because we're effectively
571 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR. */
572 if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
573 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
576 case TRUTH_ORIF_EXPR:
577 if (if_true_label == NULL_RTX)
579 drop_through_label = gen_label_rtx ();
580 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label, prob);
581 do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX, prob);
585 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label, prob);
586 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
590 /* Fall through and generate the normal code. */
/* Default: expand EXP to RTL and branch on a compare against zero.  */
593 temp = expand_normal (exp);
594 do_pending_stack_adjust ();
595 /* The RTL optimizers prefer comparisons against pseudos. */
596 if (GET_CODE (temp) == SUBREG)
598 /* Compare promoted variables in their promoted mode. */
599 if (SUBREG_PROMOTED_VAR_P (temp)
600 && REG_P (XEXP (temp, 0)))
601 temp = XEXP (temp, 0);
603 temp = copy_to_reg (temp);
605 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
606 NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
607 GET_MODE (temp), NULL_RTX,
608 if_false_label, if_true_label, prob);
/* Emit the shared fall-through label, if one was created above.  */
611 if (drop_through_label)
613 do_pending_stack_adjust ();
614 emit_label (drop_through_label);
618 /* Compare OP0 with OP1, word at a time, in mode MODE.
619 UNSIGNEDP says to do unsigned comparison.
620 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
623 do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
624 rtx op1, rtx if_false_label, rtx if_true_label,
627 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
628 rtx drop_through_label = 0;
631 if (! if_true_label || ! if_false_label)
632 drop_through_label = gen_label_rtx ();
634 if_true_label = drop_through_label;
635 if (! if_false_label)
636 if_false_label = drop_through_label;
638 /* Compare a word at a time, high order first. */
639 for (i = 0; i < nwords; i++)
641 rtx op0_word, op1_word;
643 if (WORDS_BIG_ENDIAN)
645 op0_word = operand_subword_force (op0, i, mode);
646 op1_word = operand_subword_force (op1, i, mode);
650 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
651 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
654 /* All but high-order word must be compared as unsigned. */
655 do_compare_rtx_and_jump (op0_word, op1_word, GT,
656 (unsignedp || i > 0), word_mode, NULL_RTX,
657 NULL_RTX, if_true_label, prob);
659 /* Consider lower words only if these are equal. */
660 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
661 NULL_RTX, NULL_RTX, if_false_label,
666 emit_jump (if_false_label);
667 if (drop_through_label)
668 emit_label (drop_through_label);
671 /* Given a comparison expression EXP for values too wide to be compared
672 with one insn, test the comparison and jump to the appropriate label.
673 The code of EXP is ignored; we always test GT if SWAP is 0,
674 and LT if SWAP is 1. */
677 do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
678 rtx if_true_label, int prob)
680 rtx op0 = expand_normal (TREE_OPERAND (exp, swap));
681 rtx op1 = expand_normal (TREE_OPERAND (exp, !swap));
682 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
683 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
685 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
686 if_true_label, prob);
689 /* Jump according to whether OP0 is 0. We assume that OP0 has an integer
690 mode, MODE, that is too wide for the available compare insns. Either
691 Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
692 to indicate drop through. */
695 do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
696 rtx if_false_label, rtx if_true_label, int prob)
698 int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
701 rtx drop_through_label = 0;
703 /* The fastest way of doing this comparison on almost any machine is to
704 "or" all the words and compare the result. If all have to be loaded
705 from memory and this is a very wide item, it's possible this may
706 be slower, but that's highly unlikely. */
708 part = gen_reg_rtx (word_mode);
709 emit_move_insn (part, operand_subword_force (op0, 0, mode));
710 for (i = 1; i < nwords && part != 0; i++)
711 part = expand_binop (word_mode, ior_optab, part,
712 operand_subword_force (op0, i, mode),
713 part, 1, OPTAB_WIDEN);
717 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
718 NULL_RTX, if_false_label, if_true_label, prob);
722 /* If we couldn't do the "or" simply, do this with a series of compares. */
723 if (! if_false_label)
724 drop_through_label = if_false_label = gen_label_rtx ();
726 for (i = 0; i < nwords; i++)
727 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
728 const0_rtx, EQ, 1, word_mode, NULL_RTX,
729 if_false_label, NULL_RTX, prob);
732 emit_jump (if_true_label);
734 if (drop_through_label)
735 emit_label (drop_through_label);
738 /* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
739 where MODE is an integer mode too wide to be compared with one insn.
740 Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
741 to indicate drop through. */
744 do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
745 rtx if_false_label, rtx if_true_label, int prob)
747 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
748 rtx drop_through_label = 0;
751 if (op1 == const0_rtx)
753 do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
757 else if (op0 == const0_rtx)
759 do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
764 if (! if_false_label)
765 drop_through_label = if_false_label = gen_label_rtx ();
767 for (i = 0; i < nwords; i++)
768 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
769 operand_subword_force (op1, i, mode),
770 EQ, 0, word_mode, NULL_RTX,
771 if_false_label, NULL_RTX, prob);
774 emit_jump (if_true_label);
775 if (drop_through_label)
776 emit_label (drop_through_label);
779 /* Given an EQ_EXPR expression EXP for values too wide to be compared
780 with one insn, test the comparison and jump to the appropriate label. */
783 do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label,
786 rtx op0 = expand_normal (TREE_OPERAND (exp, 0));
787 rtx op1 = expand_normal (TREE_OPERAND (exp, 1));
788 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
789 do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
790 if_true_label, prob);
793 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
794 MODE is the machine mode of the comparison, not of the result.
795 (including code to compute the values to be compared) and set CC0
796 according to the result. The decision as to signed or unsigned
797 comparison must be made by the caller.
799 We force a stack adjustment unless there are currently
800 things pushed on the stack that aren't yet used.
802 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
806 compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
807 enum machine_mode mode, rtx size)
811 /* If one operand is constant, make it the second one. Only do this
812 if the other operand is not constant as well. */
814 if (swap_commutative_operands_p (op0, op1))
819 code = swap_condition (code);
822 do_pending_stack_adjust ();
824 code = unsignedp ? unsigned_condition (code) : code;
825 tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
828 if (CONSTANT_P (tem))
831 if (COMPARISON_P (tem))
833 code = GET_CODE (tem);
836 mode = GET_MODE (op0);
837 unsignedp = (code == GTU || code == LTU
838 || code == GEU || code == LEU);
842 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
845 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
847 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
851 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
852 The decision as to signed or unsigned comparison must be made by the caller.
854 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
/* NOTE(review): interior lines (return type, braces, the `switch' over
   CODE with its case labels, and parts of the probability-note loop)
   were dropped from the listing.  Code lines are preserved byte-for-byte;
   only comments were added.  Restore from the upstream file before
   compiling.  */
858 do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
859 enum machine_mode mode, rtx size, rtx if_false_label,
860 rtx if_true_label, int prob)
863 int dummy_true_label = 0;
865 /* Reverse the comparison if that is safe and we want to jump if it is
867 if (! if_true_label && ! FLOAT_MODE_P (mode))
869 if_true_label = if_false_label;
871 code = reverse_condition (code);
875 /* If one operand is constant, make it the second one. Only do this
876 if the other operand is not constant as well. */
878 if (swap_commutative_operands_p (op0, op1))
883 code = swap_condition (code);
886 do_pending_stack_adjust ();
888 code = unsignedp ? unsigned_condition (code) : code;
889 if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
/* Folded to a constant: jump unconditionally to the matching label.  */
892 if (CONSTANT_P (tem))
894 rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
895 ? if_false_label : if_true_label;
/* Simplified to another comparison: adopt its code and mode.  */
901 code = GET_CODE (tem);
902 mode = GET_MODE (tem);
905 unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
/* No true label was supplied: synthesize one so the emitted branch has
   a target; it is emitted at the end (see dummy_true_label below).  */
911 dummy_true_label = 1;
912 if_true_label = gen_label_rtx ();
/* Wide integer modes with no direct compare-and-branch: dispatch on
   CODE to the word-by-word helpers.  The case labels were lost in
   extraction; the operand order and label pair encode each relation
   (LTU/LEU/GTU/GEU unsigned first, then LT/LE/GT/GE, then EQ/NE).  */
915 if (GET_MODE_CLASS (mode) == MODE_INT
916 && ! can_compare_p (code, mode, ccp_jump))
921 do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
922 if_false_label, if_true_label, prob);
926 do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
927 if_true_label, if_false_label,
932 do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
933 if_false_label, if_true_label, prob);
937 do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
938 if_true_label, if_false_label,
943 do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
944 if_false_label, if_true_label, prob);
948 do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
949 if_true_label, if_false_label,
954 do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
955 if_false_label, if_true_label, prob);
959 do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
960 if_true_label, if_false_label,
965 do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
966 if_true_label, prob);
970 do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
971 if_false_label, inv (prob));
/* Normal case: emit the compare-and-branch, then walk forward from the
   last pre-existing insn to attach a REG_BR_PROB note to the emitted
   conditional jump.  */
980 rtx last = get_last_insn ();
981 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
983 if (prob != -1 && profile_status != PROFILE_ABSENT)
985 for (last = NEXT_INSN (last);
986 last && NEXT_INSN (last);
987 last = NEXT_INSN (last))
993 || !any_condjump_p (last))
996 fprintf (dump_file, "Failed to add probability note\n");
1000 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
1001 add_reg_note (last, REG_BR_PROB, GEN_INT (prob));
1007 emit_jump (if_false_label);
/* Emit the synthesized true label so the branch falls through here.  */
1008 if (dummy_true_label)
1009 emit_label (if_true_label);
1012 /* Generate code for a comparison expression EXP (including code to compute
1013 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
1014 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
1015 generated code will drop through.
1016 SIGNED_CODE should be the rtx operation for this comparison for
1017 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
1019 We force a stack adjustment unless there are currently
1020 things pushed on the stack that aren't yet used. */
/* NOTE(review): interior lines (return type, braces, local declarations,
   `return's after the ERROR_MARK checks, and parts of the funcptr
   canonicalization region) were dropped from the listing.  Code lines
   are preserved byte-for-byte; only comments were added.  Restore from
   the upstream file before compiling.  */
1023 do_compare_and_jump (tree exp, enum rtx_code signed_code,
1024 enum rtx_code unsigned_code, rtx if_false_label,
1025 rtx if_true_label, int prob)
1029 enum machine_mode mode;
1033 /* Don't crash if the comparison was erroneous. */
1034 op0 = expand_normal (TREE_OPERAND (exp, 0));
1035 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
1038 op1 = expand_normal (TREE_OPERAND (exp, 1));
1039 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
/* Determine the comparison type/mode from operand 0, unless it was a
   constant promoted past operand 1's mode (see comment below).  */
1042 type = TREE_TYPE (TREE_OPERAND (exp, 0));
1043 mode = TYPE_MODE (type);
1044 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
1045 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
1046 || (GET_MODE_BITSIZE (mode)
1047 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
1050 /* op0 might have been replaced by promoted constant, in which
1051 case the type of second argument should be used. */
1052 type = TREE_TYPE (TREE_OPERAND (exp, 1));
1053 mode = TYPE_MODE (type);
/* Choose signed or unsigned rtx code based on the comparison type.  */
1055 unsignedp = TYPE_UNSIGNED (type);
1056 code = unsignedp ? unsigned_code : signed_code;
1058 #ifdef HAVE_canonicalize_funcptr_for_compare
1059 /* If function pointers need to be "canonicalized" before they can
1060 be reliably compared, then canonicalize them.
1061 Only do this if *both* sides of the comparison are function pointers.
1062 If one side isn't, we want a noncanonicalized comparison. See PR
1063 middle-end/17564. */
1064 if (HAVE_canonicalize_funcptr_for_compare
1065 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
1066 && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
1068 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
1069 && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
1072 rtx new_op0 = gen_reg_rtx (mode);
1073 rtx new_op1 = gen_reg_rtx (mode);
1075 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
1078 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
/* Delegate to the rtx-level helper; for BLKmode operands the size
   expression of operand 0 is passed along (see the `?:' below).  */
1083 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
1085 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
1086 if_false_label, if_true_label, prob);
1089 #include "gt-dojump.h"