1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.  */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as combining them with AND and OR. */
65 enum comparison_code {
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree make_range (tree, int *, tree *, tree *);
112 static tree build_range_check (tree, tree, int, tree, tree);
113 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
115 static tree fold_range_test (enum tree_code, tree, tree, tree);
116 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
117 static tree unextend (tree, int, int, tree);
118 static tree fold_truthop (enum tree_code, tree, tree, tree);
119 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
120 static tree extract_muldiv (tree, tree, enum tree_code, tree);
121 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
122 static int multiple_of_p (tree, tree, tree);
123 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
136 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
137 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
138 and SUM1.  Then this yields nonzero if overflow occurred during the addition.
141 Overflow occurs if A and B have the same sign, but A and SUM differ in
142 sign.  Use `^' to test whether signs differ, and `< 0' to isolate the result.  */
144 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
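/* Worked example (added for illustration, not in the original sources):
   with 4-bit two's complement values, a = 6 (0110) and b = 5 (0101)
   wrap around to sum = -5 (1011).  Here ~(a ^ b) = 1100 and
   (a ^ sum) = 1101, so their AND is 1100, whose sign bit is set, and
   the macro reports overflow.  When a and b have opposite signs,
   ~(a ^ b) has a clear sign bit, so the macro yields zero, matching
   the fact that adding values of opposite sign cannot overflow.  */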
146 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
147 We do that by representing the two-word integer in 4 words, with only
148 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
149 number. The value of the word is LOWPART + HIGHPART * BASE. */
151 #define LOWPART(x) \
152 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
153 #define HIGHPART(x) \
154 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
155 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
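/* Illustrative example (not from the original file): on a host where
   HOST_BITS_PER_WIDE_INT is 32, BASE is 1 << 16, so the word
   0x1234ABCD splits into HIGHPART 0x1234 and LOWPART 0xABCD, and
   0xABCD + 0x1234 * BASE reconstructs the original word.  Keeping
   every half-word "digit" below BASE guarantees that the product of
   two digits plus a carry still fits in one unsigned HOST_WIDE_INT,
   which is what the multiplication and division loops below rely on.  */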
157 /* Unpack a two-word integer into 4 words.
158 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
159 WORDS points to the array of HOST_WIDE_INTs. */
162 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
164 words[0] = LOWPART (low);
165 words[1] = HIGHPART (low);
166 words[2] = LOWPART (hi);
167 words[3] = HIGHPART (hi);
170 /* Pack an array of 4 words into a two-word integer.
171 WORDS points to the array of words.
172 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
175 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
178 *low = words[0] + words[1] * BASE;
179 *hi = words[2] + words[3] * BASE;
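/* Round-trip sketch (illustrative only, not part of the original file):
   on a 32-bit HOST_WIDE_INT host,

     encode (words, 0x89ABCDEF, 0x01234567);

   fills words[] with the half-word digits { 0xCDEF, 0x89AB, 0x4567,
   0x0123 }, least significant first, and a following

     decode (words, &low, &high);

   recovers low == 0x89ABCDEF and high == 0x01234567.  */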
182 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
183 in overflow of the value, when >0 we are only interested in signed
184 overflow, for <0 we are interested in any overflow. OVERFLOWED
185 indicates whether overflow has already occurred. CONST_OVERFLOWED
186 indicates whether constant overflow has already occurred. We force
187 T's value to be within range of T's type (by setting to 0 or 1 all
188 the bits outside the type's range).  We set TREE_OVERFLOW if
189 OVERFLOWED is nonzero,
190 or OVERFLOWABLE is >0 and signed overflow occurs,
191 or OVERFLOWABLE is <0 and any overflow occurs.
192 We set TREE_CONSTANT_OVERFLOW if
193 CONST_OVERFLOWED is nonzero
194 or we set TREE_OVERFLOW.
195 We return either the original T, or a copy. */
198 force_fit_type (tree t, int overflowable,
199 bool overflowed, bool overflowed_const)
201 unsigned HOST_WIDE_INT low;
204 int sign_extended_type;
206 gcc_assert (TREE_CODE (t) == INTEGER_CST);
208 low = TREE_INT_CST_LOW (t);
209 high = TREE_INT_CST_HIGH (t);
211 if (POINTER_TYPE_P (TREE_TYPE (t))
212 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
215 prec = TYPE_PRECISION (TREE_TYPE (t));
216 /* Size types *are* sign extended. */
217 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
218 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
221 /* First clear all bits that are beyond the type's precision. */
223 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
225 else if (prec > HOST_BITS_PER_WIDE_INT)
226 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
230 if (prec < HOST_BITS_PER_WIDE_INT)
231 low &= ~((HOST_WIDE_INT) (-1) << prec);
234 if (!sign_extended_type)
235 /* No sign extension */;
236 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237 /* Correct width already. */;
238 else if (prec > HOST_BITS_PER_WIDE_INT)
240 /* Sign extend top half? */
241 if (high & ((unsigned HOST_WIDE_INT)1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
245 else if (prec == HOST_BITS_PER_WIDE_INT)
247 if ((HOST_WIDE_INT)low < 0)
252 /* Sign extend bottom half? */
253 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
256 low |= (HOST_WIDE_INT)(-1) << prec;
260 /* If the value changed, return a new node. */
261 if (overflowed || overflowed_const
262 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
264 t = build_int_cst_wide (TREE_TYPE (t), low, high);
268 || (overflowable > 0 && sign_extended_type))
271 TREE_OVERFLOW (t) = 1;
272 TREE_CONSTANT_OVERFLOW (t) = 1;
274 else if (overflowed_const)
277 TREE_CONSTANT_OVERFLOW (t) = 1;
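/* Usage sketch (illustrative, not from the original sources): a caller
   that has just built a raw INTEGER_CST might re-fit it so the value is
   truncated or sign extended to its type and the overflow flags are set
   accordingly, e.g.

     tree cst = build_int_cst_wide (type, low, high);
     cst = force_fit_type (cst, 1, false, false);

   With OVERFLOWABLE == 1, only signed overflow is recorded in
   TREE_OVERFLOW; a wrap-around in an unsigned type merely truncates
   the value.  */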
284 /* Add two doubleword integers with doubleword result.
285 Return nonzero if the operation overflows according to UNSIGNED_P.
286 Each argument is given as two `HOST_WIDE_INT' pieces.
287 One argument is L1 and H1; the other, L2 and H2.
288 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
291 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
292 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
293 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
296 unsigned HOST_WIDE_INT l;
300 h = h1 + h2 + (l < l1);
306 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
308 return OVERFLOW_SUM_SIGN (h1, h2, h);
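/* Example (added for illustration): adding the doubleword value
   l1 == ~(unsigned HOST_WIDE_INT) 0, h1 == 0 to l2 == 1, h2 == 0
   gives l == 0 and a carry into the high word, so h == 1.  Neither
   the signed nor the unsigned check reports overflow here; overflow
   is only reported when the high-word addition itself wraps, which
   OVERFLOW_SUM_SIGN (signed case) or the unsigned comparison above
   (unsigned case) detects.  */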
311 /* Negate a doubleword integer with doubleword result.
312 Return nonzero if the operation overflows, assuming it's signed.
313 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
314 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
317 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
318 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
324 return (*hv & h1) < 0;
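/* Note (added for illustration): the only signed doubleword whose
   negation overflows is the most negative one, l1 == 0 with h1 equal
   to the minimum HOST_WIDE_INT; negating it reproduces the same bit
   pattern, and the test (*hv & h1) < 0 above is then nonzero.  */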
334 /* Multiply two doubleword integers with doubleword result.
335 Return nonzero if the operation overflows according to UNSIGNED_P.
336 Each argument is given as two `HOST_WIDE_INT' pieces.
337 One argument is L1 and H1; the other, L2 and H2.
338 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
341 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
342 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
343 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
346 HOST_WIDE_INT arg1[4];
347 HOST_WIDE_INT arg2[4];
348 HOST_WIDE_INT prod[4 * 2];
349 unsigned HOST_WIDE_INT carry;
351 unsigned HOST_WIDE_INT toplow, neglow;
352 HOST_WIDE_INT tophigh, neghigh;
354 encode (arg1, l1, h1);
355 encode (arg2, l2, h2);
357 memset (prod, 0, sizeof prod);
359 for (i = 0; i < 4; i++)
362 for (j = 0; j < 4; j++)
365 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
366 carry += arg1[i] * arg2[j];
367 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
369 prod[k] = LOWPART (carry);
370 carry = HIGHPART (carry);
375 decode (prod, lv, hv);
376 decode (prod + 4, &toplow, &tophigh);
378 /* Unsigned overflow is immediate. */
380 return (toplow | tophigh) != 0;
382 /* Check for signed overflow by calculating the signed representation of the
383 top half of the result; it should agree with the low half's sign bit. */
386 neg_double (l2, h2, &neglow, &neghigh);
387 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
391 neg_double (l1, h1, &neglow, &neghigh);
392 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
394 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
397 /* Shift the doubleword integer in L1, H1 left by COUNT places
398 keeping only PREC bits of result.
399 Shift right if COUNT is negative.
400 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
401 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
404 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
405 HOST_WIDE_INT count, unsigned int prec,
406 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
408 unsigned HOST_WIDE_INT signmask;
412 rshift_double (l1, h1, -count, prec, lv, hv, arith);
416 if (SHIFT_COUNT_TRUNCATED)
419 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
421 /* Shifting by the host word size is undefined according to the
422 ANSI standard, so we must handle this as a special case. */
426 else if (count >= HOST_BITS_PER_WIDE_INT)
428 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
433 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
434 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
438 /* Sign extend all bits that are beyond the precision. */
440 signmask = -((prec > HOST_BITS_PER_WIDE_INT
441 ? ((unsigned HOST_WIDE_INT) *hv
442 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
443 : (*lv >> (prec - 1))) & 1);
445 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
447 else if (prec >= HOST_BITS_PER_WIDE_INT)
449 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
450 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
455 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
456 *lv |= signmask << prec;
460 /* Shift the doubleword integer in L1, H1 right by COUNT places
461 keeping only PREC bits of result. COUNT must be positive.
462 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
463 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
466 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
467 HOST_WIDE_INT count, unsigned int prec,
468 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
471 unsigned HOST_WIDE_INT signmask;
474 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
477 if (SHIFT_COUNT_TRUNCATED)
480 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
482 /* Shifting by the host word size is undefined according to the
483 ANSI standard, so we must handle this as a special case. */
487 else if (count >= HOST_BITS_PER_WIDE_INT)
490 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
494 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
496 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
499 /* Zero / sign extend all bits that are beyond the precision. */
501 if (count >= (HOST_WIDE_INT)prec)
506 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
508 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
510 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
511 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
516 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
517 *lv |= signmask << (prec - count);
521 /* Rotate the doubleword integer in L1, H1 left by COUNT places
522 keeping only PREC bits of result.
523 Rotate right if COUNT is negative.
524 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
527 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
528 HOST_WIDE_INT count, unsigned int prec,
529 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
531 unsigned HOST_WIDE_INT s1l, s2l;
532 HOST_WIDE_INT s1h, s2h;
538 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
539 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
544 /* Rotate the doubleword integer in L1, H1 right by COUNT places
545 keeping only PREC bits of result. COUNT must be positive.
546 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
549 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
550 HOST_WIDE_INT count, unsigned int prec,
551 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
553 unsigned HOST_WIDE_INT s1l, s2l;
554 HOST_WIDE_INT s1h, s2h;
560 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
561 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
566 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
567 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
568 CODE is a tree code for a kind of division, one of
569 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
571 It controls how the quotient is rounded to an integer.
572 Return nonzero if the operation overflows.
573 UNS nonzero says do unsigned division. */
576 div_and_round_double (enum tree_code code, int uns,
577 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
578 HOST_WIDE_INT hnum_orig,
579 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
580 HOST_WIDE_INT hden_orig,
581 unsigned HOST_WIDE_INT *lquo,
582 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
586 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
587 HOST_WIDE_INT den[4], quo[4];
589 unsigned HOST_WIDE_INT work;
590 unsigned HOST_WIDE_INT carry = 0;
591 unsigned HOST_WIDE_INT lnum = lnum_orig;
592 HOST_WIDE_INT hnum = hnum_orig;
593 unsigned HOST_WIDE_INT lden = lden_orig;
594 HOST_WIDE_INT hden = hden_orig;
597 if (hden == 0 && lden == 0)
598 overflow = 1, lden = 1;
600 /* Calculate quotient sign and convert operands to unsigned. */
606 /* (minimum integer) / (-1) is the only overflow case. */
607 if (neg_double (lnum, hnum, &lnum, &hnum)
608 && ((HOST_WIDE_INT) lden & hden) == -1)
614 neg_double (lden, hden, &lden, &hden);
618 if (hnum == 0 && hden == 0)
619 { /* single precision */
621 /* This unsigned division rounds toward zero. */
627 { /* trivial case: dividend < divisor */
628 /* hden != 0 already checked. */
635 memset (quo, 0, sizeof quo);
637 memset (num, 0, sizeof num); /* to zero 9th element */
638 memset (den, 0, sizeof den);
640 encode (num, lnum, hnum);
641 encode (den, lden, hden);
643 /* Special code for when the divisor < BASE. */
644 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
646 /* hnum != 0 already checked. */
647 for (i = 4 - 1; i >= 0; i--)
649 work = num[i] + carry * BASE;
650 quo[i] = work / lden;
656 /* Full double precision division,
657 with thanks to Don Knuth's "Seminumerical Algorithms". */
658 int num_hi_sig, den_hi_sig;
659 unsigned HOST_WIDE_INT quo_est, scale;
661 /* Find the highest nonzero divisor digit. */
662 for (i = 4 - 1;; i--)
669 /* Ensure that the first digit of the divisor is at least BASE/2.
670 This is required by the quotient digit estimation algorithm. */
672 scale = BASE / (den[den_hi_sig] + 1);
674 { /* scale divisor and dividend */
676 for (i = 0; i <= 4 - 1; i++)
678 work = (num[i] * scale) + carry;
679 num[i] = LOWPART (work);
680 carry = HIGHPART (work);
685 for (i = 0; i <= 4 - 1; i++)
687 work = (den[i] * scale) + carry;
688 den[i] = LOWPART (work);
689 carry = HIGHPART (work);
690 if (den[i] != 0) den_hi_sig = i;
697 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
699 /* Guess the next quotient digit, quo_est, by dividing the first
700 two remaining dividend digits by the high order divisor digit.
701 quo_est is never low and is at most 2 high. */
702 unsigned HOST_WIDE_INT tmp;
704 num_hi_sig = i + den_hi_sig + 1;
705 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
706 if (num[num_hi_sig] != den[den_hi_sig])
707 quo_est = work / den[den_hi_sig];
711 /* Refine quo_est so it's usually correct, and at most one high. */
712 tmp = work - quo_est * den[den_hi_sig];
714 && (den[den_hi_sig - 1] * quo_est
715 > (tmp * BASE + num[num_hi_sig - 2])))
718 /* Try QUO_EST as the quotient digit, by multiplying the
719 divisor by QUO_EST and subtracting from the remaining dividend.
720 Keep in mind that QUO_EST is the I - 1st digit. */
723 for (j = 0; j <= den_hi_sig; j++)
725 work = quo_est * den[j] + carry;
726 carry = HIGHPART (work);
727 work = num[i + j] - LOWPART (work);
728 num[i + j] = LOWPART (work);
729 carry += HIGHPART (work) != 0;
732 /* If quo_est was high by one, then num[i] went negative and
733 we need to correct things. */
734 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
737 carry = 0; /* add divisor back in */
738 for (j = 0; j <= den_hi_sig; j++)
740 work = num[i + j] + den[j] + carry;
741 carry = HIGHPART (work);
742 num[i + j] = LOWPART (work);
745 num [num_hi_sig] += carry;
748 /* Store the quotient digit. */
753 decode (quo, lquo, hquo);
756 /* If the result should be negative, negate it now. */
758 neg_double (*lquo, *hquo, lquo, hquo);
760 /* Compute trial remainder: rem = num - (quo * den) */
761 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
762 neg_double (*lrem, *hrem, lrem, hrem);
763 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
768 case TRUNC_MOD_EXPR: /* round toward zero */
769 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
773 case FLOOR_MOD_EXPR: /* round toward negative infinity */
774 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
777 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
785 case CEIL_MOD_EXPR: /* round toward positive infinity */
786 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
788 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
796 case ROUND_MOD_EXPR: /* round to closest integer */
798 unsigned HOST_WIDE_INT labs_rem = *lrem;
799 HOST_WIDE_INT habs_rem = *hrem;
800 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
801 HOST_WIDE_INT habs_den = hden, htwice;
803 /* Get absolute values. */
805 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
807 neg_double (lden, hden, &labs_den, &habs_den);
809 /* If (2 * abs (lrem) > abs (lden)) */
810 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
811 labs_rem, habs_rem, <wice, &htwice);
813 if (((unsigned HOST_WIDE_INT) habs_den
814 < (unsigned HOST_WIDE_INT) htwice)
815 || (((unsigned HOST_WIDE_INT) habs_den
816 == (unsigned HOST_WIDE_INT) htwice)
817 && (labs_den < ltwice)))
821 add_double (*lquo, *hquo,
822 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
825 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
837 /* Compute true remainder: rem = num - (quo * den) */
838 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
839 neg_double (*lrem, *hrem, lrem, hrem);
840 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
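/* Worked example of the rounding adjustments (added for illustration):
   dividing num = -8 by den = 3 first gives the truncated quotient -2
   with remainder -2.  FLOOR_DIV_EXPR rounds toward negative infinity
   and adjusts the quotient to -3; CEIL_DIV_EXPR rounds toward positive
   infinity and keeps -2; ROUND_DIV_EXPR sees 2 * |rem| = 4 > |den| = 3
   and also adjusts to -3, the closest integer.  The remainder is then
   recomputed as num - quo * den, giving -2, 1, -2 and 1 for TRUNC,
   FLOOR, CEIL and ROUND respectively.  */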
844 /* If ARG2 divides ARG1 with zero remainder, carries out the division
845 of type CODE and returns the quotient.
846 Otherwise returns NULL_TREE. */
849 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
851 unsigned HOST_WIDE_INT int1l, int2l;
852 HOST_WIDE_INT int1h, int2h;
853 unsigned HOST_WIDE_INT quol, reml;
854 HOST_WIDE_INT quoh, remh;
855 tree type = TREE_TYPE (arg1);
856 int uns = TYPE_UNSIGNED (type);
858 int1l = TREE_INT_CST_LOW (arg1);
859 int1h = TREE_INT_CST_HIGH (arg1);
860 int2l = TREE_INT_CST_LOW (arg2);
861 int2h = TREE_INT_CST_HIGH (arg2);
863 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
864 &quol, &quoh, &reml, &remh);
865 if (remh != 0 || reml != 0)
868 return build_int_cst_wide (type, quol, quoh);
871 /* Return true if the built-in mathematical function specified by CODE
872 is odd, i.e. -f(x) == f(-x). */
875 negate_mathfn_p (enum built_in_function code)
899 /* Check whether we may negate an integer constant T without causing overflow.  */
903 may_negate_without_overflow_p (tree t)
905 unsigned HOST_WIDE_INT val;
909 gcc_assert (TREE_CODE (t) == INTEGER_CST);
911 type = TREE_TYPE (t);
912 if (TYPE_UNSIGNED (type))
915 prec = TYPE_PRECISION (type);
916 if (prec > HOST_BITS_PER_WIDE_INT)
918 if (TREE_INT_CST_LOW (t) != 0)
920 prec -= HOST_BITS_PER_WIDE_INT;
921 val = TREE_INT_CST_HIGH (t);
924 val = TREE_INT_CST_LOW (t);
925 if (prec < HOST_BITS_PER_WIDE_INT)
926 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
927 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
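/* Example (added for illustration): for a 32-bit signed type the only
   value this rejects is INT_MIN, whose low PREC bits are exactly
   1 << 31; every other constant, including INT_MAX, can be negated
   without overflow.  */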
930 /* Determine whether an expression T can be cheaply negated using
931 the function negate_expr. */
934 negate_expr_p (tree t)
941 type = TREE_TYPE (t);
944 switch (TREE_CODE (t))
947 if (TYPE_UNSIGNED (type) || ! flag_trapv)
950 /* Check that -CST will not overflow type. */
951 return may_negate_without_overflow_p (t);
958 return negate_expr_p (TREE_REALPART (t))
959 && negate_expr_p (TREE_IMAGPART (t));
962 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
964 /* -(A + B) -> (-B) - A. */
965 if (negate_expr_p (TREE_OPERAND (t, 1))
966 && reorder_operands_p (TREE_OPERAND (t, 0),
967 TREE_OPERAND (t, 1)))
969 /* -(A + B) -> (-A) - B. */
970 return negate_expr_p (TREE_OPERAND (t, 0));
973 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
974 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
975 && reorder_operands_p (TREE_OPERAND (t, 0),
976 TREE_OPERAND (t, 1));
979 if (TYPE_UNSIGNED (TREE_TYPE (t)))
985 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
986 return negate_expr_p (TREE_OPERAND (t, 1))
987 || negate_expr_p (TREE_OPERAND (t, 0));
991 /* Negate -((double)float) as (double)(-float). */
992 if (TREE_CODE (type) == REAL_TYPE)
994 tree tem = strip_float_extensions (t);
996 return negate_expr_p (tem);
1001 /* Negate -f(x) as f(-x). */
1002 if (negate_mathfn_p (builtin_mathfn_code (t)))
1003 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1007 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1008 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1010 tree op1 = TREE_OPERAND (t, 1);
1011 if (TREE_INT_CST_HIGH (op1) == 0
1012 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1013 == TREE_INT_CST_LOW (op1))
1024 /* Given T, an expression, return the negation of T. Allow for T to be
1025 null, in which case return null. */
1028 negate_expr (tree t)
1036 type = TREE_TYPE (t);
1037 STRIP_SIGN_NOPS (t);
1039 switch (TREE_CODE (t))
1042 tem = fold_negate_const (t, type);
1043 if (! TREE_OVERFLOW (tem)
1044 || TYPE_UNSIGNED (type)
1047 return build1 (NEGATE_EXPR, type, t);
1050 tem = fold_negate_const (t, type);
1051 /* Two's complement FP formats, such as c4x, may overflow. */
1052 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1053 return fold_convert (type, tem);
1054 return build1 (NEGATE_EXPR, type, t);
1058 tree rpart = negate_expr (TREE_REALPART (t));
1059 tree ipart = negate_expr (TREE_IMAGPART (t));
1061 if ((TREE_CODE (rpart) == REAL_CST
1062 && TREE_CODE (ipart) == REAL_CST)
1063 || (TREE_CODE (rpart) == INTEGER_CST
1064 && TREE_CODE (ipart) == INTEGER_CST))
1065 return build_complex (type, rpart, ipart);
1070 return fold_convert (type, TREE_OPERAND (t, 0));
1073 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1075 /* -(A + B) -> (-B) - A. */
1076 if (negate_expr_p (TREE_OPERAND (t, 1))
1077 && reorder_operands_p (TREE_OPERAND (t, 0),
1078 TREE_OPERAND (t, 1)))
1080 tem = negate_expr (TREE_OPERAND (t, 1));
1081 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1082 tem, TREE_OPERAND (t, 0));
1083 return fold_convert (type, tem);
1086 /* -(A + B) -> (-A) - B. */
1087 if (negate_expr_p (TREE_OPERAND (t, 0)))
1089 tem = negate_expr (TREE_OPERAND (t, 0));
1090 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1091 tem, TREE_OPERAND (t, 1));
1092 return fold_convert (type, tem);
1098 /* - (A - B) -> B - A */
1099 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1100 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1101 return fold_convert (type,
1102 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1103 TREE_OPERAND (t, 1),
1104 TREE_OPERAND (t, 0)));
1108 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1114 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1116 tem = TREE_OPERAND (t, 1);
1117 if (negate_expr_p (tem))
1118 return fold_convert (type,
1119 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1120 TREE_OPERAND (t, 0),
1121 negate_expr (tem)));
1122 tem = TREE_OPERAND (t, 0);
1123 if (negate_expr_p (tem))
1124 return fold_convert (type,
1125 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1127 TREE_OPERAND (t, 1)));
1132 /* Convert -((double)float) into (double)(-float). */
1133 if (TREE_CODE (type) == REAL_TYPE)
1135 tem = strip_float_extensions (t);
1136 if (tem != t && negate_expr_p (tem))
1137 return fold_convert (type, negate_expr (tem));
1142 /* Negate -f(x) as f(-x). */
1143 if (negate_mathfn_p (builtin_mathfn_code (t))
1144 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1146 tree fndecl, arg, arglist;
1148 fndecl = get_callee_fndecl (t);
1149 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1150 arglist = build_tree_list (NULL_TREE, arg);
1151 return build_function_call_expr (fndecl, arglist);
1156 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1157 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1159 tree op1 = TREE_OPERAND (t, 1);
1160 if (TREE_INT_CST_HIGH (op1) == 0
1161 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1162 == TREE_INT_CST_LOW (op1))
1164 tree ntype = TYPE_UNSIGNED (type)
1165 ? lang_hooks.types.signed_type (type)
1166 : lang_hooks.types.unsigned_type (type);
1167 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1168 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1169 return fold_convert (type, temp);
1178 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1179 return fold_convert (type, tem);
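/* Illustrative summary (not part of the original sources): the cases
   above rewrite, for example,

     -(a - b)            as  b - a      (integers, or unsafe FP math)
     -(a * -b)           as  a * b
     -((int) x >> 31)    as  (unsigned) x >> 31

   and negate_expr_p reports in advance whether such a cheap rewrite
   exists, so callers can decide whether introducing a negation is
   profitable.  */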
1182 /* Split a tree IN into a constant, literal and variable parts that could be
1183 combined with CODE to make IN. "constant" means an expression with
1184 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1185 commutative arithmetic operation. Store the constant part into *CONP,
1186 the literal in *LITP and return the variable part. If a part isn't
1187 present, set it to null. If the tree does not decompose in this way,
1188 return the entire tree as the variable part and the other parts as null.
1190 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1191 case, we negate an operand that was subtracted. Except if it is a
1192 literal for which we use *MINUS_LITP instead.
1194 If NEGATE_P is true, we are negating all of IN, again except a literal
1195 for which we use *MINUS_LITP instead.
1197 If IN is itself a literal or constant, return it as appropriate.
1199 Note that we do not guarantee that any of the three values will be the
1200 same type as IN, but they will have the same signedness and mode. */
1203 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1204 tree *minus_litp, int negate_p)
1212 /* Strip any conversions that don't change the machine mode or signedness. */
1213 STRIP_SIGN_NOPS (in);
1215 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1217 else if (TREE_CODE (in) == code
1218 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1219 /* We can associate addition and subtraction together (even
1220 though the C standard doesn't say so) for integers because
1221 the value is not affected. For reals, the value might be
1222 affected, so we can't. */
1223 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1224 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1226 tree op0 = TREE_OPERAND (in, 0);
1227 tree op1 = TREE_OPERAND (in, 1);
1228 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1229 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1231 /* First see if either of the operands is a literal, then a constant. */
1232 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1233 *litp = op0, op0 = 0;
1234 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1235 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1237 if (op0 != 0 && TREE_CONSTANT (op0))
1238 *conp = op0, op0 = 0;
1239 else if (op1 != 0 && TREE_CONSTANT (op1))
1240 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1242 /* If we haven't dealt with either operand, this is not a case we can
1243 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1244 if (op0 != 0 && op1 != 0)
1249 var = op1, neg_var_p = neg1_p;
1251 /* Now do any needed negations. */
1253 *minus_litp = *litp, *litp = 0;
1255 *conp = negate_expr (*conp);
1257 var = negate_expr (var);
1259 else if (TREE_CONSTANT (in))
1267 *minus_litp = *litp, *litp = 0;
1268 else if (*minus_litp)
1269 *litp = *minus_litp, *minus_litp = 0;
1270 *conp = negate_expr (*conp);
1271 var = negate_expr (var);
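/* Worked example (illustrative only): splitting IN = (a + b) - 5 with
   CODE == PLUS_EXPR treats the subtracted literal specially, so the
   function stores 5 in *MINUS_LITP, leaves *LITP and *CONP null, and
   returns a + b as the variable part.  associate_trees below can later
   reassemble the pieces once other simplifications have run.  */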
1277 /* Re-associate trees split by the above function. T1 and T2 are either
1278 expressions to associate or null. Return the new expression, if any. If
1279 we build an operation, do it in TYPE and with CODE. */
1282 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1289 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1290 try to fold this since we will have infinite recursion. But do
1291 deal with any NEGATE_EXPRs. */
1292 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1293 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1295 if (code == PLUS_EXPR)
1297 if (TREE_CODE (t1) == NEGATE_EXPR)
1298 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1299 fold_convert (type, TREE_OPERAND (t1, 0)));
1300 else if (TREE_CODE (t2) == NEGATE_EXPR)
1301 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1302 fold_convert (type, TREE_OPERAND (t2, 0)));
1303 else if (integer_zerop (t2))
1304 return fold_convert (type, t1);
1306 else if (code == MINUS_EXPR)
1308 if (integer_zerop (t2))
1309 return fold_convert (type, t1);
1312 return build2 (code, type, fold_convert (type, t1),
1313 fold_convert (type, t2));
1316 return fold_build2 (code, type, fold_convert (type, t1),
1317 fold_convert (type, t2));
1320 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1321 to produce a new constant.
1323 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1326 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1328 unsigned HOST_WIDE_INT int1l, int2l;
1329 HOST_WIDE_INT int1h, int2h;
1330 unsigned HOST_WIDE_INT low;
1332 unsigned HOST_WIDE_INT garbagel;
1333 HOST_WIDE_INT garbageh;
1335 tree type = TREE_TYPE (arg1);
1336 int uns = TYPE_UNSIGNED (type);
1338 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1341 int1l = TREE_INT_CST_LOW (arg1);
1342 int1h = TREE_INT_CST_HIGH (arg1);
1343 int2l = TREE_INT_CST_LOW (arg2);
1344 int2h = TREE_INT_CST_HIGH (arg2);
1349 low = int1l | int2l, hi = int1h | int2h;
1353 low = int1l ^ int2l, hi = int1h ^ int2h;
1357 low = int1l & int2l, hi = int1h & int2h;
1363 /* It's unclear from the C standard whether shifts can overflow.
1364 The following code ignores overflow; perhaps a C standard
1365 interpretation ruling is needed. */
1366 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1373 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1378 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1382 neg_double (int2l, int2h, &low, &hi);
1383 add_double (int1l, int1h, low, hi, &low, &hi);
1384 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1388 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1391 case TRUNC_DIV_EXPR:
1392 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1393 case EXACT_DIV_EXPR:
1394 /* This is a shortcut for a common special case. */
1395 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1396 && ! TREE_CONSTANT_OVERFLOW (arg1)
1397 && ! TREE_CONSTANT_OVERFLOW (arg2)
1398 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1400 if (code == CEIL_DIV_EXPR)
1403 low = int1l / int2l, hi = 0;
1407 /* ... fall through ... */
1409 case ROUND_DIV_EXPR:
1410 if (int2h == 0 && int2l == 1)
1412 low = int1l, hi = int1h;
1415 if (int1l == int2l && int1h == int2h
1416 && ! (int1l == 0 && int1h == 0))
1421 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1422 &low, &hi, &garbagel, &garbageh);
1425 case TRUNC_MOD_EXPR:
1426 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1427 /* This is a shortcut for a common special case. */
1428 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1429 && ! TREE_CONSTANT_OVERFLOW (arg1)
1430 && ! TREE_CONSTANT_OVERFLOW (arg2)
1431 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1433 if (code == CEIL_MOD_EXPR)
1435 low = int1l % int2l, hi = 0;
1439 /* ... fall through ... */
1441 case ROUND_MOD_EXPR:
1442 overflow = div_and_round_double (code, uns,
1443 int1l, int1h, int2l, int2h,
1444 &garbagel, &garbageh, &low, &hi);
1450 low = (((unsigned HOST_WIDE_INT) int1h
1451 < (unsigned HOST_WIDE_INT) int2h)
1452 || (((unsigned HOST_WIDE_INT) int1h
1453 == (unsigned HOST_WIDE_INT) int2h)
1456 low = (int1h < int2h
1457 || (int1h == int2h && int1l < int2l));
1459 if (low == (code == MIN_EXPR))
1460 low = int1l, hi = int1h;
1462 low = int2l, hi = int2h;
1469 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1473 /* Propagate overflow flags ourselves. */
1474 if (((!uns || is_sizetype) && overflow)
1475 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1478 TREE_OVERFLOW (t) = 1;
1479 TREE_CONSTANT_OVERFLOW (t) = 1;
1481 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1484 TREE_CONSTANT_OVERFLOW (t) = 1;
1488 t = force_fit_type (t, 1,
1489 ((!uns || is_sizetype) && overflow)
1490 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1491 TREE_CONSTANT_OVERFLOW (arg1)
1492 | TREE_CONSTANT_OVERFLOW (arg2));
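/* Usage sketch (illustrative, not from the original file): folding the
   constant expression 7 * 6 might look like

     tree t = int_const_binop (MULT_EXPR,
                               build_int_cst (integer_type_node, 7),
                               build_int_cst (integer_type_node, 6), 0);

   which yields an INTEGER_CST of value 42 with no overflow flags set;
   passing NOTRUNC as 0 lets force_fit_type re-check the result against
   the precision of the type.  */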
1497 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1498 constant. We assume ARG1 and ARG2 have the same data type, or at least
1499 are the same kind of constant and the same machine mode. Return zero if
1500 combining the constants is not allowed in the current operating mode.
1502 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1505 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1507 /* Sanity check for the recursive cases. */
1514 if (TREE_CODE (arg1) == INTEGER_CST)
1515 return int_const_binop (code, arg1, arg2, notrunc);
1517 if (TREE_CODE (arg1) == REAL_CST)
1519 enum machine_mode mode;
1522 REAL_VALUE_TYPE value;
1523 REAL_VALUE_TYPE result;
1527 d1 = TREE_REAL_CST (arg1);
1528 d2 = TREE_REAL_CST (arg2);
1530 type = TREE_TYPE (arg1);
1531 mode = TYPE_MODE (type);
1533 /* Don't perform operation if we honor signaling NaNs and
1534 either operand is a NaN. */
1535 if (HONOR_SNANS (mode)
1536 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1539 /* Don't perform operation if it would raise a division
1540 by zero exception. */
1541 if (code == RDIV_EXPR
1542 && REAL_VALUES_EQUAL (d2, dconst0)
1543 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1546 /* If either operand is a NaN, just return it. Otherwise, set up
1547 for floating-point trap; we return an overflow. */
1548 if (REAL_VALUE_ISNAN (d1))
1550 else if (REAL_VALUE_ISNAN (d2))
1553 inexact = real_arithmetic (&value, code, &d1, &d2);
1554 real_convert (&result, mode, &value);
1556 /* Don't constant fold this floating point operation if
1557 the result has overflowed and flag_trapping_math. */
1558 if (flag_trapping_math
1559 && MODE_HAS_INFINITIES (mode)
1560 && REAL_VALUE_ISINF (result)
1561 && !REAL_VALUE_ISINF (d1)
1562 && !REAL_VALUE_ISINF (d2))
1565 /* Don't constant fold this floating point operation if the
1566 result may depend upon the run-time rounding mode and
1567 flag_rounding_math is set, or if GCC's software emulation
1568 is unable to accurately represent the result. */
1569 if ((flag_rounding_math
1570 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1571 && !flag_unsafe_math_optimizations))
1572 && (inexact || !real_identical (&result, &value)))
1575 t = build_real (type, result);
1577 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1578 TREE_CONSTANT_OVERFLOW (t)
1580 | TREE_CONSTANT_OVERFLOW (arg1)
1581 | TREE_CONSTANT_OVERFLOW (arg2);
1585 if (TREE_CODE (arg1) == COMPLEX_CST)
1587 tree type = TREE_TYPE (arg1);
1588 tree r1 = TREE_REALPART (arg1);
1589 tree i1 = TREE_IMAGPART (arg1);
1590 tree r2 = TREE_REALPART (arg2);
1591 tree i2 = TREE_IMAGPART (arg2);
1598 real = const_binop (code, r1, r2, notrunc);
1599 imag = const_binop (code, i1, i2, notrunc);
1603 real = const_binop (MINUS_EXPR,
1604 const_binop (MULT_EXPR, r1, r2, notrunc),
1605 const_binop (MULT_EXPR, i1, i2, notrunc),
1607 imag = const_binop (PLUS_EXPR,
1608 const_binop (MULT_EXPR, r1, i2, notrunc),
1609 const_binop (MULT_EXPR, i1, r2, notrunc),
1616 = const_binop (PLUS_EXPR,
1617 const_binop (MULT_EXPR, r2, r2, notrunc),
1618 const_binop (MULT_EXPR, i2, i2, notrunc),
1621 = const_binop (PLUS_EXPR,
1622 const_binop (MULT_EXPR, r1, r2, notrunc),
1623 const_binop (MULT_EXPR, i1, i2, notrunc),
1626 = const_binop (MINUS_EXPR,
1627 const_binop (MULT_EXPR, i1, r2, notrunc),
1628 const_binop (MULT_EXPR, r1, i2, notrunc),
1631 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1632 code = TRUNC_DIV_EXPR;
1634 real = const_binop (code, t1, magsquared, notrunc);
1635 imag = const_binop (code, t2, magsquared, notrunc);
1644 return build_complex (type, real, imag);
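/* Reference note (added for clarity): the complex quotient above uses
   the textbook identity
     (r1 + i1*i) / (r2 + i2*i)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2),
   with t1 and t2 holding the two numerators and magsquared the shared
   real denominator; for integral complex types the final divisions are
   done with TRUNC_DIV_EXPR.  */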
1650 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1651 indicates which particular sizetype to create. */
1654 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1656 return build_int_cst (sizetype_tab[(int) kind], number);
1659 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1660 is a tree code. The type of the result is taken from the operands.
1661 Both must be the same integer type, and it must be a size type.
1662 If the operands are constant, so is the result. */
1665 size_binop (enum tree_code code, tree arg0, tree arg1)
1667 tree type = TREE_TYPE (arg0);
1669 if (arg0 == error_mark_node || arg1 == error_mark_node)
1670 return error_mark_node;
1672 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1673 && type == TREE_TYPE (arg1));
1675 /* Handle the special case of two integer constants faster. */
1676 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1678 /* And some specific cases even faster than that. */
1679 if (code == PLUS_EXPR && integer_zerop (arg0))
1681 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1682 && integer_zerop (arg1))
1684 else if (code == MULT_EXPR && integer_onep (arg0))
1687 /* Handle general case of two integer constants. */
1688 return int_const_binop (code, arg0, arg1, 0);
1691 return fold_build2 (code, type, arg0, arg1);
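/* Usage sketch (illustrative): size computations in the middle end are
   built with these helpers, e.g.

     tree bytes = size_binop (MULT_EXPR, size_int (4), size_int (8));

   which folds immediately to the sizetype constant 32 because both
   operands are INTEGER_CSTs.  */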
1694 /* Given two values, either both of sizetype or both of bitsizetype,
1695 compute the difference between the two values. Return the value
1696 in signed type corresponding to the type of the operands. */
1699 size_diffop (tree arg0, tree arg1)
1701 tree type = TREE_TYPE (arg0);
1704 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1705 && type == TREE_TYPE (arg1));
1707 /* If the type is already signed, just do the simple thing. */
1708 if (!TYPE_UNSIGNED (type))
1709 return size_binop (MINUS_EXPR, arg0, arg1);
1711 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1713 /* If either operand is not a constant, do the conversions to the signed
1714 type and subtract. The hardware will do the right thing with any
1715 overflow in the subtraction. */
1716 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1717 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1718 fold_convert (ctype, arg1));
1720 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1721 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1722 overflow) and negate (which can't either). Special-case a result
1723 of zero while we're here. */
1724 if (tree_int_cst_equal (arg0, arg1))
1725 return fold_convert (ctype, integer_zero_node);
1726 else if (tree_int_cst_lt (arg1, arg0))
1727 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1729 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1730 fold_convert (ctype, size_binop (MINUS_EXPR,
1734 /* A subroutine of fold_convert_const handling conversions of an
1735 INTEGER_CST to another integer type. */
1738 fold_convert_const_int_from_int (tree type, tree arg1)
1742 /* Given an integer constant, make new constant with new type,
1743 appropriately sign-extended or truncated. */
1744 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1745 TREE_INT_CST_HIGH (arg1));
1747 t = force_fit_type (t,
1748 /* Don't set the overflow when
1749 converting a pointer */
1750 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1751 (TREE_INT_CST_HIGH (arg1) < 0
1752 && (TYPE_UNSIGNED (type)
1753 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1754 | TREE_OVERFLOW (arg1),
1755 TREE_CONSTANT_OVERFLOW (arg1));
1760 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1761 to an integer type. */
1764 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1769 /* The following code implements the floating point to integer
1770 conversion rules required by the Java Language Specification,
1771 that IEEE NaNs are mapped to zero and values that overflow
1772 the target precision saturate, i.e. values greater than
1773 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1774 are mapped to INT_MIN. These semantics are allowed by the
1775 C and C++ standards that simply state that the behavior of
1776 FP-to-integer conversion is unspecified upon overflow. */
1778 HOST_WIDE_INT high, low;
1780 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1784 case FIX_TRUNC_EXPR:
1785 real_trunc (&r, VOIDmode, &x);
1789 real_ceil (&r, VOIDmode, &x);
1792 case FIX_FLOOR_EXPR:
1793 real_floor (&r, VOIDmode, &x);
1796 case FIX_ROUND_EXPR:
1797 real_round (&r, VOIDmode, &x);
1804 /* If R is NaN, return zero and show we have an overflow. */
1805 if (REAL_VALUE_ISNAN (r))
1812 /* See if R is less than the lower bound or greater than the upper bound.  */
1817 tree lt = TYPE_MIN_VALUE (type);
1818 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1819 if (REAL_VALUES_LESS (r, l))
1822 high = TREE_INT_CST_HIGH (lt);
1823 low = TREE_INT_CST_LOW (lt);
1829 tree ut = TYPE_MAX_VALUE (type);
1832 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1833 if (REAL_VALUES_LESS (u, r))
1836 high = TREE_INT_CST_HIGH (ut);
1837 low = TREE_INT_CST_LOW (ut);
1843 REAL_VALUE_TO_INT (&low, &high, r);
1845 t = build_int_cst_wide (type, low, high);
1847 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1848 TREE_CONSTANT_OVERFLOW (arg1));
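/* Example of the saturating behavior described above (illustrative):
   converting the REAL_CST 1.0e30 to a 32-bit signed type yields
   INT_MAX with TREE_OVERFLOW set, -1.0e30 yields INT_MIN, and a NaN
   yields zero, as permitted by the C and C++ rules cited in the
   comment above.  */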
1852 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1853 to another floating point type. */
1856 fold_convert_const_real_from_real (tree type, tree arg1)
1858 REAL_VALUE_TYPE value;
1861 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1862 t = build_real (type, value);
1864 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1865 TREE_CONSTANT_OVERFLOW (t)
1866 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1870 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1871 type TYPE. If no simplification can be done return NULL_TREE. */
1874 fold_convert_const (enum tree_code code, tree type, tree arg1)
1876 if (TREE_TYPE (arg1) == type)
1879 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1881 if (TREE_CODE (arg1) == INTEGER_CST)
1882 return fold_convert_const_int_from_int (type, arg1);
1883 else if (TREE_CODE (arg1) == REAL_CST)
1884 return fold_convert_const_int_from_real (code, type, arg1);
1886 else if (TREE_CODE (type) == REAL_TYPE)
1888 if (TREE_CODE (arg1) == INTEGER_CST)
1889 return build_real_from_int_cst (type, arg1);
1890 if (TREE_CODE (arg1) == REAL_CST)
1891 return fold_convert_const_real_from_real (type, arg1);
1896 /* Construct a vector of zero elements of vector type TYPE. */
1899 build_zero_vector (tree type)
1904 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1905 units = TYPE_VECTOR_SUBPARTS (type);
1908 for (i = 0; i < units; i++)
1909 list = tree_cons (NULL_TREE, elem, list);
1910 return build_vector (type, list);
1913 /* Convert expression ARG to type TYPE. Used by the middle-end for
1914 simple conversions in preference to calling the front-end's convert. */
1917 fold_convert (tree type, tree arg)
1919 tree orig = TREE_TYPE (arg);
1925 if (TREE_CODE (arg) == ERROR_MARK
1926 || TREE_CODE (type) == ERROR_MARK
1927 || TREE_CODE (orig) == ERROR_MARK)
1928 return error_mark_node;
1930 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1931 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1932 TYPE_MAIN_VARIANT (orig)))
1933 return fold_build1 (NOP_EXPR, type, arg);
1935 switch (TREE_CODE (type))
1937 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1938 case POINTER_TYPE: case REFERENCE_TYPE:
1940 if (TREE_CODE (arg) == INTEGER_CST)
1942 tem = fold_convert_const (NOP_EXPR, type, arg);
1943 if (tem != NULL_TREE)
1946 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1947 || TREE_CODE (orig) == OFFSET_TYPE)
1948 return fold_build1 (NOP_EXPR, type, arg);
1949 if (TREE_CODE (orig) == COMPLEX_TYPE)
1951 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1952 return fold_convert (type, tem);
1954 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1955 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1956 return fold_build1 (NOP_EXPR, type, arg);
1959 if (TREE_CODE (arg) == INTEGER_CST)
1961 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1962 if (tem != NULL_TREE)
1965 else if (TREE_CODE (arg) == REAL_CST)
1967 tem = fold_convert_const (NOP_EXPR, type, arg);
1968 if (tem != NULL_TREE)
1972 switch (TREE_CODE (orig))
1974 case INTEGER_TYPE: case CHAR_TYPE:
1975 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1976 case POINTER_TYPE: case REFERENCE_TYPE:
1977 return fold_build1 (FLOAT_EXPR, type, arg);
1980 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1984 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1985 return fold_convert (type, tem);
1992 switch (TREE_CODE (orig))
1994 case INTEGER_TYPE: case CHAR_TYPE:
1995 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1996 case POINTER_TYPE: case REFERENCE_TYPE:
1998 return build2 (COMPLEX_EXPR, type,
1999 fold_convert (TREE_TYPE (type), arg),
2000 fold_convert (TREE_TYPE (type), integer_zero_node));
2005 if (TREE_CODE (arg) == COMPLEX_EXPR)
2007 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2008 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2009 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2012 arg = save_expr (arg);
2013 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2014 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2015 rpart = fold_convert (TREE_TYPE (type), rpart);
2016 ipart = fold_convert (TREE_TYPE (type), ipart);
2017 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2025 if (integer_zerop (arg))
2026 return build_zero_vector (type);
2027 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2028 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2029 || TREE_CODE (orig) == VECTOR_TYPE);
2030 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2033 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
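/* Usage sketch (illustrative, not from the original sources): callers
   in the middle end prefer fold_convert to the front end's convert for
   simple cases, e.g.

     tree widened = fold_convert (long_integer_type_node, expr);

   where expr is some existing expression tree; constants are folded
   immediately through fold_convert_const, and other operands get a
   NOP_EXPR, FLOAT_EXPR, or the COMPLEX/VECTOR constructions shown
   above.  */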
2040 /* Return false if expr can be assumed not to be an lvalue, true otherwise.  */
2044 maybe_lvalue_p (tree x)
2046 /* We only need to wrap lvalue tree codes. */
2047 switch (TREE_CODE (x))
2058 case ALIGN_INDIRECT_REF:
2059 case MISALIGNED_INDIRECT_REF:
2061 case ARRAY_RANGE_REF:
2067 case PREINCREMENT_EXPR:
2068 case PREDECREMENT_EXPR:
2070 case TRY_CATCH_EXPR:
2071 case WITH_CLEANUP_EXPR:
2082 /* Assume the worst for front-end tree codes. */
2083 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2091 /* Return an expr equal to X but certainly not valid as an lvalue. */
2096 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to us.  */
2101 if (! maybe_lvalue_p (x))
2103 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2106 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2107 Zero means allow extended lvalues. */
2109 int pedantic_lvalues;
2111 /* When pedantic, return an expr equal to X but certainly not valid as a
2112 pedantic lvalue. Otherwise, return X. */
2115 pedantic_non_lvalue (tree x)
2117 if (pedantic_lvalues)
2118 return non_lvalue (x);
2123 /* Given a tree comparison code, return the code that is the logical inverse
2124 of the given code. It is not safe to do this for floating-point
2125 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2126 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2129 invert_tree_comparison (enum tree_code code, bool honor_nans)
2131 if (honor_nans && flag_trapping_math)
2141 return honor_nans ? UNLE_EXPR : LE_EXPR;
2143 return honor_nans ? UNLT_EXPR : LT_EXPR;
2145 return honor_nans ? UNGE_EXPR : GE_EXPR;
2147 return honor_nans ? UNGT_EXPR : GT_EXPR;
2161 return UNORDERED_EXPR;
2162 case UNORDERED_EXPR:
2163 return ORDERED_EXPR;
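/* Example (added for illustration): with trapping math disabled,
   inverting x < y while honoring NaNs must produce x UNGE y rather
   than x >= y, because both x < y and x >= y are false when either
   operand is a NaN; only EQ_EXPR and NE_EXPR can always be inverted
   naively.  */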
2169 /* Similar, but return the comparison that results if the operands are
2170 swapped. This is safe for floating-point. */
2173 swap_tree_comparison (enum tree_code code)
2180 case UNORDERED_EXPR:
2206 /* Convert a comparison tree code from an enum tree_code representation
2207 into a compcode bit-based encoding. This function is the inverse of
2208 compcode_to_comparison. */
2210 static enum comparison_code
2211 comparison_to_compcode (enum tree_code code)
2228 return COMPCODE_ORD;
2229 case UNORDERED_EXPR:
2230 return COMPCODE_UNORD;
2232 return COMPCODE_UNLT;
2234 return COMPCODE_UNEQ;
2236 return COMPCODE_UNLE;
2238 return COMPCODE_UNGT;
2240 return COMPCODE_LTGT;
2242 return COMPCODE_UNGE;
2248 /* Convert a compcode bit-based encoding of a comparison operator back
2249 to GCC's enum tree_code representation. This function is the
2250 inverse of comparison_to_compcode. */
2252 static enum tree_code
2253 compcode_to_comparison (enum comparison_code code)
2270 return ORDERED_EXPR;
2271 case COMPCODE_UNORD:
2272 return UNORDERED_EXPR;
2290 /* Return a tree for the comparison which is the combination of
2291 doing the AND or OR (depending on CODE) of the two operations LCODE
2292 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2293 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2294 if this makes the transformation invalid. */
2297 combine_comparisons (enum tree_code code, enum tree_code lcode,
2298 enum tree_code rcode, tree truth_type,
2299 tree ll_arg, tree lr_arg)
2301 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2302 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2303 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2304 enum comparison_code compcode;
2308 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2309 compcode = lcompcode & rcompcode;
2312 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2313 compcode = lcompcode | rcompcode;
2322 /* Eliminate unordered comparisons, as well as LTGT and ORD
2323 which are not used unless the mode has NaNs. */
2324 compcode &= ~COMPCODE_UNORD;
2325 if (compcode == COMPCODE_LTGT)
2326 compcode = COMPCODE_NE;
2327 else if (compcode == COMPCODE_ORD)
2328 compcode = COMPCODE_TRUE;
2330 else if (flag_trapping_math)
2332 /* Check that the original operation and the optimized ones will trap
2333 under the same condition. */
2334 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2335 && (lcompcode != COMPCODE_EQ)
2336 && (lcompcode != COMPCODE_ORD);
2337 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2338 && (rcompcode != COMPCODE_EQ)
2339 && (rcompcode != COMPCODE_ORD);
2340 bool trap = (compcode & COMPCODE_UNORD) == 0
2341 && (compcode != COMPCODE_EQ)
2342 && (compcode != COMPCODE_ORD);
2344 /* In a short-circuited boolean expression the LHS might be
2345 such that the RHS, if evaluated, will never trap. For
2346 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2347 if neither x nor y is NaN. (This is a mixed blessing: for
2348 example, the expression above will never trap, hence
2349 optimizing it to x < y would be invalid). */
2350 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2351 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2354 /* If the comparison was short-circuited, and only the RHS
2355 trapped, we may now generate a spurious trap. */
2357 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2360 /* If we changed the conditions that cause a trap, we lose. */
2361 if ((ltrap || rtrap) != trap)
2365 if (compcode == COMPCODE_TRUE)
2366 return constant_boolean_node (true, truth_type);
2367 else if (compcode == COMPCODE_FALSE)
2368 return constant_boolean_node (false, truth_type);
2370 return fold_build2 (compcode_to_comparison (compcode),
2371 truth_type, ll_arg, lr_arg);
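/* Example (added for illustration): combining (a <= b) && (a >= b) on
   identical operands goes through the compcode encoding, where the two
   bit sets intersect in exactly the "equal" bit, so the whole
   expression folds to a == b; likewise (a < b) || (a == b) folds to
   a <= b, and an always-true or always-false combination becomes a
   constant via constant_boolean_node.  */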
2374 /* Return nonzero if CODE is a tree code that represents a truth value. */
2377 truth_value_p (enum tree_code code)
2379 return (TREE_CODE_CLASS (code) == tcc_comparison
2380 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2381 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2382 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2385 /* Return nonzero if two operands (typically of the same tree node)
2386 are necessarily equal. If either argument has side-effects this
2387 function returns zero. FLAGS modifies behavior as follows:
2389 If OEP_ONLY_CONST is set, only return nonzero for constants.
2390 This function tests whether the operands are indistinguishable;
2391 it does not test whether they are equal using C's == operation.
2392 The distinction is important for IEEE floating point, because
2393 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2394 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2396 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2397 even though it may hold multiple values during a function.
2398 This is because a GCC tree node guarantees that nothing else is
2399 executed between the evaluation of its "operands" (which may often
2400 be evaluated in arbitrary order). Hence if the operands themselves
2401 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2402 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2403 unset means assuming isochronic (or instantaneous) tree equivalence.
2404 Unless comparing arbitrary expression trees, such as from different
2405 statements, this flag can usually be left unset.
2407 If OEP_PURE_SAME is set, then pure functions with identical arguments
2408 are considered the same. It is used when the caller has other ways
2409 to ensure that global memory is unchanged in between. */
2412 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2414 /* If either is ERROR_MARK, they aren't equal. */
2415 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2418 /* If both types don't have the same signedness, then we can't consider
2419 them equal. We must check this before the STRIP_NOPS calls
2420 because they may change the signedness of the arguments. */
2421 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2424 /* If both types don't have the same precision, then it is not safe
2426 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2432 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2433 /* This is needed for conversions and for COMPONENT_REF.
2434 Might as well play it safe and always test this. */
2435 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2436 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2437 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2440 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2441 We don't care about side effects in that case because the SAVE_EXPR
2442 takes care of that for us. In all other cases, two expressions are
2443 equal if they have no side effects. If we have two identical
2444 expressions with side effects that should be treated the same due
2445 to the only side effects being identical SAVE_EXPR's, that will
2446 be detected in the recursive calls below. */
2447 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2448 && (TREE_CODE (arg0) == SAVE_EXPR
2449 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2452 /* Next handle constant cases, those for which we can return 1 even
2453 if ONLY_CONST is set. */
2454 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2455 switch (TREE_CODE (arg0))
2458 return (! TREE_CONSTANT_OVERFLOW (arg0)
2459 && ! TREE_CONSTANT_OVERFLOW (arg1)
2460 && tree_int_cst_equal (arg0, arg1));
2463 return (! TREE_CONSTANT_OVERFLOW (arg0)
2464 && ! TREE_CONSTANT_OVERFLOW (arg1)
2465 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2466 TREE_REAL_CST (arg1)));
2472 if (TREE_CONSTANT_OVERFLOW (arg0)
2473 || TREE_CONSTANT_OVERFLOW (arg1))
2476 v1 = TREE_VECTOR_CST_ELTS (arg0);
2477 v2 = TREE_VECTOR_CST_ELTS (arg1);
2480 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2483 v1 = TREE_CHAIN (v1);
2484 v2 = TREE_CHAIN (v2);
2491 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2493 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2497 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2498 && ! memcmp (TREE_STRING_POINTER (arg0),
2499 TREE_STRING_POINTER (arg1),
2500 TREE_STRING_LENGTH (arg0)));
2503 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2509 if (flags & OEP_ONLY_CONST)
2512 /* Define macros to test an operand from arg0 and arg1 for equality and a
2513 variant that allows null and views null as being different from any
2514 non-null value. In the latter case, if either is null, then both
2515 must be; otherwise, do the normal comparison. */
2516 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2517 TREE_OPERAND (arg1, N), flags)
2519 #define OP_SAME_WITH_NULL(N) \
2520 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2521 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2523 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2526 /* Two conversions are equal only if signedness and modes match. */
2527 switch (TREE_CODE (arg0))
2532 case FIX_TRUNC_EXPR:
2533 case FIX_FLOOR_EXPR:
2534 case FIX_ROUND_EXPR:
2535 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2536 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2546 case tcc_comparison:
2548 if (OP_SAME (0) && OP_SAME (1))
2551 /* For commutative ops, allow the other order. */
2552 return (commutative_tree_code (TREE_CODE (arg0))
2553 && operand_equal_p (TREE_OPERAND (arg0, 0),
2554 TREE_OPERAND (arg1, 1), flags)
2555 && operand_equal_p (TREE_OPERAND (arg0, 1),
2556 TREE_OPERAND (arg1, 0), flags));
2559 /* If either of the pointer (or reference) expressions we are
2560 dereferencing contain a side effect, these cannot be equal. */
2561 if (TREE_SIDE_EFFECTS (arg0)
2562 || TREE_SIDE_EFFECTS (arg1))
2565 switch (TREE_CODE (arg0))
2568 case ALIGN_INDIRECT_REF:
2569 case MISALIGNED_INDIRECT_REF:
2575 case ARRAY_RANGE_REF:
2576 /* Operands 2 and 3 may be null. */
2579 && OP_SAME_WITH_NULL (2)
2580 && OP_SAME_WITH_NULL (3));
2583 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2584 may be NULL when we're called to compare MEM_EXPRs. */
2585 return OP_SAME_WITH_NULL (0)
2587 && OP_SAME_WITH_NULL (2);
2590 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2596 case tcc_expression:
2597 switch (TREE_CODE (arg0))
2600 case TRUTH_NOT_EXPR:
2603 case TRUTH_ANDIF_EXPR:
2604 case TRUTH_ORIF_EXPR:
2605 return OP_SAME (0) && OP_SAME (1);
2607 case TRUTH_AND_EXPR:
2609 case TRUTH_XOR_EXPR:
2610 if (OP_SAME (0) && OP_SAME (1))
2613 /* Otherwise take into account this is a commutative operation. */
2614 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2615 TREE_OPERAND (arg1, 1), flags)
2616 && operand_equal_p (TREE_OPERAND (arg0, 1),
2617 TREE_OPERAND (arg1, 0), flags));
2620 /* If the CALL_EXPRs call different functions, then they
2621 clearly cannot be equal. */
2626 unsigned int cef = call_expr_flags (arg0);
2627 if (flags & OEP_PURE_SAME)
2628 cef &= ECF_CONST | ECF_PURE;
2635 /* Now see if all the arguments are the same. operand_equal_p
2636 does not handle TREE_LIST, so we walk the operands here
2637 feeding them to operand_equal_p. */
2638 arg0 = TREE_OPERAND (arg0, 1);
2639 arg1 = TREE_OPERAND (arg1, 1);
2640 while (arg0 && arg1)
2642 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2646 arg0 = TREE_CHAIN (arg0);
2647 arg1 = TREE_CHAIN (arg1);
2650 /* If we get here and both argument lists are exhausted
2651 then the CALL_EXPRs are equal. */
2652 return ! (arg0 || arg1);
2658 case tcc_declaration:
2659 /* Consider __builtin_sqrt equal to sqrt. */
2660 return (TREE_CODE (arg0) == FUNCTION_DECL
2661 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2662 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2663 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2670 #undef OP_SAME_WITH_NULL
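/* Illustrative sketch, not part of this file's interfaces: why the
   comment before operand_equal_p distinguishes "indistinguishable"
   from C's == for floating-point constants.  Uses only C99 <math.h>;
   guarded out of the build.  */
#if 0
#include <math.h>

static void
example_ieee_identity (void)
{
  double pz = 0.0, nz = -0.0, qnan = NAN;

  /* (1) -0.0 and 0.0 compare equal with ==, yet they behave
     differently (1.0 / pz is +inf, 1.0 / nz is -inf), so they must
     not be treated as the same constant.  */
  int equal_but_distinct = (pz == nz) && (signbit (pz) != signbit (nz));

  /* (2) A NaN never compares equal to itself with ==, yet two copies
     of the same NaN constant are indistinguishable operands.  */
  int unequal_but_identical = (qnan != qnan);

  (void) equal_but_distinct;
  (void) unequal_but_identical;
}
#endif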
2673 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2674 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2676 When in doubt, return 0. */
2679 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2681 int unsignedp1, unsignedpo;
2682 tree primarg0, primarg1, primother;
2683 unsigned int correct_width;
2685 if (operand_equal_p (arg0, arg1, 0))
2688 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2689 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2692 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2693 and see if the inner values are the same. This removes any
2694 signedness comparison, which doesn't matter here. */
2695 primarg0 = arg0, primarg1 = arg1;
2696 STRIP_NOPS (primarg0);
2697 STRIP_NOPS (primarg1);
2698 if (operand_equal_p (primarg0, primarg1, 0))
2701 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2702 actual comparison operand, ARG0.
2704 First throw away any conversions to wider types
2705 already present in the operands. */
2707 primarg1 = get_narrower (arg1, &unsignedp1);
2708 primother = get_narrower (other, &unsignedpo);
2710 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2711 if (unsignedp1 == unsignedpo
2712 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2713 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2715 tree type = TREE_TYPE (arg0);
2717 /* Make sure shorter operand is extended the right way
2718 to match the longer operand. */
2719 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2720 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2722 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2729 /* See if ARG is an expression that is either a comparison or is performing
2730 arithmetic on comparisons. The comparisons must only be comparing
2731 two different values, which will be stored in *CVAL1 and *CVAL2; if
2732 they are nonzero it means that some operands have already been found.
2733 No variables may be used anywhere else in the expression except in the
2734 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2735 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2737 If this is true, return 1. Otherwise, return zero. */
2740 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2742 enum tree_code code = TREE_CODE (arg);
2743 enum tree_code_class class = TREE_CODE_CLASS (code);
2745 /* We can handle some of the tcc_expression cases here. */
2746 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2748 else if (class == tcc_expression
2749 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2750 || code == COMPOUND_EXPR))
2753 else if (class == tcc_expression && code == SAVE_EXPR
2754 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2756 /* If we've already found a CVAL1 or CVAL2, this expression is
2757 too complex to handle. */
2758 if (*cval1 || *cval2)
2768 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2771 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2772 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2773 cval1, cval2, save_p));
2778 case tcc_expression:
2779 if (code == COND_EXPR)
2780 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2781 cval1, cval2, save_p)
2782 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2783 cval1, cval2, save_p)
2784 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2785 cval1, cval2, save_p));
2788 case tcc_comparison:
2789 /* First see if we can handle the first operand, then the second. For
2790 the second operand, we know *CVAL1 can't be zero. It must be that
2791 one side of the comparison is each of the values; test for the
2792 case where this isn't true by failing if the two operands
2795 if (operand_equal_p (TREE_OPERAND (arg, 0),
2796 TREE_OPERAND (arg, 1), 0))
2800 *cval1 = TREE_OPERAND (arg, 0);
2801 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2803 else if (*cval2 == 0)
2804 *cval2 = TREE_OPERAND (arg, 0);
2805 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2810 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2812 else if (*cval2 == 0)
2813 *cval2 = TREE_OPERAND (arg, 1);
2814 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2826 /* ARG is a tree that is known to contain just arithmetic operations and
2827 comparisons. Evaluate the operations in the tree substituting NEW0 for
2828 any occurrence of OLD0 as an operand of a comparison and likewise for
2832 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2834 tree type = TREE_TYPE (arg);
2835 enum tree_code code = TREE_CODE (arg);
2836 enum tree_code_class class = TREE_CODE_CLASS (code);
2838 /* We can handle some of the tcc_expression cases here. */
2839 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2841 else if (class == tcc_expression
2842 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2848 return fold_build1 (code, type,
2849 eval_subst (TREE_OPERAND (arg, 0),
2850 old0, new0, old1, new1));
2853 return fold_build2 (code, type,
2854 eval_subst (TREE_OPERAND (arg, 0),
2855 old0, new0, old1, new1),
2856 eval_subst (TREE_OPERAND (arg, 1),
2857 old0, new0, old1, new1));
2859 case tcc_expression:
2863 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2866 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2869 return fold_build3 (code, type,
2870 eval_subst (TREE_OPERAND (arg, 0),
2871 old0, new0, old1, new1),
2872 eval_subst (TREE_OPERAND (arg, 1),
2873 old0, new0, old1, new1),
2874 eval_subst (TREE_OPERAND (arg, 2),
2875 old0, new0, old1, new1));
2879 /* Fall through - ??? */
2881 case tcc_comparison:
2883 tree arg0 = TREE_OPERAND (arg, 0);
2884 tree arg1 = TREE_OPERAND (arg, 1);
2886 /* We need to check both for exact equality and tree equality. The
2887 former will be true if the operand has a side-effect. In that
2888 case, we know the operand occurred exactly once. */
2890 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2892 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2895 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2897 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2900 return fold_build2 (code, type, arg0, arg1);
2908 /* Return a tree for the case when the result of an expression is RESULT
2909 converted to TYPE and OMITTED was previously an operand of the expression
2910 but is now not needed (e.g., we folded OMITTED * 0).
2912 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2913 the conversion of RESULT to TYPE. */
2916 omit_one_operand (tree type, tree result, tree omitted)
2918 tree t = fold_convert (type, result);
2920 if (TREE_SIDE_EFFECTS (omitted))
2921 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2923 return non_lvalue (t);
2926 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2929 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2931 tree t = fold_convert (type, result);
2933 if (TREE_SIDE_EFFECTS (omitted))
2934 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2936 return pedantic_non_lvalue (t);
2939 /* Return a tree for the case when the result of an expression is RESULT
2940 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2941 of the expression but are now not needed.
2943 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2944 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2945 evaluated before OMITTED2. Otherwise, if neither has side effects,
2946 just do the conversion of RESULT to TYPE. */
2949 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2951 tree t = fold_convert (type, result);
2953 if (TREE_SIDE_EFFECTS (omitted2))
2954 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2955 if (TREE_SIDE_EFFECTS (omitted1))
2956 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2958 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
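/* Illustrative sketch, not part of this file's interfaces: the
   source-level effect of omit_one_operand.  Folding "example_f () * 0"
   to a bare 0 would lose the call, so the omitted operand is kept for
   its side effects, as if the user had written "(example_f (), 0)".
   example_f and example_counter are made-up names.  Guarded out of
   the build.  */
#if 0
static int example_counter;

static int
example_f (void)
{
  return ++example_counter;
}

static int
example_omit_one_operand (void)
{
  /* What "example_f () * 0" must still behave like after folding:
     example_counter is bumped exactly once, the result is 0.  */
  return (example_f (), 0);
}
#endif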
2962 /* Return a simplified tree node for the truth-negation of ARG. This
2963 never alters ARG itself. We assume that ARG is an operation that
2964 returns a truth value (0 or 1).
2966 FIXME: one would think we would fold the result, but it causes
2967 problems with the dominator optimizer. */
2969 invert_truthvalue (tree arg)
2971 tree type = TREE_TYPE (arg);
2972 enum tree_code code = TREE_CODE (arg);
2974 if (code == ERROR_MARK)
2977 /* If this is a comparison, we can simply invert it, except for
2978 floating-point non-equality comparisons, in which case we just
2979 enclose a TRUTH_NOT_EXPR around what we have. */
2981 if (TREE_CODE_CLASS (code) == tcc_comparison)
2983 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2984 if (FLOAT_TYPE_P (op_type)
2985 && flag_trapping_math
2986 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2987 && code != NE_EXPR && code != EQ_EXPR)
2988 return build1 (TRUTH_NOT_EXPR, type, arg);
2991 code = invert_tree_comparison (code,
2992 HONOR_NANS (TYPE_MODE (op_type)));
2993 if (code == ERROR_MARK)
2994 return build1 (TRUTH_NOT_EXPR, type, arg);
2996 return build2 (code, type,
2997 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3004 return constant_boolean_node (integer_zerop (arg), type);
3006 case TRUTH_AND_EXPR:
3007 return build2 (TRUTH_OR_EXPR, type,
3008 invert_truthvalue (TREE_OPERAND (arg, 0)),
3009 invert_truthvalue (TREE_OPERAND (arg, 1)));
3012 return build2 (TRUTH_AND_EXPR, type,
3013 invert_truthvalue (TREE_OPERAND (arg, 0)),
3014 invert_truthvalue (TREE_OPERAND (arg, 1)));
3016 case TRUTH_XOR_EXPR:
3017 /* Here we can invert either operand. We invert the first operand
3018 unless the second operand is a TRUTH_NOT_EXPR in which case our
3019 result is the XOR of the first operand with the inside of the
3020 negation of the second operand. */
3022 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3023 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3024 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3026 return build2 (TRUTH_XOR_EXPR, type,
3027 invert_truthvalue (TREE_OPERAND (arg, 0)),
3028 TREE_OPERAND (arg, 1));
3030 case TRUTH_ANDIF_EXPR:
3031 return build2 (TRUTH_ORIF_EXPR, type,
3032 invert_truthvalue (TREE_OPERAND (arg, 0)),
3033 invert_truthvalue (TREE_OPERAND (arg, 1)));
3035 case TRUTH_ORIF_EXPR:
3036 return build2 (TRUTH_ANDIF_EXPR, type,
3037 invert_truthvalue (TREE_OPERAND (arg, 0)),
3038 invert_truthvalue (TREE_OPERAND (arg, 1)));
3040 case TRUTH_NOT_EXPR:
3041 return TREE_OPERAND (arg, 0);
3045 tree arg1 = TREE_OPERAND (arg, 1);
3046 tree arg2 = TREE_OPERAND (arg, 2);
3047 /* A COND_EXPR may have a throw as one operand, which
3048 then has void type. Just leave void operands
3050 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3051 VOID_TYPE_P (TREE_TYPE (arg1))
3052 ? arg1 : invert_truthvalue (arg1),
3053 VOID_TYPE_P (TREE_TYPE (arg2))
3054 ? arg2 : invert_truthvalue (arg2));
3058 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3059 invert_truthvalue (TREE_OPERAND (arg, 1)));
3061 case NON_LVALUE_EXPR:
3062 return invert_truthvalue (TREE_OPERAND (arg, 0));
3065 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3070 return build1 (TREE_CODE (arg), type,
3071 invert_truthvalue (TREE_OPERAND (arg, 0)));
3074 if (!integer_onep (TREE_OPERAND (arg, 1)))
3076 return build2 (EQ_EXPR, type, arg,
3077 fold_convert (type, integer_zero_node));
3080 return build1 (TRUTH_NOT_EXPR, type, arg);
3082 case CLEANUP_POINT_EXPR:
3083 return build1 (CLEANUP_POINT_EXPR, type,
3084 invert_truthvalue (TREE_OPERAND (arg, 0)));
3089 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3090 return build1 (TRUTH_NOT_EXPR, type, arg);
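/* Illustrative sketch, not part of this file's interfaces: the
   rewrites invert_truthvalue performs, written at the source level.
   As noted above, !(x < y) may only become x >= y when NaNs cannot
   occur; for integers the identities below always hold.  Guarded out
   of the build.  */
#if 0
static int
example_invert_truthvalue (int a, int b)
{
  int demorgan_and = (!(a && b)) == (!a || !b);  /* always 1 */
  int demorgan_or  = (!(a || b)) == (!a && !b);  /* always 1 */
  int cmp_inverted = (!(a < b)) == (a >= b);     /* always 1 for integers */
  return demorgan_and && demorgan_or && cmp_inverted;
}
#endif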
3093 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3094 operands are another bit-wise operation with a common input. If so,
3095 distribute the bit operations to save an operation and possibly two if
3096 constants are involved. For example, convert
3097 (A | B) & (A | C) into A | (B & C)
3098 Further simplification will occur if B and C are constants.
3100 If this optimization cannot be done, 0 will be returned. */
3103 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3108 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3109 || TREE_CODE (arg0) == code
3110 || (TREE_CODE (arg0) != BIT_AND_EXPR
3111 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3114 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3116 common = TREE_OPERAND (arg0, 0);
3117 left = TREE_OPERAND (arg0, 1);
3118 right = TREE_OPERAND (arg1, 1);
3120 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3122 common = TREE_OPERAND (arg0, 0);
3123 left = TREE_OPERAND (arg0, 1);
3124 right = TREE_OPERAND (arg1, 0);
3126 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3128 common = TREE_OPERAND (arg0, 1);
3129 left = TREE_OPERAND (arg0, 0);
3130 right = TREE_OPERAND (arg1, 1);
3132 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3134 common = TREE_OPERAND (arg0, 1);
3135 left = TREE_OPERAND (arg0, 0);
3136 right = TREE_OPERAND (arg1, 0);
3141 return fold_build2 (TREE_CODE (arg0), type, common,
3142 fold_build2 (code, type, left, right));
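/* Illustrative sketch, not part of this file's interfaces: the
   distributive identities distribute_bit_expr relies on, checked on
   plain unsigned values.  Guarded out of the build.  */
#if 0
static int
example_distribute_bit_expr (unsigned a, unsigned b, unsigned c)
{
  /* Both identities hold bitwise for all inputs.  */
  return ((a | b) & (a | c)) == (a | (b & c))
         && ((a & b) | (a & c)) == (a & (b | c));
}
#endif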
3145 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3146 with code CODE. This optimization is unsafe. */
3148 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3150 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3151 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3153 /* (A / C) +- (B / C) -> (A +- B) / C. */
3155 && operand_equal_p (TREE_OPERAND (arg0, 1),
3156 TREE_OPERAND (arg1, 1), 0))
3157 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3158 fold_build2 (code, type,
3159 TREE_OPERAND (arg0, 0),
3160 TREE_OPERAND (arg1, 0)),
3161 TREE_OPERAND (arg0, 1));
3163 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3164 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3165 TREE_OPERAND (arg1, 0), 0)
3166 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3167 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3169 REAL_VALUE_TYPE r0, r1;
3170 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3171 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3173 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3175 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3176 real_arithmetic (&r0, code, &r0, &r1);
3177 return fold_build2 (MULT_EXPR, type,
3178 TREE_OPERAND (arg0, 0),
3179 build_real (type, r0));
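/* Illustrative sketch, not part of this file's interfaces: the two
   rewrites distribute_real_division performs, in source form.  They
   can round differently from the original expressions, which is why
   the comment above calls the optimization unsafe.  Guarded out of
   the build.  */
#if 0
static double
example_distribute_real_division (double a, double b, double c1, double c2)
{
  double sum_over_common = (a + b) / c1;               /* was a/c1 + b/c1 */
  double common_over_sum = a * (1.0 / c1 + 1.0 / c2);  /* was a/c1 + a/c2 */
  return sum_over_common + common_over_sum;
}
#endif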
3185 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3186 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3189 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3196 tree size = TYPE_SIZE (TREE_TYPE (inner));
3197 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3198 || POINTER_TYPE_P (TREE_TYPE (inner)))
3199 && host_integerp (size, 0)
3200 && tree_low_cst (size, 0) == bitsize)
3201 return fold_convert (type, inner);
3204 result = build3 (BIT_FIELD_REF, type, inner,
3205 size_int (bitsize), bitsize_int (bitpos));
3207 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3212 /* Optimize a bit-field compare.
3214 There are two cases: First is a compare against a constant and the
3215 second is a comparison of two items where the fields are at the same
3216 bit position relative to the start of a chunk (byte, halfword, word)
3217 large enough to contain it. In these cases we can avoid the shift
3218 implicit in bitfield extractions.
3220 For constants, we emit a compare of the shifted constant with the
3221 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3222 compared. For two fields at the same position, we do the ANDs with the
3223 similar mask and compare the result of the ANDs.
3225 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3226 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3227 are the left and right operands of the comparison, respectively.
3229 If the optimization described above can be done, we return the resulting
3230 tree. Otherwise we return zero. */
3233 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3236 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3237 tree type = TREE_TYPE (lhs);
3238 tree signed_type, unsigned_type;
3239 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3240 enum machine_mode lmode, rmode, nmode;
3241 int lunsignedp, runsignedp;
3242 int lvolatilep = 0, rvolatilep = 0;
3243 tree linner, rinner = NULL_TREE;
3247 /* Get all the information about the extractions being done. If the bit size
3248 is the same as the size of the underlying object, we aren't doing an
3249 extraction at all and so can do nothing. We also don't want to
3250 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3251 then will no longer be able to replace it. */
3252 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3253 &lunsignedp, &lvolatilep, false);
3254 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3255 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3260 /* If this is not a constant, we can only do something if bit positions,
3261 sizes, and signedness are the same. */
3262 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3263 &runsignedp, &rvolatilep, false);
3265 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3266 || lunsignedp != runsignedp || offset != 0
3267 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3271 /* See if we can find a mode to refer to this field. We should be able to,
3272 but fail if we can't. */
3273 nmode = get_best_mode (lbitsize, lbitpos,
3274 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3275 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3276 TYPE_ALIGN (TREE_TYPE (rinner))),
3277 word_mode, lvolatilep || rvolatilep);
3278 if (nmode == VOIDmode)
3281 /* Set signed and unsigned types of the precision of this mode for the
3283 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3284 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3286 /* Compute the bit position and size for the new reference and our offset
3287 within it. If the new reference is the same size as the original, we
3288 won't optimize anything, so return zero. */
3289 nbitsize = GET_MODE_BITSIZE (nmode);
3290 nbitpos = lbitpos & ~ (nbitsize - 1);
3292 if (nbitsize == lbitsize)
3295 if (BYTES_BIG_ENDIAN)
3296 lbitpos = nbitsize - lbitsize - lbitpos;
3298 /* Make the mask to be used against the extracted field. */
3299 mask = build_int_cst (unsigned_type, -1);
3300 mask = force_fit_type (mask, 0, false, false);
3301 mask = fold_convert (unsigned_type, mask);
3302 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3303 mask = const_binop (RSHIFT_EXPR, mask,
3304 size_int (nbitsize - lbitsize - lbitpos), 0);
3307 /* If not comparing with constant, just rework the comparison
3309 return build2 (code, compare_type,
3310 build2 (BIT_AND_EXPR, unsigned_type,
3311 make_bit_field_ref (linner, unsigned_type,
3312 nbitsize, nbitpos, 1),
3314 build2 (BIT_AND_EXPR, unsigned_type,
3315 make_bit_field_ref (rinner, unsigned_type,
3316 nbitsize, nbitpos, 1),
3319 /* Otherwise, we are handling the constant case. See if the constant is too
3320 big for the field. Warn and return a tree for 0 (false) if so. We do
3321 this not only for its own sake, but to avoid having to test for this
3322 error case below. If we didn't, we might generate wrong code.
3324 For unsigned fields, the constant shifted right by the field length should
3325 be all zero. For signed fields, the high-order bits should agree with
3330 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3331 fold_convert (unsigned_type, rhs),
3332 size_int (lbitsize), 0)))
3334 warning (0, "comparison is always %d due to width of bit-field",
3336 return constant_boolean_node (code == NE_EXPR, compare_type);
3341 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3342 size_int (lbitsize - 1), 0);
3343 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3345 warning (0, "comparison is always %d due to width of bit-field",
3347 return constant_boolean_node (code == NE_EXPR, compare_type);
3351 /* Single-bit compares should always be against zero. */
3352 if (lbitsize == 1 && ! integer_zerop (rhs))
3354 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3355 rhs = fold_convert (type, integer_zero_node);
3358 /* Make a new bitfield reference, shift the constant over the
3359 appropriate number of bits and mask it with the computed mask
3360 (in case this was a signed field). If we changed it, make a new one. */
3361 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3364 TREE_SIDE_EFFECTS (lhs) = 1;
3365 TREE_THIS_VOLATILE (lhs) = 1;
3368 rhs = const_binop (BIT_AND_EXPR,
3369 const_binop (LSHIFT_EXPR,
3370 fold_convert (unsigned_type, rhs),
3371 size_int (lbitpos), 0),
3374 return build2 (code, compare_type,
3375 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
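/* Illustrative sketch, not part of this file's interfaces: the shape
   of code optimize_bit_field_compare aims for when comparing a
   bit-field against a constant.  The field position (bit 3), width
   (5 bits) and constant (9) are made-up example values.  Guarded out
   of the build.  */
#if 0
static int
example_bit_field_compare (unsigned word)
{
  /* Extract-then-compare: shift the loaded word, mask, then test.  */
  int extracted = ((word >> 3) & 0x1f) == 9;
  /* Mask-then-compare, which is what the function above builds: the
     constant is shifted at compile time, so no shift of WORD is
     needed at run time.  */
  int masked = (word & (0x1fu << 3)) == (9u << 3);
  return extracted == masked;  /* always 1 */
}
#endif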
3379 /* Subroutine for fold_truthop: decode a field reference.
3381 If EXP is a comparison reference, we return the innermost reference.
3383 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3384 set to the starting bit number.
3386 If the innermost field can be completely contained in a mode-sized
3387 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3389 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3390 otherwise it is not changed.
3392 *PUNSIGNEDP is set to the signedness of the field.
3394 *PMASK is set to the mask used. This is either contained in a
3395 BIT_AND_EXPR or derived from the width of the field.
3397 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3399 Return 0 if this is not a component reference or is one that we can't
3400 do anything with. */
3403 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3404 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3405 int *punsignedp, int *pvolatilep,
3406 tree *pmask, tree *pand_mask)
3408 tree outer_type = 0;
3410 tree mask, inner, offset;
3412 unsigned int precision;
3414 /* All the optimizations using this function assume integer fields.
3415 There are problems with FP fields since the type_for_size call
3416 below can fail for, e.g., XFmode. */
3417 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3420 /* We are interested in the bare arrangement of bits, so strip everything
3421 that doesn't affect the machine mode. However, record the type of the
3422 outermost expression if it may matter below. */
3423 if (TREE_CODE (exp) == NOP_EXPR
3424 || TREE_CODE (exp) == CONVERT_EXPR
3425 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3426 outer_type = TREE_TYPE (exp);
3429 if (TREE_CODE (exp) == BIT_AND_EXPR)
3431 and_mask = TREE_OPERAND (exp, 1);
3432 exp = TREE_OPERAND (exp, 0);
3433 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3434 if (TREE_CODE (and_mask) != INTEGER_CST)
3438 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3439 punsignedp, pvolatilep, false);
3440 if ((inner == exp && and_mask == 0)
3441 || *pbitsize < 0 || offset != 0
3442 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3445 /* If the number of bits in the reference is the same as the bitsize of
3446 the outer type, then the outer type gives the signedness. Otherwise
3447 (in case of a small bitfield) the signedness is unchanged. */
3448 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3449 *punsignedp = TYPE_UNSIGNED (outer_type);
3451 /* Compute the mask to access the bitfield. */
3452 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3453 precision = TYPE_PRECISION (unsigned_type);
3455 mask = build_int_cst (unsigned_type, -1);
3456 mask = force_fit_type (mask, 0, false, false);
3458 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3459 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3461 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3463 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3464 fold_convert (unsigned_type, and_mask), mask);
3467 *pand_mask = and_mask;
3471 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3475 all_ones_mask_p (tree mask, int size)
3477 tree type = TREE_TYPE (mask);
3478 unsigned int precision = TYPE_PRECISION (type);
3481 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3482 tmask = force_fit_type (tmask, 0, false, false);
3485 tree_int_cst_equal (mask,
3486 const_binop (RSHIFT_EXPR,
3487 const_binop (LSHIFT_EXPR, tmask,
3488 size_int (precision - size),
3490 size_int (precision - size), 0));
3493 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3494 represents the sign bit of EXP's type. If EXP represents a sign
3495 or zero extension, also test VAL against the unextended type.
3496 The return value is the (sub)expression whose sign bit is VAL,
3497 or NULL_TREE otherwise. */
3500 sign_bit_p (tree exp, tree val)
3502 unsigned HOST_WIDE_INT mask_lo, lo;
3503 HOST_WIDE_INT mask_hi, hi;
3507 /* Tree EXP must have an integral type. */
3508 t = TREE_TYPE (exp);
3509 if (! INTEGRAL_TYPE_P (t))
3512 /* Tree VAL must be an integer constant. */
3513 if (TREE_CODE (val) != INTEGER_CST
3514 || TREE_CONSTANT_OVERFLOW (val))
3517 width = TYPE_PRECISION (t);
3518 if (width > HOST_BITS_PER_WIDE_INT)
3520 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3523 mask_hi = ((unsigned HOST_WIDE_INT) -1
3524 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3530 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3533 mask_lo = ((unsigned HOST_WIDE_INT) -1
3534 >> (HOST_BITS_PER_WIDE_INT - width));
3537 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3538 treat VAL as if it were unsigned. */
3539 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3540 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3543 /* Handle extension from a narrower type. */
3544 if (TREE_CODE (exp) == NOP_EXPR
3545 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3546 return sign_bit_p (TREE_OPERAND (exp, 0), val);
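/* Illustrative sketch, not part of this file's interfaces: the
   single-word arithmetic behind sign_bit_p, assuming WIDTH is between
   1 and 64 and a 64-bit host integer; the function above also handles
   values that span two host words.  Guarded out of the build.  */
#if 0
static int
example_is_sign_bit (unsigned long long val, unsigned width)
{
  unsigned long long sign = 1ULL << (width - 1);
  unsigned long long mask = ~0ULL >> (64 - width);
  /* Masking lets VAL be treated as unsigned and truncated to WIDTH
     bits before comparing against the lone sign bit.  */
  return (val & mask) == sign;
}
#endif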
3551 /* Subroutine for fold_truthop: determine if an operand is simple enough
3552 to be evaluated unconditionally. */
3555 simple_operand_p (tree exp)
3557 /* Strip any conversions that don't change the machine mode. */
3560 return (CONSTANT_CLASS_P (exp)
3561 || TREE_CODE (exp) == SSA_NAME
3563 && ! TREE_ADDRESSABLE (exp)
3564 && ! TREE_THIS_VOLATILE (exp)
3565 && ! DECL_NONLOCAL (exp)
3566 /* Don't regard global variables as simple. They may be
3567 allocated in ways unknown to the compiler (shared memory,
3568 #pragma weak, etc). */
3569 && ! TREE_PUBLIC (exp)
3570 && ! DECL_EXTERNAL (exp)
3571 /* Loading a static variable is unduly expensive, but global
3572 registers aren't expensive. */
3573 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3576 /* The following functions are subroutines to fold_range_test and allow it to
3577 try to change a logical combination of comparisons into a range test.
3580 X == 2 || X == 3 || X == 4 || X == 5
3584 (unsigned) (X - 2) <= 3
3586 We describe each set of comparisons as being either inside or outside
3587 a range, using a variable named like IN_P, and then describe the
3588 range with a lower and upper bound. If one of the bounds is omitted,
3589 it represents either the highest or lowest value of the type.
3591 In the comments below, we represent a range by two numbers in brackets
3592 preceded by a "+" to designate being inside that range, or a "-" to
3593 designate being outside that range, so the condition can be inverted by
3594 flipping the prefix. An omitted bound is represented by a "-". For
3595 example, "- [-, 10]" means being outside the range starting at the lowest
3596 possible value and ending at 10, in other words, being greater than 10.
3597 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3600 We set up things so that the missing bounds are handled in a consistent
3601 manner so neither a missing bound nor "true" and "false" need to be
3602 handled using a special case. */
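/* Illustrative sketch, not part of this file's interfaces: the
   source-level effect of the range-test transformation described
   above.  Both functions return the same value for every X the
   original expression handles; the folded form needs one subtraction
   and one unsigned comparison.  Guarded out of the build.  */
#if 0
static int
example_range_test_original (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
example_range_test_folded (int x)
{
  return (unsigned) (x - 2) <= 3u;
}
#endif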
3604 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3605 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3606 and UPPER1_P are nonzero if the respective argument is an upper bound
3607 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3608 must be specified for a comparison. ARG1 will be converted to ARG0's
3609 type if both are specified. */
3612 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3613 tree arg1, int upper1_p)
3619 /* If neither arg represents infinity, do the normal operation.
3620 Else, if not a comparison, return infinity. Else handle the special
3621 comparison rules. Note that most of the cases below won't occur, but
3622 are handled for consistency. */
3624 if (arg0 != 0 && arg1 != 0)
3626 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3627 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3629 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3632 if (TREE_CODE_CLASS (code) != tcc_comparison)
3635 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3636 for neither. In real maths, we cannot assume open ended ranges are
3637 the same. But, this is computer arithmetic, where numbers are finite.
3638 We can therefore make the transformation of any unbounded range with
3639 the value Z, Z being greater than any representable number. This permits
3640 us to treat unbounded ranges as equal. */
3641 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3642 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3646 result = sgn0 == sgn1;
3649 result = sgn0 != sgn1;
3652 result = sgn0 < sgn1;
3655 result = sgn0 <= sgn1;
3658 result = sgn0 > sgn1;
3661 result = sgn0 >= sgn1;
3667 return constant_boolean_node (result, type);
3670 /* Given EXP, a logical expression, set the range it is testing into
3671 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3672 actually being tested. *PLOW and *PHIGH will be made of the same type
3673 as the returned expression. If EXP is not a comparison, we will most
3674 likely not be returning a useful value and range. */
3677 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3679 enum tree_code code;
3680 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3681 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3683 tree low, high, n_low, n_high;
3685 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3686 and see if we can refine the range. Some of the cases below may not
3687 happen, but it doesn't seem worth worrying about this. We "continue"
3688 the outer loop when we've changed something; otherwise we "break"
3689 the switch, which will "break" the while. */
3692 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3696 code = TREE_CODE (exp);
3697 exp_type = TREE_TYPE (exp);
3699 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3701 if (TREE_CODE_LENGTH (code) > 0)
3702 arg0 = TREE_OPERAND (exp, 0);
3703 if (TREE_CODE_CLASS (code) == tcc_comparison
3704 || TREE_CODE_CLASS (code) == tcc_unary
3705 || TREE_CODE_CLASS (code) == tcc_binary)
3706 arg0_type = TREE_TYPE (arg0);
3707 if (TREE_CODE_CLASS (code) == tcc_binary
3708 || TREE_CODE_CLASS (code) == tcc_comparison
3709 || (TREE_CODE_CLASS (code) == tcc_expression
3710 && TREE_CODE_LENGTH (code) > 1))
3711 arg1 = TREE_OPERAND (exp, 1);
3716 case TRUTH_NOT_EXPR:
3717 in_p = ! in_p, exp = arg0;
3720 case EQ_EXPR: case NE_EXPR:
3721 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3722 /* We can only do something if the range is testing for zero
3723 and if the second operand is an integer constant. Note that
3724 saying something is "in" the range we make is done by
3725 complementing IN_P since it will set in the initial case of
3726 being not equal to zero; "out" is leaving it alone. */
3727 if (low == 0 || high == 0
3728 || ! integer_zerop (low) || ! integer_zerop (high)
3729 || TREE_CODE (arg1) != INTEGER_CST)
3734 case NE_EXPR: /* - [c, c] */
3737 case EQ_EXPR: /* + [c, c] */
3738 in_p = ! in_p, low = high = arg1;
3740 case GT_EXPR: /* - [-, c] */
3741 low = 0, high = arg1;
3743 case GE_EXPR: /* + [c, -] */
3744 in_p = ! in_p, low = arg1, high = 0;
3746 case LT_EXPR: /* - [c, -] */
3747 low = arg1, high = 0;
3749 case LE_EXPR: /* + [-, c] */
3750 in_p = ! in_p, low = 0, high = arg1;
3756 /* If this is an unsigned comparison, we also know that EXP is
3757 greater than or equal to zero. We base the range tests we make
3758 on that fact, so we record it here so we can parse existing
3759 range tests. We test arg0_type since often the return type
3760 of, e.g. EQ_EXPR, is boolean. */
3761 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3763 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3765 fold_convert (arg0_type, integer_zero_node),
3769 in_p = n_in_p, low = n_low, high = n_high;
3771 /* If the high bound is missing, but we have a nonzero low
3772 bound, reverse the range so it goes from zero to the low bound
3774 if (high == 0 && low && ! integer_zerop (low))
3777 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3778 integer_one_node, 0);
3779 low = fold_convert (arg0_type, integer_zero_node);
3787 /* (-x) IN [a,b] -> x in [-b, -a] */
3788 n_low = range_binop (MINUS_EXPR, exp_type,
3789 fold_convert (exp_type, integer_zero_node),
3791 n_high = range_binop (MINUS_EXPR, exp_type,
3792 fold_convert (exp_type, integer_zero_node),
3794 low = n_low, high = n_high;
3800 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3801 fold_convert (exp_type, integer_one_node));
3804 case PLUS_EXPR: case MINUS_EXPR:
3805 if (TREE_CODE (arg1) != INTEGER_CST)
3808 /* If EXP is signed, any overflow in the computation is undefined,
3809 so we don't worry about it so long as our computations on
3810 the bounds don't overflow. For unsigned, overflow is defined
3811 and this is exactly the right thing. */
3812 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3813 arg0_type, low, 0, arg1, 0);
3814 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3815 arg0_type, high, 1, arg1, 0);
3816 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3817 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3820 /* Check for an unsigned range which has wrapped around the maximum
3821 value thus making n_high < n_low, and normalize it. */
3822 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3824 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3825 integer_one_node, 0);
3826 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3827 integer_one_node, 0);
3829 /* If the range is of the form +/- [ x+1, x ], we won't
3830 be able to normalize it. But then, it represents the
3831 whole range or the empty set, so make it
3833 if (tree_int_cst_equal (n_low, low)
3834 && tree_int_cst_equal (n_high, high))
3840 low = n_low, high = n_high;
3845 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3846 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3849 if (! INTEGRAL_TYPE_P (arg0_type)
3850 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3851 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3854 n_low = low, n_high = high;
3857 n_low = fold_convert (arg0_type, n_low);
3860 n_high = fold_convert (arg0_type, n_high);
3863 /* If we're converting arg0 from an unsigned type to exp,
3864 a signed type, we will be doing the comparison as unsigned.
3865 The tests above have already verified that LOW and HIGH
3868 So we have to ensure that we will handle large unsigned
3869 values the same way that the current signed bounds treat
3872 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3875 tree equiv_type = lang_hooks.types.type_for_mode
3876 (TYPE_MODE (arg0_type), 1);
3878 /* A range without an upper bound is, naturally, unbounded.
3879 Since convert would have cropped a very large value, use
3880 the max value for the destination type. */
3882 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3883 : TYPE_MAX_VALUE (arg0_type);
3885 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3886 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3887 fold_convert (arg0_type,
3889 fold_convert (arg0_type,
3892 /* If the low bound is specified, "and" the range with the
3893 range for which the original unsigned value will be
3897 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3898 1, n_low, n_high, 1,
3899 fold_convert (arg0_type,
3904 in_p = (n_in_p == in_p);
3908 /* Otherwise, "or" the range with the range of the input
3909 that will be interpreted as negative. */
3910 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3911 0, n_low, n_high, 1,
3912 fold_convert (arg0_type,
3917 in_p = (in_p != n_in_p);
3922 low = n_low, high = n_high;
3932 /* If EXP is a constant, we can evaluate whether this is true or false. */
3933 if (TREE_CODE (exp) == INTEGER_CST)
3935 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3937 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3943 *pin_p = in_p, *plow = low, *phigh = high;
3947 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3948 type, TYPE, return an expression to test if EXP is in (or out of, depending
3949 on IN_P) the range. Return 0 if the test couldn't be created. */
3952 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3954 tree etype = TREE_TYPE (exp);
3957 #ifdef HAVE_canonicalize_funcptr_for_compare
3958 /* Disable this optimization for function pointer expressions
3959 on targets that require function pointer canonicalization. */
3960 if (HAVE_canonicalize_funcptr_for_compare
3961 && TREE_CODE (etype) == POINTER_TYPE
3962 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
3968 value = build_range_check (type, exp, 1, low, high);
3970 return invert_truthvalue (value);
3975 if (low == 0 && high == 0)
3976 return fold_convert (type, integer_one_node);
3979 return fold_build2 (LE_EXPR, type, exp,
3980 fold_convert (etype, high));
3983 return fold_build2 (GE_EXPR, type, exp,
3984 fold_convert (etype, low));
3986 if (operand_equal_p (low, high, 0))
3987 return fold_build2 (EQ_EXPR, type, exp,
3988 fold_convert (etype, low));
3990 if (integer_zerop (low))
3992 if (! TYPE_UNSIGNED (etype))
3994 etype = lang_hooks.types.unsigned_type (etype);
3995 high = fold_convert (etype, high);
3996 exp = fold_convert (etype, exp);
3998 return build_range_check (type, exp, 1, 0, high);
4001 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4002 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4004 unsigned HOST_WIDE_INT lo;
4008 prec = TYPE_PRECISION (etype);
4009 if (prec <= HOST_BITS_PER_WIDE_INT)
4012 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4016 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4017 lo = (unsigned HOST_WIDE_INT) -1;
4020 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4022 if (TYPE_UNSIGNED (etype))
4024 etype = lang_hooks.types.signed_type (etype);
4025 exp = fold_convert (etype, exp);
4027 return fold_build2 (GT_EXPR, type, exp,
4028 fold_convert (etype, integer_zero_node));
4032 value = const_binop (MINUS_EXPR, high, low, 0);
4033 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4034 && ! TYPE_UNSIGNED (etype))
4036 tree utype, minv, maxv;
4038 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4039 for the type in question, as we rely on this here. */
4040 switch (TREE_CODE (etype))
4045 /* There is no requirement that LOW be within the range of ETYPE
4046 if the latter is a subtype. It must, however, be within the base
4047 type of ETYPE. So be sure we do the subtraction in that type. */
4048 if (TREE_TYPE (etype))
4049 etype = TREE_TYPE (etype);
4050 utype = lang_hooks.types.unsigned_type (etype);
4051 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4052 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4053 integer_one_node, 1);
4054 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4055 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4059 high = fold_convert (etype, high);
4060 low = fold_convert (etype, low);
4061 exp = fold_convert (etype, exp);
4062 value = const_binop (MINUS_EXPR, high, low, 0);
4070 if (value != 0 && ! TREE_OVERFLOW (value))
4072 /* There is no requirement that LOW be within the range of ETYPE
4073 if the latter is a subtype. It must, however, be within the base
4074 type of ETYPE. So be sure we do the subtraction in that type. */
4075 if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
4077 etype = TREE_TYPE (etype);
4078 exp = fold_convert (etype, exp);
4079 low = fold_convert (etype, low);
4080 value = fold_convert (etype, value);
4083 return build_range_check (type,
4084 fold_build2 (MINUS_EXPR, etype, exp, low),
4085 1, build_int_cst (etype, 0), value);
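/* Illustrative sketch, not part of this file's interfaces: the
   unsigned-subtraction trick build_range_check reduces to.  Assuming
   LOW <= HIGH, the single comparison below is equivalent to
   "low <= x && x <= high" because x - low wraps around to a large
   value whenever x < low.  Guarded out of the build.  */
#if 0
static int
example_build_range_check (unsigned x, unsigned low, unsigned high)
{
  return x - low <= high - low;
}
#endif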
4091 /* Given two ranges, see if we can merge them into one. Return 1 if we
4092 can, 0 if we can't. Set the output range into the specified parameters. */
4095 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4096 tree high0, int in1_p, tree low1, tree high1)
4104 int lowequal = ((low0 == 0 && low1 == 0)
4105 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4106 low0, 0, low1, 0)));
4107 int highequal = ((high0 == 0 && high1 == 0)
4108 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4109 high0, 1, high1, 1)));
4111 /* Make range 0 be the range that starts first, or ends last if they
4112 start at the same value. Swap them if it isn't. */
4113 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4116 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4117 high1, 1, high0, 1))))
4119 temp = in0_p, in0_p = in1_p, in1_p = temp;
4120 tem = low0, low0 = low1, low1 = tem;
4121 tem = high0, high0 = high1, high1 = tem;
4124 /* Now flag two cases, whether the ranges are disjoint or whether the
4125 second range is totally subsumed in the first. Note that the tests
4126 below are simplified by the ones above. */
4127 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4128 high0, 1, low1, 0));
4129 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4130 high1, 1, high0, 1));
4132 /* We now have four cases, depending on whether we are including or
4133 excluding the two ranges. */
4136 /* If they don't overlap, the result is false. If the second range
4137 is a subset it is the result. Otherwise, the range is from the start
4138 of the second to the end of the first. */
4140 in_p = 0, low = high = 0;
4142 in_p = 1, low = low1, high = high1;
4144 in_p = 1, low = low1, high = high0;
4147 else if (in0_p && ! in1_p)
4149 /* If they don't overlap, the result is the first range. If they are
4150 equal, the result is false. If the second range is a subset of the
4151 first, and the ranges begin at the same place, we go from just after
4152 the end of the first range to the end of the second. If the second
4153 range is not a subset of the first, or if it is a subset and both
4154 ranges end at the same place, the range starts at the start of the
4155 first range and ends just before the second range.
4156 Otherwise, we can't describe this as a single range. */
4158 in_p = 1, low = low0, high = high0;
4159 else if (lowequal && highequal)
4160 in_p = 0, low = high = 0;
4161 else if (subset && lowequal)
4163 in_p = 1, high = high0;
4164 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4165 integer_one_node, 0);
4167 else if (! subset || highequal)
4169 in_p = 1, low = low0;
4170 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4171 integer_one_node, 0);
4177 else if (! in0_p && in1_p)
4179 /* If they don't overlap, the result is the second range. If the second
4180 is a subset of the first, the result is false. Otherwise,
4181 the range starts just after the first range and ends at the
4182 end of the second. */
4184 in_p = 1, low = low1, high = high1;
4185 else if (subset || highequal)
4186 in_p = 0, low = high = 0;
4189 in_p = 1, high = high1;
4190 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4191 integer_one_node, 0);
4197 /* The case where we are excluding both ranges. Here the complex case
4198 is if they don't overlap. In that case, the only time we have a
4199 range is if they are adjacent. If the second is a subset of the
4200 first, the result is the first. Otherwise, the range to exclude
4201 starts at the beginning of the first range and ends at the end of the
4205 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4206 range_binop (PLUS_EXPR, NULL_TREE,
4208 integer_one_node, 1),
4210 in_p = 0, low = low0, high = high1;
4213 /* Canonicalize - [min, x] into - [-, x]. */
4214 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4215 switch (TREE_CODE (TREE_TYPE (low0)))
4218 if (TYPE_PRECISION (TREE_TYPE (low0))
4219 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4224 if (tree_int_cst_equal (low0,
4225 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4229 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4230 && integer_zerop (low0))
4237 /* Canonicalize - [x, max] into - [x, -]. */
4238 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4239 switch (TREE_CODE (TREE_TYPE (high1)))
4242 if (TYPE_PRECISION (TREE_TYPE (high1))
4243 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4248 if (tree_int_cst_equal (high1,
4249 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4253 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4254 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4256 integer_one_node, 1)))
4263 /* The ranges might also be adjacent between the maximum and
4264 minimum values of the given type. For
4265 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4266 return + [x + 1, y - 1]. */
4267 if (low0 == 0 && high1 == 0)
4269 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4270 integer_one_node, 1);
4271 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4272 integer_one_node, 0);
4273 if (low == 0 || high == 0)
4283 in_p = 0, low = low0, high = high0;
4285 in_p = 0, low = low0, high = high1;
4288 *pin_p = in_p, *plow = low, *phigh = high;
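/* Illustrative sketch, not part of this file's interfaces: one
   concrete instance of what merge_ranges computes.  The ranges
   + [2, 5] and + [4, 9] overlap, so their union is the single range
   + [2, 9] and their intersection is + [4, 5].  Guarded out of the
   build.  */
#if 0
static int
example_merge_ranges_or (int x)
{
  /* Same truth table as (2 <= x && x <= 5) || (4 <= x && x <= 9).  */
  return 2 <= x && x <= 9;
}

static int
example_merge_ranges_and (int x)
{
  /* Same truth table as (2 <= x && x <= 5) && (4 <= x && x <= 9).  */
  return 4 <= x && x <= 5;
}
#endif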
4293 /* Subroutine of fold, looking inside expressions of the form
4294 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4295 of the COND_EXPR. This function is being used also to optimize
4296 A op B ? C : A, by reversing the comparison first.
4298 Return a folded expression whose code is not a COND_EXPR
4299 anymore, or NULL_TREE if no folding opportunity is found. */
4302 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4304 enum tree_code comp_code = TREE_CODE (arg0);
4305 tree arg00 = TREE_OPERAND (arg0, 0);
4306 tree arg01 = TREE_OPERAND (arg0, 1);
4307 tree arg1_type = TREE_TYPE (arg1);
4313 /* If we have A op 0 ? A : -A, consider applying the following
4316 A == 0? A : -A same as -A
4317 A != 0? A : -A same as A
4318 A >= 0? A : -A same as abs (A)
4319 A > 0? A : -A same as abs (A)
4320 A <= 0? A : -A same as -abs (A)
4321 A < 0? A : -A same as -abs (A)
4323 None of these transformations work for modes with signed
4324 zeros. If A is +/-0, the first two transformations will
4325 change the sign of the result (from +0 to -0, or vice
4326 versa). The last four will fix the sign of the result,
4327 even though the original expressions could be positive or
4328 negative, depending on the sign of A.
4330 Note that all these transformations are correct if A is
4331 NaN, since the two alternatives (A and -A) are also NaNs. */
4332 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4333 ? real_zerop (arg01)
4334 : integer_zerop (arg01))
4335 && ((TREE_CODE (arg2) == NEGATE_EXPR
4336 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4337 /* In the case that A is of the form X-Y, '-A' (arg2) may
4338 have already been folded to Y-X; check for that. */
4339 || (TREE_CODE (arg1) == MINUS_EXPR
4340 && TREE_CODE (arg2) == MINUS_EXPR
4341 && operand_equal_p (TREE_OPERAND (arg1, 0),
4342 TREE_OPERAND (arg2, 1), 0)
4343 && operand_equal_p (TREE_OPERAND (arg1, 1),
4344 TREE_OPERAND (arg2, 0), 0))))
4349 tem = fold_convert (arg1_type, arg1);
4350 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4353 return pedantic_non_lvalue (fold_convert (type, arg1));
4356 if (flag_trapping_math)
4361 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4362 arg1 = fold_convert (lang_hooks.types.signed_type
4363 (TREE_TYPE (arg1)), arg1);
4364 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4365 return pedantic_non_lvalue (fold_convert (type, tem));
4368 if (flag_trapping_math)
4372 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4373 arg1 = fold_convert (lang_hooks.types.signed_type
4374 (TREE_TYPE (arg1)), arg1);
4375 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4376 return negate_expr (fold_convert (type, tem));
4378 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4382 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4383 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4384 both transformations are correct when A is NaN: A != 0
4385 is then true, and A == 0 is false. */
4387 if (integer_zerop (arg01) && integer_zerop (arg2))
4389 if (comp_code == NE_EXPR)
4390 return pedantic_non_lvalue (fold_convert (type, arg1));
4391 else if (comp_code == EQ_EXPR)
4392 return fold_convert (type, integer_zero_node);
4395 /* Try some transformations of A op B ? A : B.
4397 A == B? A : B same as B
4398 A != B? A : B same as A
4399 A >= B? A : B same as max (A, B)
4400 A > B? A : B same as max (B, A)
4401 A <= B? A : B same as min (A, B)
4402 A < B? A : B same as min (B, A)
4404 As above, these transformations don't work in the presence
4405 of signed zeros. For example, if A and B are zeros of
4406 opposite sign, the first two transformations will change
4407 the sign of the result. In the last four, the original
4408 expressions give different results for (A=+0, B=-0) and
4409 (A=-0, B=+0), but the transformed expressions do not.
4411 The first two transformations are correct if either A or B
4412 is a NaN. In the first transformation, the condition will
4413 be false, and B will indeed be chosen. In the case of the
4414 second transformation, the condition A != B will be true,
4415 and A will be chosen.
4417 The conversions to max() and min() are not correct if B is
4418 a number and A is not. The conditions in the original
4419 expressions will be false, so all four give B. The min()
4420 and max() versions would give a NaN instead. */
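/* For example, with integer operands a and b (so no signed zeros or
   NaNs to worry about), the transformations above give roughly:

     a <  b ? a : b    ->   MIN_EXPR <b, a>
     a <= b ? a : b    ->   MIN_EXPR <a, b>
     a >  b ? a : b    ->   MAX_EXPR <b, a>
     a == b ? a : b    ->   b

   This is only a sketch; the operand order chosen matters for the C++
   lvalue case handled below.  */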
4421 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4422 /* Avoid these transformations if the COND_EXPR may be used
4423 as an lvalue in the C++ front-end. PR c++/19199. */
4425 || strcmp (lang_hooks.name, "GNU C++") != 0
4426 || ! maybe_lvalue_p (arg1)
4427 || ! maybe_lvalue_p (arg2)))
4429 tree comp_op0 = arg00;
4430 tree comp_op1 = arg01;
4431 tree comp_type = TREE_TYPE (comp_op0);
4433 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4434 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4444 return pedantic_non_lvalue (fold_convert (type, arg2));
4446 return pedantic_non_lvalue (fold_convert (type, arg1));
4451 /* In C++ a ?: expression can be an lvalue, so put the
4452 operand which will be used if they are equal first
4453 so that we can convert this back to the
4454 corresponding COND_EXPR. */
4455 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4457 comp_op0 = fold_convert (comp_type, comp_op0);
4458 comp_op1 = fold_convert (comp_type, comp_op1);
4459 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4460 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4461 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4462 return pedantic_non_lvalue (fold_convert (type, tem));
4469 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4471 comp_op0 = fold_convert (comp_type, comp_op0);
4472 comp_op1 = fold_convert (comp_type, comp_op1);
4473 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4474 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4475 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4476 return pedantic_non_lvalue (fold_convert (type, tem));
4480 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4481 return pedantic_non_lvalue (fold_convert (type, arg2));
4484 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4485 return pedantic_non_lvalue (fold_convert (type, arg1));
4488 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4493 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4494 we might still be able to simplify this. For example,
4495 if C1 is one less or one more than C2, this might have started
4496 out as a MIN or MAX and been transformed by this function.
4497 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
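/* A small illustrative example (integer `a', and 5 assumed to be
   neither the type's minimum nor maximum value):

     a < 6 ? a : 5    ->   MIN_EXPR <a, 5>     (C1 == C2 + 1)
     a > 4 ? a : 5    ->   MAX_EXPR <a, 5>     (C1 == C2 - 1)

   Which rewrite applies is subject to the TYPE_MIN_VALUE and
   TYPE_MAX_VALUE guards below.  */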
4499 if (INTEGRAL_TYPE_P (type)
4500 && TREE_CODE (arg01) == INTEGER_CST
4501 && TREE_CODE (arg2) == INTEGER_CST)
4505 /* We can replace A with C1 in this case. */
4506 arg1 = fold_convert (type, arg01);
4507 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4510 /* If C1 is C2 + 1, this is min(A, C2). */
4511 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4513 && operand_equal_p (arg01,
4514 const_binop (PLUS_EXPR, arg2,
4515 integer_one_node, 0),
4517 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4522 /* If C1 is C2 - 1, this is min(A, C2). */
4523 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4525 && operand_equal_p (arg01,
4526 const_binop (MINUS_EXPR, arg2,
4527 integer_one_node, 0),
4529 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4534 /* If C1 is C2 - 1, this is max(A, C2). */
4535 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4537 && operand_equal_p (arg01,
4538 const_binop (MINUS_EXPR, arg2,
4539 integer_one_node, 0),
4541 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4546 /* If C1 is C2 + 1, this is max(A, C2). */
4547 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4549 && operand_equal_p (arg01,
4550 const_binop (PLUS_EXPR, arg2,
4551 integer_one_node, 0),
4553 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4567 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4568 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4571 /* EXP is some logical combination of boolean tests. See if we can
4572 merge it into some range test. Return the new tree if so. */
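/* For instance, a test such as

     ch >= '0' && ch <= '9'

   has both operands describing ranges of the same variable, so the two
   ranges can be merged and rebuilt by build_range_check as, roughly,

     (unsigned) (ch - '0') <= 9

   i.e. a single unsigned comparison.  This is a sketch of the usual
   result; the precise type used depends on the operands.  */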
4575 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4577 int or_op = (code == TRUTH_ORIF_EXPR
4578 || code == TRUTH_OR_EXPR);
4579 int in0_p, in1_p, in_p;
4580 tree low0, low1, low, high0, high1, high;
4581 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4582 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4585 /* If this is an OR operation, invert both sides; we will invert
4586 again at the end. */
4588 in0_p = ! in0_p, in1_p = ! in1_p;
4590 /* If both expressions are the same, if we can merge the ranges, and we
4591 can build the range test, return it or it inverted. If one of the
4592 ranges is always true or always false, consider it to be the same
4593 expression as the other. */
4594 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4595 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4597 && 0 != (tem = (build_range_check (type,
4599 : rhs != 0 ? rhs : integer_zero_node,
4601 return or_op ? invert_truthvalue (tem) : tem;
4603 /* On machines where branches are expensive, if this is a
4604 short-circuited branch and the underlying object on both sides
4605 is the same, make a non-short-circuit operation. */
4606 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4607 && lhs != 0 && rhs != 0
4608 && (code == TRUTH_ANDIF_EXPR
4609 || code == TRUTH_ORIF_EXPR)
4610 && operand_equal_p (lhs, rhs, 0))
4612 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4613 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4614 which cases we can't do this. */
4615 if (simple_operand_p (lhs))
4616 return build2 (code == TRUTH_ANDIF_EXPR
4617 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4620 else if (lang_hooks.decls.global_bindings_p () == 0
4621 && ! CONTAINS_PLACEHOLDER_P (lhs))
4623 tree common = save_expr (lhs);
4625 if (0 != (lhs = build_range_check (type, common,
4626 or_op ? ! in0_p : in0_p,
4628 && (0 != (rhs = build_range_check (type, common,
4629 or_op ? ! in1_p : in1_p,
4631 return build2 (code == TRUTH_ANDIF_EXPR
4632 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4640 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4641 bit value. Arrange things so the extra bits will be set to zero if and
4642 only if C is sign-extended to its full width. If MASK is nonzero,
4643 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4646 unextend (tree c, int p, int unsignedp, tree mask)
4648 tree type = TREE_TYPE (c);
4649 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4652 if (p == modesize || unsignedp)
4655 /* We work by getting just the sign bit into the low-order bit, then
4656 into the high-order bit, then sign-extend. We then XOR that value
4658 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4659 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4661 /* We must use a signed type in order to get an arithmetic right shift.
4662 However, we must also avoid introducing accidental overflows, so that
4663 a subsequent call to integer_zerop will work. Hence we must
4664 do the type conversion here. At this point, the constant is either
4665 zero or one, and the conversion to a signed type can never overflow.
4666 We could get an overflow if this conversion is done anywhere else. */
4667 if (TYPE_UNSIGNED (type))
4668 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4670 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4671 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4673 temp = const_binop (BIT_AND_EXPR, temp,
4674 fold_convert (TREE_TYPE (c), mask), 0);
4675 /* If necessary, convert the type back to match the type of C. */
4676 if (TYPE_UNSIGNED (type))
4677 temp = fold_convert (type, temp);
4679 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4682 /* Find ways of folding logical expressions of LHS and RHS:
4683 Try to merge two comparisons to the same innermost item.
4684 Look for range tests like "ch >= '0' && ch <= '9'".
4685 Look for combinations of simple terms on machines with expensive branches
4686 and evaluate the RHS unconditionally.
4688 For example, if we have p->a == 2 && p->b == 4 and we can make an
4689 object large enough to span both A and B, we can do this with a comparison
4690 against the object ANDed with a mask.
4692 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4693 operations to do this with one comparison.
4695 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4696 function and the one above.
4698 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4699 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4701 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4704 We return the simplified tree or 0 if no optimization is possible. */
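/* Two sketches of what this can produce (the exact mask and combined
   constant depend on the field layout):

     p->a == 2 && p->b == 4
       -> one load of the word containing both fields, masked and
	  compared against a single combined constant;

     (a != 0) || (b != 0)
       -> (a | b) != 0     (one of the early cases below, when a and b
			    are simple operands of the same type).  */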
4707 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4709 /* If this is the "or" of two comparisons, we can do something if
4710 the comparisons are NE_EXPR. If this is the "and", we can do something
4711 if the comparisons are EQ_EXPR. I.e.,
4712 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4714 WANTED_CODE is this operation code. For single bit fields, we can
4715 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4716 comparison for one-bit fields. */
4718 enum tree_code wanted_code;
4719 enum tree_code lcode, rcode;
4720 tree ll_arg, lr_arg, rl_arg, rr_arg;
4721 tree ll_inner, lr_inner, rl_inner, rr_inner;
4722 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4723 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4724 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4725 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4726 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4727 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4728 enum machine_mode lnmode, rnmode;
4729 tree ll_mask, lr_mask, rl_mask, rr_mask;
4730 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4731 tree l_const, r_const;
4732 tree lntype, rntype, result;
4733 int first_bit, end_bit;
4736 /* Start by getting the comparison codes. Fail if anything is volatile.
4737 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4738 it were surrounded with a NE_EXPR. */
4740 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4743 lcode = TREE_CODE (lhs);
4744 rcode = TREE_CODE (rhs);
4746 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4748 lhs = build2 (NE_EXPR, truth_type, lhs,
4749 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4753 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4755 rhs = build2 (NE_EXPR, truth_type, rhs,
4756 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4760 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4761 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4764 ll_arg = TREE_OPERAND (lhs, 0);
4765 lr_arg = TREE_OPERAND (lhs, 1);
4766 rl_arg = TREE_OPERAND (rhs, 0);
4767 rr_arg = TREE_OPERAND (rhs, 1);
4769 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4770 if (simple_operand_p (ll_arg)
4771 && simple_operand_p (lr_arg))
4774 if (operand_equal_p (ll_arg, rl_arg, 0)
4775 && operand_equal_p (lr_arg, rr_arg, 0))
4777 result = combine_comparisons (code, lcode, rcode,
4778 truth_type, ll_arg, lr_arg);
4782 else if (operand_equal_p (ll_arg, rr_arg, 0)
4783 && operand_equal_p (lr_arg, rl_arg, 0))
4785 result = combine_comparisons (code, lcode,
4786 swap_tree_comparison (rcode),
4787 truth_type, ll_arg, lr_arg);
4793 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4794 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4796 /* If the RHS can be evaluated unconditionally and its operands are
4797 simple, it wins to evaluate the RHS unconditionally on machines
4798 with expensive branches. In this case, this isn't a comparison
4799 that can be merged. Avoid doing this if the RHS is a floating-point
4800 comparison since those can trap. */
4802 if (BRANCH_COST >= 2
4803 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4804 && simple_operand_p (rl_arg)
4805 && simple_operand_p (rr_arg))
4807 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4808 if (code == TRUTH_OR_EXPR
4809 && lcode == NE_EXPR && integer_zerop (lr_arg)
4810 && rcode == NE_EXPR && integer_zerop (rr_arg)
4811 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4812 return build2 (NE_EXPR, truth_type,
4813 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4815 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4817 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4818 if (code == TRUTH_AND_EXPR
4819 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4820 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4821 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4822 return build2 (EQ_EXPR, truth_type,
4823 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4825 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4827 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4828 return build2 (code, truth_type, lhs, rhs);
4831 /* See if the comparisons can be merged. Then get all the parameters for
4834 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4835 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4839 ll_inner = decode_field_reference (ll_arg,
4840 &ll_bitsize, &ll_bitpos, &ll_mode,
4841 &ll_unsignedp, &volatilep, &ll_mask,
4843 lr_inner = decode_field_reference (lr_arg,
4844 &lr_bitsize, &lr_bitpos, &lr_mode,
4845 &lr_unsignedp, &volatilep, &lr_mask,
4847 rl_inner = decode_field_reference (rl_arg,
4848 &rl_bitsize, &rl_bitpos, &rl_mode,
4849 &rl_unsignedp, &volatilep, &rl_mask,
4851 rr_inner = decode_field_reference (rr_arg,
4852 &rr_bitsize, &rr_bitpos, &rr_mode,
4853 &rr_unsignedp, &volatilep, &rr_mask,
4856 /* The inner operation on the lhs of each comparison must be the same
4857 if we are to be able to do anything.
4858 Then see if we have constants. If not, the same must be true for
4860 if (volatilep || ll_inner == 0 || rl_inner == 0
4861 || ! operand_equal_p (ll_inner, rl_inner, 0))
4864 if (TREE_CODE (lr_arg) == INTEGER_CST
4865 && TREE_CODE (rr_arg) == INTEGER_CST)
4866 l_const = lr_arg, r_const = rr_arg;
4867 else if (lr_inner == 0 || rr_inner == 0
4868 || ! operand_equal_p (lr_inner, rr_inner, 0))
4871 l_const = r_const = 0;
4873 /* If either comparison code is not correct for our logical operation,
4874 fail. However, we can convert a one-bit comparison against zero into
4875 the opposite comparison against that bit being set in the field. */
4877 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4878 if (lcode != wanted_code)
4880 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4882 /* Make the left operand unsigned, since we are only interested
4883 in the value of one bit. Otherwise we are doing the wrong
4892 /* This is analogous to the code for l_const above. */
4893 if (rcode != wanted_code)
4895 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4904 /* After this point all optimizations will generate bit-field
4905 references, which we might not want. */
4906 if (! lang_hooks.can_use_bit_fields_p ())
4909 /* See if we can find a mode that contains both fields being compared on
4910 the left. If we can't, fail. Otherwise, update all constants and masks
4911 to be relative to a field of that size. */
4912 first_bit = MIN (ll_bitpos, rl_bitpos);
4913 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4914 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4915 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4917 if (lnmode == VOIDmode)
4920 lnbitsize = GET_MODE_BITSIZE (lnmode);
4921 lnbitpos = first_bit & ~ (lnbitsize - 1);
4922 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4923 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4925 if (BYTES_BIG_ENDIAN)
4927 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4928 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4931 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4932 size_int (xll_bitpos), 0);
4933 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4934 size_int (xrl_bitpos), 0);
4938 l_const = fold_convert (lntype, l_const);
4939 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4940 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4941 if (integer_nonzerop (const_binop (BIT_AND_EXPR, l_const,
4942 fold_build1 (BIT_NOT_EXPR,
4946 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4948 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4953 r_const = fold_convert (lntype, r_const);
4954 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4955 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4956 if (integer_nonzerop (const_binop (BIT_AND_EXPR, r_const,
4957 fold_build1 (BIT_NOT_EXPR,
4961 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4963 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4967 /* If the right sides are not constant, do the same for them. Also,
4968 disallow this optimization if a size or signedness mismatch occurs
4969 between the left and right sides. */
4972 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4973 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4974 /* Make sure the two fields on the right
4975 correspond to the left without being swapped. */
4976 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4979 first_bit = MIN (lr_bitpos, rr_bitpos);
4980 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4981 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4982 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4984 if (rnmode == VOIDmode)
4987 rnbitsize = GET_MODE_BITSIZE (rnmode);
4988 rnbitpos = first_bit & ~ (rnbitsize - 1);
4989 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4990 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4992 if (BYTES_BIG_ENDIAN)
4994 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4995 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4998 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4999 size_int (xlr_bitpos), 0);
5000 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5001 size_int (xrr_bitpos), 0);
5003 /* Make a mask that corresponds to both fields being compared.
5004 Do this for both items being compared. If the operands are the
5005 same size and the bits being compared are in the same position
5006 then we can do this by masking both and comparing the masked
5008 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5009 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5010 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5012 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5013 ll_unsignedp || rl_unsignedp);
5014 if (! all_ones_mask_p (ll_mask, lnbitsize))
5015 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5017 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5018 lr_unsignedp || rr_unsignedp);
5019 if (! all_ones_mask_p (lr_mask, rnbitsize))
5020 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5022 return build2 (wanted_code, truth_type, lhs, rhs);
5025 /* There is still another way we can do something: If both pairs of
5026 fields being compared are adjacent, we may be able to make a wider
5027 field containing them both.
5029 Note that we still must mask the lhs/rhs expressions. Furthermore,
5030 the mask must be shifted to account for the shift done by
5031 make_bit_field_ref. */
5032 if ((ll_bitsize + ll_bitpos == rl_bitpos
5033 && lr_bitsize + lr_bitpos == rr_bitpos)
5034 || (ll_bitpos == rl_bitpos + rl_bitsize
5035 && lr_bitpos == rr_bitpos + rr_bitsize))
5039 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5040 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5041 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5042 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5044 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5045 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5046 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5047 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5049 /* Convert to the smaller type before masking out unwanted bits. */
5051 if (lntype != rntype)
5053 if (lnbitsize > rnbitsize)
5055 lhs = fold_convert (rntype, lhs);
5056 ll_mask = fold_convert (rntype, ll_mask);
5059 else if (lnbitsize < rnbitsize)
5061 rhs = fold_convert (lntype, rhs);
5062 lr_mask = fold_convert (lntype, lr_mask);
5067 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5068 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5070 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5071 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5073 return build2 (wanted_code, truth_type, lhs, rhs);
5079 /* Handle the case of comparisons with constants. If there is something in
5080 common between the masks, those bits of the constants must be the same.
5081 If not, the condition is always false. Test for this to avoid generating
5082 incorrect code below. */
5083 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5084 if (! integer_zerop (result)
5085 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5086 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5088 if (wanted_code == NE_EXPR)
5090 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5091 return constant_boolean_node (true, truth_type);
5095 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5096 return constant_boolean_node (false, truth_type);
5100 /* Construct the expression we will return. First get the component
5101 reference we will make. Unless the mask is all ones the width of
5102 that field, perform the mask operation. Then compare with the
5104 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5105 ll_unsignedp || rl_unsignedp);
5107 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5108 if (! all_ones_mask_p (ll_mask, lnbitsize))
5109 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5111 return build2 (wanted_code, truth_type, result,
5112 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5115 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5119 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5122 enum tree_code op_code;
5123 tree comp_const = op1;
5125 int consts_equal, consts_lt;
5128 STRIP_SIGN_NOPS (arg0);
5130 op_code = TREE_CODE (arg0);
5131 minmax_const = TREE_OPERAND (arg0, 1);
5132 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5133 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5134 inner = TREE_OPERAND (arg0, 0);
5136 /* If something does not permit us to optimize, return the original tree. */
5137 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5138 || TREE_CODE (comp_const) != INTEGER_CST
5139 || TREE_CONSTANT_OVERFLOW (comp_const)
5140 || TREE_CODE (minmax_const) != INTEGER_CST
5141 || TREE_CONSTANT_OVERFLOW (minmax_const))
5144 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5145 and GT_EXPR, doing the rest with recursive calls using logical
5149 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5151 /* FIXME: We should be able to invert code without building a
5152 scratch tree node, but doing so would require us to
5153 duplicate a part of invert_truthvalue here. */
5154 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5155 tem = optimize_minmax_comparison (TREE_CODE (tem),
5157 TREE_OPERAND (tem, 0),
5158 TREE_OPERAND (tem, 1));
5159 return invert_truthvalue (tem);
5164 fold_build2 (TRUTH_ORIF_EXPR, type,
5165 optimize_minmax_comparison
5166 (EQ_EXPR, type, arg0, comp_const),
5167 optimize_minmax_comparison
5168 (GT_EXPR, type, arg0, comp_const));
5171 if (op_code == MAX_EXPR && consts_equal)
5172 /* MAX (X, 0) == 0 -> X <= 0 */
5173 return fold_build2 (LE_EXPR, type, inner, comp_const);
5175 else if (op_code == MAX_EXPR && consts_lt)
5176 /* MAX (X, 0) == 5 -> X == 5 */
5177 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5179 else if (op_code == MAX_EXPR)
5180 /* MAX (X, 0) == -1 -> false */
5181 return omit_one_operand (type, integer_zero_node, inner);
5183 else if (consts_equal)
5184 /* MIN (X, 0) == 0 -> X >= 0 */
5185 return fold_build2 (GE_EXPR, type, inner, comp_const);
5188 /* MIN (X, 0) == 5 -> false */
5189 return omit_one_operand (type, integer_zero_node, inner);
5192 /* MIN (X, 0) == -1 -> X == -1 */
5193 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5196 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5197 /* MAX (X, 0) > 0 -> X > 0
5198 MAX (X, 0) > 5 -> X > 5 */
5199 return fold_build2 (GT_EXPR, type, inner, comp_const);
5201 else if (op_code == MAX_EXPR)
5202 /* MAX (X, 0) > -1 -> true */
5203 return omit_one_operand (type, integer_one_node, inner);
5205 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5206 /* MIN (X, 0) > 0 -> false
5207 MIN (X, 0) > 5 -> false */
5208 return omit_one_operand (type, integer_zero_node, inner);
5211 /* MIN (X, 0) > -1 -> X > -1 */
5212 return fold_build2 (GT_EXPR, type, inner, comp_const);
5219 /* T is an integer expression that is being multiplied by, divided by, or
5220 reduced modulo a constant C (CODE says which operation and what kind of
5221 divide or modulus). See if we can eliminate that operation by folding it with
5222 other operations already in T. WIDE_TYPE, if non-null, is a type that
5223 should be used for the computation if wider than our type.
5225 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5226 (X * 2) + (Y * 4). We must, however, be assured that either the original
5227 expression would not overflow or that overflow is undefined for the type
5228 in the language in question.
5230 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5231 the machine has a multiply-accumulate insn or that this is part of an
5232 addressing calculation.
5234 If we return a non-null expression, it is an equivalent form of the
5235 original computation, but need not be in the original type. */
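/* A further illustrative case (assuming signed arithmetic, where
   overflow is undefined, so the rewrite is permitted):

     extract_muldiv on T = (x * 6) with C = 2 and CODE = TRUNC_DIV_EXPR
     can return x * 3, since 6 is a multiple of 2.

   This is only a sketch; the unsigned/sizetype and overflow guards in
   extract_muldiv_1 below decide whether such a rewrite actually fires.  */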
5238 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5240 /* To avoid exponential search depth, refuse to allow recursion past
5241 three levels. Beyond that (1) it's highly unlikely that we'll find
5242 something interesting and (2) we've probably processed it before
5243 when we built the inner expression. */
5252 ret = extract_muldiv_1 (t, c, code, wide_type);
5259 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5261 tree type = TREE_TYPE (t);
5262 enum tree_code tcode = TREE_CODE (t);
5263 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5264 > GET_MODE_SIZE (TYPE_MODE (type)))
5265 ? wide_type : type);
5267 int same_p = tcode == code;
5268 tree op0 = NULL_TREE, op1 = NULL_TREE;
5270 /* Don't deal with constants of zero here; they confuse the code below. */
5271 if (integer_zerop (c))
5274 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5275 op0 = TREE_OPERAND (t, 0);
5277 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5278 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5280 /* Note that we need not handle conditional operations here since fold
5281 already handles those cases. So just do arithmetic here. */
5285 /* For a constant, we can always simplify if we are a multiply
5286 or (for divide and modulus) if it is a multiple of our constant. */
5287 if (code == MULT_EXPR
5288 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5289 return const_binop (code, fold_convert (ctype, t),
5290 fold_convert (ctype, c), 0);
5293 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5294 /* If op0 is an expression ... */
5295 if ((COMPARISON_CLASS_P (op0)
5296 || UNARY_CLASS_P (op0)
5297 || BINARY_CLASS_P (op0)
5298 || EXPRESSION_CLASS_P (op0))
5299 /* ... and is unsigned, and its type is smaller than ctype,
5300 then we cannot pass through as widening. */
5301 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5302 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5303 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5304 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5305 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5306 /* ... or this is a truncation (t is narrower than op0),
5307 then we cannot pass through this narrowing. */
5308 || (GET_MODE_SIZE (TYPE_MODE (type))
5309 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5310 /* ... or signedness changes for division or modulus,
5311 then we cannot pass through this conversion. */
5312 || (code != MULT_EXPR
5313 && (TYPE_UNSIGNED (ctype)
5314 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5317 /* Pass the constant down and see if we can make a simplification. If
5318 we can, replace this expression with the inner simplification for
5319 possible later conversion to our or some other type. */
5320 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5321 && TREE_CODE (t2) == INTEGER_CST
5322 && ! TREE_CONSTANT_OVERFLOW (t2)
5323 && (0 != (t1 = extract_muldiv (op0, t2, code,
5325 ? ctype : NULL_TREE))))
5330 /* If widening the type changes it from signed to unsigned, then we
5331 must avoid building ABS_EXPR itself as unsigned. */
5332 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5334 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5335 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5337 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5338 return fold_convert (ctype, t1);
5344 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5345 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5348 case MIN_EXPR: case MAX_EXPR:
5349 /* If widening the type changes the signedness, then we can't perform
5350 this optimization as that changes the result. */
5351 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5354 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5355 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5356 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5358 if (tree_int_cst_sgn (c) < 0)
5359 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5361 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5362 fold_convert (ctype, t2));
5366 case LSHIFT_EXPR: case RSHIFT_EXPR:
5367 /* If the second operand is constant, this is a multiplication
5368 or floor division, by a power of two, so we can treat it that
5369 way unless the multiplier or divisor overflows. Signed
5370 left-shift overflow is implementation-defined rather than
5371 undefined in C90, so do not convert signed left shift into
5373 if (TREE_CODE (op1) == INTEGER_CST
5374 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5375 /* const_binop may not detect overflow correctly,
5376 so check for it explicitly here. */
5377 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5378 && TREE_INT_CST_HIGH (op1) == 0
5379 && 0 != (t1 = fold_convert (ctype,
5380 const_binop (LSHIFT_EXPR,
5383 && ! TREE_OVERFLOW (t1))
5384 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5385 ? MULT_EXPR : FLOOR_DIV_EXPR,
5386 ctype, fold_convert (ctype, op0), t1),
5387 c, code, wide_type);
5390 case PLUS_EXPR: case MINUS_EXPR:
5391 /* See if we can eliminate the operation on both sides. If we can, we
5392 can return a new PLUS or MINUS. If we can't, the only remaining
5393 cases where we can do anything are if the second operand is a
5395 t1 = extract_muldiv (op0, c, code, wide_type);
5396 t2 = extract_muldiv (op1, c, code, wide_type);
5397 if (t1 != 0 && t2 != 0
5398 && (code == MULT_EXPR
5399 /* If not multiplication, we can only do this if both operands
5400 are divisible by c. */
5401 || (multiple_of_p (ctype, op0, c)
5402 && multiple_of_p (ctype, op1, c))))
5403 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5404 fold_convert (ctype, t2));
5406 /* If this was a subtraction, negate OP1 and set it to be an addition.
5407 This simplifies the logic below. */
5408 if (tcode == MINUS_EXPR)
5409 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5411 if (TREE_CODE (op1) != INTEGER_CST)
5414 /* If either OP1 or C are negative, this optimization is not safe for
5415 some of the division and remainder types while for others we need
5416 to change the code. */
5417 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5419 if (code == CEIL_DIV_EXPR)
5420 code = FLOOR_DIV_EXPR;
5421 else if (code == FLOOR_DIV_EXPR)
5422 code = CEIL_DIV_EXPR;
5423 else if (code != MULT_EXPR
5424 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5428 /* If it's a multiply or a division/modulus operation of a multiple
5429 of our constant, do the operation and verify it doesn't overflow. */
5430 if (code == MULT_EXPR
5431 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5433 op1 = const_binop (code, fold_convert (ctype, op1),
5434 fold_convert (ctype, c), 0);
5435 /* We allow the constant to overflow with wrapping semantics. */
5437 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5443 /* If we have an unsigned type that is not a sizetype, we cannot widen
5444 the operation since it will change the result if the original
5445 computation overflowed. */
5446 if (TYPE_UNSIGNED (ctype)
5447 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5451 /* If we were able to eliminate our operation from the first side,
5452 apply our operation to the second side and reform the PLUS. */
5453 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5454 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5456 /* The last case is if we are a multiply. In that case, we can
5457 apply the distributive law to commute the multiply and addition
5458 if the multiplication of the constants doesn't overflow. */
5459 if (code == MULT_EXPR)
5460 return fold_build2 (tcode, ctype,
5461 fold_build2 (code, ctype,
5462 fold_convert (ctype, op0),
5463 fold_convert (ctype, c)),
5469 /* We have a special case here if we are doing something like
5470 (C * 8) % 4 since we know that's zero. */
5471 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5472 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5473 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5474 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5475 return omit_one_operand (type, integer_zero_node, op0);
5477 /* ... fall through ... */
5479 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5480 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5481 /* If we can extract our operation from the LHS, do so and return a
5482 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5483 do something only if the second operand is a constant. */
5485 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5486 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5487 fold_convert (ctype, op1));
5488 else if (tcode == MULT_EXPR && code == MULT_EXPR
5489 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5490 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5491 fold_convert (ctype, t1));
5492 else if (TREE_CODE (op1) != INTEGER_CST)
5495 /* If these are the same operation types, we can associate them
5496 assuming no overflow. */
5498 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5499 fold_convert (ctype, c), 0))
5500 && ! TREE_OVERFLOW (t1))
5501 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5503 /* If these operations "cancel" each other, we have the main
5504 optimizations of this pass, which occur when either constant is a
5505 multiple of the other, in which case we replace this with either an
5506 operation of CODE or TCODE.
5508 If we have an unsigned type that is not a sizetype, we cannot do
5509 this since it will change the result if the original computation
5511 if ((! TYPE_UNSIGNED (ctype)
5512 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5514 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5515 || (tcode == MULT_EXPR
5516 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5517 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5519 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5520 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5521 fold_convert (ctype,
5522 const_binop (TRUNC_DIV_EXPR,
5524 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5525 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5526 fold_convert (ctype,
5527 const_binop (TRUNC_DIV_EXPR,
5539 /* Return a node which has the indicated constant VALUE (either 0 or
5540 1), and is of the indicated TYPE. */
5543 constant_boolean_node (int value, tree type)
5545 if (type == integer_type_node)
5546 return value ? integer_one_node : integer_zero_node;
5547 else if (type == boolean_type_node)
5548 return value ? boolean_true_node : boolean_false_node;
5550 return build_int_cst (type, value);
5554 /* Return true if expr looks like an ARRAY_REF and set base and
5555 offset to the appropriate trees. If there is no offset,
5556 offset is set to NULL_TREE. Base will be canonicalized to
5557 something you can get the element type from using
5558 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5559 in bytes to the base. */
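/* Sketches of what callers get back (names are illustrative):

     expr == &a[i]        ->  *base == a,  *offset == i * sizeof (a[0])
     expr == p (pointer)  ->  *base == p,  *offset == NULL_TREE

   A PLUS_EXPR such as &a[i] + 4 is handled recursively, with the
   addend accumulated into *offset.  */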
5562 extract_array_ref (tree expr, tree *base, tree *offset)
5564 /* One canonical form is a PLUS_EXPR with the first
5565 argument being an ADDR_EXPR with a possible NOP_EXPR
5567 if (TREE_CODE (expr) == PLUS_EXPR)
5569 tree op0 = TREE_OPERAND (expr, 0);
5570 tree inner_base, dummy1;
5571 /* Strip NOP_EXPRs here because the C frontends and/or
5572 folders may present us with (int *)&x.a + 4B.
5574 if (extract_array_ref (op0, &inner_base, &dummy1))
5577 if (dummy1 == NULL_TREE)
5578 *offset = TREE_OPERAND (expr, 1);
5580 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5581 dummy1, TREE_OPERAND (expr, 1));
5585 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5586 which we transform into an ADDR_EXPR with appropriate
5587 offset. For other arguments to the ADDR_EXPR we assume
5588 zero offset and as such do not care about the ADDR_EXPR
5589 type and strip possible nops from it. */
5590 else if (TREE_CODE (expr) == ADDR_EXPR)
5592 tree op0 = TREE_OPERAND (expr, 0);
5593 if (TREE_CODE (op0) == ARRAY_REF)
5595 tree idx = TREE_OPERAND (op0, 1);
5596 *base = TREE_OPERAND (op0, 0);
5597 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5598 array_ref_element_size (op0));
5602 /* Handle array-to-pointer decay as &a. */
5603 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5604 *base = TREE_OPERAND (expr, 0);
5607 *offset = NULL_TREE;
5611 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5612 else if (SSA_VAR_P (expr)
5613 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5616 *offset = NULL_TREE;
5624 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5625 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5626 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5627 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5628 COND is the first argument to CODE; otherwise (as in the example
5629 given here), it is the second argument. TYPE is the type of the
5630 original expression. Return NULL_TREE if no simplification is
5634 fold_binary_op_with_conditional_arg (enum tree_code code,
5635 tree type, tree op0, tree op1,
5636 tree cond, tree arg, int cond_first_p)
5638 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5639 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5640 tree test, true_value, false_value;
5641 tree lhs = NULL_TREE;
5642 tree rhs = NULL_TREE;
5644 /* This transformation is only worthwhile if we don't have to wrap
5645 arg in a SAVE_EXPR, and the operation can be simplified on at least
5646 one of the branches once it's pushed inside the COND_EXPR. */
5647 if (!TREE_CONSTANT (arg))
5650 if (TREE_CODE (cond) == COND_EXPR)
5652 test = TREE_OPERAND (cond, 0);
5653 true_value = TREE_OPERAND (cond, 1);
5654 false_value = TREE_OPERAND (cond, 2);
5655 /* If this operand throws an exception, then it does not make
5656 sense to try to perform a logical or arithmetic operation
5658 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5660 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5665 tree testtype = TREE_TYPE (cond);
5667 true_value = constant_boolean_node (true, testtype);
5668 false_value = constant_boolean_node (false, testtype);
5671 arg = fold_convert (arg_type, arg);
5674 true_value = fold_convert (cond_type, true_value);
5676 lhs = fold_build2 (code, type, true_value, arg);
5678 lhs = fold_build2 (code, type, arg, true_value);
5682 false_value = fold_convert (cond_type, false_value);
5684 rhs = fold_build2 (code, type, false_value, arg);
5686 rhs = fold_build2 (code, type, arg, false_value);
5689 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5690 return fold_convert (type, test);
5694 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5696 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5697 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5698 ADDEND is the same as X.
5700 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5701 and finite. The problematic cases are when X is zero, and its mode
5702 has signed zeros. In the case of rounding towards -infinity,
5703 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5704 modes, X + 0 is not the same as X because -0 + 0 is 0. */
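/* Concretely (a sketch for `double x' with the default flags, i.e.
   signed zeros honored and round-to-nearest):

     x - 0.0   may be folded to x   (the NEGATE case below)
     x + 0.0   may not: if x is -0.0, then -0.0 + 0.0 yields +0.0.

   With -fno-signed-zeros both folds are allowed.  */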
5707 fold_real_zero_addition_p (tree type, tree addend, int negate)
5709 if (!real_zerop (addend))
5712 /* Don't allow the fold with -fsignaling-nans. */
5713 if (HONOR_SNANS (TYPE_MODE (type)))
5716 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5717 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5720 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5721 if (TREE_CODE (addend) == REAL_CST
5722 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5725 /* The mode has signed zeros, and we have to honor their sign.
5726 In this situation, there is only one case we can return true for.
5727 X - 0 is the same as X unless rounding towards -infinity is
5729 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5732 /* Subroutine of fold() that checks comparisons of built-in math
5733 functions against real constants.
5735 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5736 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5737 is the type of the result and ARG0 and ARG1 are the operands of the
5738 comparison. ARG1 must be a TREE_REAL_CST.
5740 The function returns the constant folded tree if a simplification
5741 can be made, and NULL_TREE otherwise. */
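/* Two sketches of the sqrt cases handled below (for `double x'):

     sqrt (x) >  2.0    ->   x > 4.0
       (safe even with NaNs: both sides are false for x < 0 or x NaN)

     sqrt (x) < -1.0    ->   0   (always false)

   The constants here are illustrative; the general rules appear in the
   comments inside the function.  */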
5744 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5745 tree type, tree arg0, tree arg1)
5749 if (BUILTIN_SQRT_P (fcode))
5751 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5752 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5754 c = TREE_REAL_CST (arg1);
5755 if (REAL_VALUE_NEGATIVE (c))
5757 /* sqrt(x) < y is always false, if y is negative. */
5758 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5759 return omit_one_operand (type, integer_zero_node, arg);
5761 /* sqrt(x) > y is always true, if y is negative and we
5762 don't care about NaNs, i.e. negative values of x. */
5763 if (code == NE_EXPR || !HONOR_NANS (mode))
5764 return omit_one_operand (type, integer_one_node, arg);
5766 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5767 return fold_build2 (GE_EXPR, type, arg,
5768 build_real (TREE_TYPE (arg), dconst0));
5770 else if (code == GT_EXPR || code == GE_EXPR)
5774 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5775 real_convert (&c2, mode, &c2);
5777 if (REAL_VALUE_ISINF (c2))
5779 /* sqrt(x) > y is x == +Inf, when y is very large. */
5780 if (HONOR_INFINITIES (mode))
5781 return fold_build2 (EQ_EXPR, type, arg,
5782 build_real (TREE_TYPE (arg), c2));
5784 /* sqrt(x) > y is always false, when y is very large
5785 and we don't care about infinities. */
5786 return omit_one_operand (type, integer_zero_node, arg);
5789 /* sqrt(x) > c is the same as x > c*c. */
5790 return fold_build2 (code, type, arg,
5791 build_real (TREE_TYPE (arg), c2));
5793 else if (code == LT_EXPR || code == LE_EXPR)
5797 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5798 real_convert (&c2, mode, &c2);
5800 if (REAL_VALUE_ISINF (c2))
5802 /* sqrt(x) < y is always true, when y is a very large
5803 value and we don't care about NaNs or Infinities. */
5804 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5805 return omit_one_operand (type, integer_one_node, arg);
5807 /* sqrt(x) < y is x != +Inf when y is very large and we
5808 don't care about NaNs. */
5809 if (! HONOR_NANS (mode))
5810 return fold_build2 (NE_EXPR, type, arg,
5811 build_real (TREE_TYPE (arg), c2));
5813 /* sqrt(x) < y is x >= 0 when y is very large and we
5814 don't care about Infinities. */
5815 if (! HONOR_INFINITIES (mode))
5816 return fold_build2 (GE_EXPR, type, arg,
5817 build_real (TREE_TYPE (arg), dconst0));
5819 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5820 if (lang_hooks.decls.global_bindings_p () != 0
5821 || CONTAINS_PLACEHOLDER_P (arg))
5824 arg = save_expr (arg);
5825 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5826 fold_build2 (GE_EXPR, type, arg,
5827 build_real (TREE_TYPE (arg),
5829 fold_build2 (NE_EXPR, type, arg,
5830 build_real (TREE_TYPE (arg),
5834 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5835 if (! HONOR_NANS (mode))
5836 return fold_build2 (code, type, arg,
5837 build_real (TREE_TYPE (arg), c2));
5839 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5840 if (lang_hooks.decls.global_bindings_p () == 0
5841 && ! CONTAINS_PLACEHOLDER_P (arg))
5843 arg = save_expr (arg);
5844 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5845 fold_build2 (GE_EXPR, type, arg,
5846 build_real (TREE_TYPE (arg),
5848 fold_build2 (code, type, arg,
5849 build_real (TREE_TYPE (arg),
5858 /* Subroutine of fold() that optimizes comparisons against Infinities,
5859 either +Inf or -Inf.
5861 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5862 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5863 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5865 The function returns the constant folded tree if a simplification
5866 can be made, and NULL_TREE otherwise. */
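/* For example, with `double x' this turns

     x <  __builtin_inf ()   into   x <= DBL_MAX
     x >= __builtin_inf ()   into   x >  DBL_MAX

   (sketches only; the -Inf cases are handled by swapping the sense of
   the comparison first, as noted inside the function).  */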
5869 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5871 enum machine_mode mode;
5872 REAL_VALUE_TYPE max;
5876 mode = TYPE_MODE (TREE_TYPE (arg0));
5878 /* For negative infinity swap the sense of the comparison. */
5879 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5881 code = swap_tree_comparison (code);
5886 /* x > +Inf is always false, if we ignore sNaNs. */
5887 if (HONOR_SNANS (mode))
5889 return omit_one_operand (type, integer_zero_node, arg0);
5892 /* x <= +Inf is always true, if we don't care about NaNs. */
5893 if (! HONOR_NANS (mode))
5894 return omit_one_operand (type, integer_one_node, arg0);
5896 /* x <= +Inf is the same as x == x, i.e. !isnan (x). */
5897 if (lang_hooks.decls.global_bindings_p () == 0
5898 && ! CONTAINS_PLACEHOLDER_P (arg0))
5900 arg0 = save_expr (arg0);
5901 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5907 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5908 real_maxval (&max, neg, mode);
5909 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5910 arg0, build_real (TREE_TYPE (arg0), max));
5913 /* x < +Inf is always equal to x <= DBL_MAX. */
5914 real_maxval (&max, neg, mode);
5915 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5916 arg0, build_real (TREE_TYPE (arg0), max));
5919 /* x != +Inf is always equal to !(x > DBL_MAX). */
5920 real_maxval (&max, neg, mode);
5921 if (! HONOR_NANS (mode))
5922 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5923 arg0, build_real (TREE_TYPE (arg0), max));
5925 /* The transformation below creates non-gimple code and thus is
5926 not appropriate if we are in gimple form. */
5930 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5931 arg0, build_real (TREE_TYPE (arg0), max));
5932 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5941 /* Subroutine of fold() that optimizes comparisons of a division by
5942 a nonzero integer constant against an integer constant, i.e.
5945 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5946 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5947 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5949 The function returns the constant folded tree if a simplification
5950 can be made, and NULL_TREE otherwise. */
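/* A sketch for signed `int x':

     x / 4 == 2    becomes a range check for x in [8, 11], i.e. roughly
		   (unsigned) (x - 8) <= 3
     x / 4 >  2    becomes   x > 11

   The bounds come from the lo/hi computation below; when they overflow
   the type, the result degenerates as handled at the end.  */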
5953 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5955 tree prod, tmp, hi, lo;
5956 tree arg00 = TREE_OPERAND (arg0, 0);
5957 tree arg01 = TREE_OPERAND (arg0, 1);
5958 unsigned HOST_WIDE_INT lpart;
5959 HOST_WIDE_INT hpart;
5960 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
5963 /* We have to do this the hard way to detect unsigned overflow.
5964 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5965 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
5966 TREE_INT_CST_HIGH (arg01),
5967 TREE_INT_CST_LOW (arg1),
5968 TREE_INT_CST_HIGH (arg1),
5969 &lpart, &hpart, unsigned_p);
5970 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5971 prod = force_fit_type (prod, -1, overflow, false);
5975 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5978 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5979 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
5980 TREE_INT_CST_HIGH (prod),
5981 TREE_INT_CST_LOW (tmp),
5982 TREE_INT_CST_HIGH (tmp),
5983 &lpart, &hpart, unsigned_p);
5984 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5985 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5986 TREE_CONSTANT_OVERFLOW (prod));
5988 else if (tree_int_cst_sgn (arg01) >= 0)
5990 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5991 switch (tree_int_cst_sgn (arg1))
5994 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5999 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6004 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6014 /* A negative divisor reverses the relational operators. */
6015 code = swap_tree_comparison (code);
6017 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6018 switch (tree_int_cst_sgn (arg1))
6021 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6026 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6031 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6043 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6044 return omit_one_operand (type, integer_zero_node, arg00);
6045 if (TREE_OVERFLOW (hi))
6046 return fold_build2 (GE_EXPR, type, arg00, lo);
6047 if (TREE_OVERFLOW (lo))
6048 return fold_build2 (LE_EXPR, type, arg00, hi);
6049 return build_range_check (type, arg00, 1, lo, hi);
6052 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6053 return omit_one_operand (type, integer_one_node, arg00);
6054 if (TREE_OVERFLOW (hi))
6055 return fold_build2 (LT_EXPR, type, arg00, lo);
6056 if (TREE_OVERFLOW (lo))
6057 return fold_build2 (GT_EXPR, type, arg00, hi);
6058 return build_range_check (type, arg00, 0, lo, hi);
6061 if (TREE_OVERFLOW (lo))
6062 return omit_one_operand (type, integer_zero_node, arg00);
6063 return fold_build2 (LT_EXPR, type, arg00, lo);
6066 if (TREE_OVERFLOW (hi))
6067 return omit_one_operand (type, integer_one_node, arg00);
6068 return fold_build2 (LE_EXPR, type, arg00, hi);
6071 if (TREE_OVERFLOW (hi))
6072 return omit_one_operand (type, integer_zero_node, arg00);
6073 return fold_build2 (GT_EXPR, type, arg00, hi);
6076 if (TREE_OVERFLOW (lo))
6077 return omit_one_operand (type, integer_one_node, arg00);
6078 return fold_build2 (GE_EXPR, type, arg00, lo);
6088 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6089 equality/inequality test, then return a simplified form of the test
6090 using a sign test. Otherwise return NULL. TYPE is the desired
6094 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6097 /* If this is testing a single bit, we can optimize the test. */
6098 if ((code == NE_EXPR || code == EQ_EXPR)
6099 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6100 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6102 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6103 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6104 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6106 if (arg00 != NULL_TREE
6107 /* This is only a win if casting to a signed type is cheap,
6108 i.e. when arg00's type is not a partial mode. */
6109 && TYPE_PRECISION (TREE_TYPE (arg00))
6110 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6112 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6113 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6114 result_type, fold_convert (stype, arg00),
6115 fold_convert (stype, integer_zero_node));
6122 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6123 equality/inequality test, then return a simplified form of
6124 the test using shifts and logical operations. Otherwise return
6125 NULL. TYPE is the desired result type. */
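/* Sketches for `int x' (bit numbers are illustrative):

     (x & 8) != 0              ->   (x >> 3) & 1
     (x & 8) == 0              ->   ((x >> 3) ^ 1) & 1
     (x & 0x80000000) != 0     ->   x < 0   (the sign-bit variant above,
					     assuming 32-bit int)

   The intermediate type (signed vs. unsigned) is chosen below based on
   LOAD_EXTEND_OP.  */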
6128 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6131 /* If this is testing a single bit, we can optimize the test. */
6132 if ((code == NE_EXPR || code == EQ_EXPR)
6133 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6134 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6136 tree inner = TREE_OPERAND (arg0, 0);
6137 tree type = TREE_TYPE (arg0);
6138 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6139 enum machine_mode operand_mode = TYPE_MODE (type);
6141 tree signed_type, unsigned_type, intermediate_type;
6144 /* First, see if we can fold the single bit test into a sign-bit
6146 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6151 /* Otherwise we have (A & C) != 0 where C is a single bit,
6152 convert that into ((A >> C2) & 1), where C2 = log2 (C).
6153 Similarly for (A & C) == 0. */
6155 /* If INNER is a right shift of a constant and it plus BITNUM does
6156 not overflow, adjust BITNUM and INNER. */
6157 if (TREE_CODE (inner) == RSHIFT_EXPR
6158 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6159 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6160 && bitnum < TYPE_PRECISION (type)
6161 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6162 bitnum - TYPE_PRECISION (type)))
6164 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6165 inner = TREE_OPERAND (inner, 0);
6168 /* If we are going to be able to omit the AND below, we must do our
6169 operations as unsigned. If we must use the AND, we have a choice.
6170 Normally unsigned is faster, but for some machines signed is. */
6171 #ifdef LOAD_EXTEND_OP
6172 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6173 && !flag_syntax_only) ? 0 : 1;
6178 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6179 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6180 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6181 inner = fold_convert (intermediate_type, inner);
6184 inner = build2 (RSHIFT_EXPR, intermediate_type,
6185 inner, size_int (bitnum));
6187 if (code == EQ_EXPR)
6188 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6189 inner, integer_one_node);
6191 /* Put the AND last so it can combine with more things. */
6192 inner = build2 (BIT_AND_EXPR, intermediate_type,
6193 inner, integer_one_node);
6195 /* Make sure to return the proper type. */
6196 inner = fold_convert (result_type, inner);
6203 /* Check whether we are allowed to reorder operands arg0 and arg1,
6204 such that the evaluation of arg1 occurs before arg0. */
6207 reorder_operands_p (tree arg0, tree arg1)
6209 if (! flag_evaluation_order)
6211 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6213 return ! TREE_SIDE_EFFECTS (arg0)
6214 && ! TREE_SIDE_EFFECTS (arg1);
6217 /* Test whether it is preferable to swap two operands, ARG0 and
6218 ARG1, for example because ARG0 is an integer constant and ARG1
6219 isn't. If REORDER is true, only recommend swapping if we can
6220 evaluate the operands in reverse order. */
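/* Illustrative example (not from the original source): fold_binary uses
   this to canonicalize commutative operations so that a constant ends up
   as the second operand, e.g. 1 + x is rebuilt as x + 1.  */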
6223 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6225 STRIP_SIGN_NOPS (arg0);
6226 STRIP_SIGN_NOPS (arg1);
6228 if (TREE_CODE (arg1) == INTEGER_CST)
6230 if (TREE_CODE (arg0) == INTEGER_CST)
6233 if (TREE_CODE (arg1) == REAL_CST)
6235 if (TREE_CODE (arg0) == REAL_CST)
6238 if (TREE_CODE (arg1) == COMPLEX_CST)
6240 if (TREE_CODE (arg0) == COMPLEX_CST)
6243 if (TREE_CONSTANT (arg1))
6245 if (TREE_CONSTANT (arg0))
6251 if (reorder && flag_evaluation_order
6252 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6260 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6261 for commutative and comparison operators. Ensuring a canonical
6262 form allows the optimizers to find additional redundancies without
6263 having to explicitly check for both orderings. */
6264 if (TREE_CODE (arg0) == SSA_NAME
6265 && TREE_CODE (arg1) == SSA_NAME
6266 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6272 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6273 ARG0 is extended to a wider type. */
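/* Illustrative example (not from the original source): if C has type
   unsigned char, the comparison (int) C == 1000 cannot be expressed in
   the narrower type, and since 1000 is outside the range of unsigned
   char the result is known to be false.  */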
6276 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6278 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6280 tree shorter_type, outer_type;
6284 if (arg0_unw == arg0)
6286 shorter_type = TREE_TYPE (arg0_unw);
6288 #ifdef HAVE_canonicalize_funcptr_for_compare
6289 /* Disable this optimization if we're casting a function pointer
6290 type on targets that require function pointer canonicalization. */
6291 if (HAVE_canonicalize_funcptr_for_compare
6292 && TREE_CODE (shorter_type) == POINTER_TYPE
6293 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6297 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6300 arg1_unw = get_unwidened (arg1, shorter_type);
6302 /* If possible, express the comparison in the shorter mode. */
6303 if ((code == EQ_EXPR || code == NE_EXPR
6304 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6305 && (TREE_TYPE (arg1_unw) == shorter_type
6306 || (TREE_CODE (arg1_unw) == INTEGER_CST
6307 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6308 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6309 && int_fits_type_p (arg1_unw, shorter_type))))
6310 return fold_build2 (code, type, arg0_unw,
6311 fold_convert (shorter_type, arg1_unw));
6313 if (TREE_CODE (arg1_unw) != INTEGER_CST
6314 || TREE_CODE (shorter_type) != INTEGER_TYPE
6315 || !int_fits_type_p (arg1_unw, shorter_type))
6318 /* If we are comparing with an integer that does not fit into the range
6319 of the shorter type, the result is known. */
6320 outer_type = TREE_TYPE (arg1_unw);
6321 min = lower_bound_in_type (outer_type, shorter_type);
6322 max = upper_bound_in_type (outer_type, shorter_type);
6324 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6326 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6333 return omit_one_operand (type, integer_zero_node, arg0);
6338 return omit_one_operand (type, integer_one_node, arg0);
6344 return omit_one_operand (type, integer_one_node, arg0);
6346 return omit_one_operand (type, integer_zero_node, arg0);
6351 return omit_one_operand (type, integer_zero_node, arg0);
6353 return omit_one_operand (type, integer_one_node, arg0);
6362 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6363 ARG0 just the signedness is changed. */
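/* Illustrative example (not from the original source): with int I, the
   comparison (unsigned int) I == 3 can be rewritten as I == 3, since
   only the signedness of the operand changes and the constant is
   representable in both types.  */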
6366 fold_sign_changed_comparison (enum tree_code code, tree type,
6367 tree arg0, tree arg1)
6369 tree arg0_inner, tmp;
6370 tree inner_type, outer_type;
6372 if (TREE_CODE (arg0) != NOP_EXPR
6373 && TREE_CODE (arg0) != CONVERT_EXPR)
6376 outer_type = TREE_TYPE (arg0);
6377 arg0_inner = TREE_OPERAND (arg0, 0);
6378 inner_type = TREE_TYPE (arg0_inner);
6380 #ifdef HAVE_canonicalize_funcptr_for_compare
6381 /* Disable this optimization if we're casting a function pointer
6382 type on targets that require function pointer canonicalization. */
6383 if (HAVE_canonicalize_funcptr_for_compare
6384 && TREE_CODE (inner_type) == POINTER_TYPE
6385 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6389 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6392 if (TREE_CODE (arg1) != INTEGER_CST
6393 && !((TREE_CODE (arg1) == NOP_EXPR
6394 || TREE_CODE (arg1) == CONVERT_EXPR)
6395 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6398 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6403 if (TREE_CODE (arg1) == INTEGER_CST)
6405 tmp = build_int_cst_wide (inner_type,
6406 TREE_INT_CST_LOW (arg1),
6407 TREE_INT_CST_HIGH (arg1));
6408 arg1 = force_fit_type (tmp, 0,
6409 TREE_OVERFLOW (arg1),
6410 TREE_CONSTANT_OVERFLOW (arg1));
6413 arg1 = fold_convert (inner_type, arg1);
6415 return fold_build2 (code, type, arg0_inner, arg1);
6418 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6419 the step of the array. Reconstructs s and delta in the case of s * delta
6420 being an integer constant (and thus already folded).
6421 ADDR is the address. MULT is the multiplicative expression.
6422 If the function succeeds, the new address expression is returned. Otherwise
6423 NULL_TREE is returned. */
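/* Illustrative example (not from the original source): given int a[N]
   with 4-byte int, the address expression &a[i] + j * 4 can be rewritten
   as &a[i + j], because 4 is the step (element size) of the array.  */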
6426 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6428 tree s, delta, step;
6429 tree ref = TREE_OPERAND (addr, 0), pref;
6433 /* Canonicalize op1 into a possibly non-constant delta
6434 and an INTEGER_CST s. */
6435 if (TREE_CODE (op1) == MULT_EXPR)
6437 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6442 if (TREE_CODE (arg0) == INTEGER_CST)
6447 else if (TREE_CODE (arg1) == INTEGER_CST)
6455 else if (TREE_CODE (op1) == INTEGER_CST)
6462 /* Treat op1 as delta * 1. */
6464 s = integer_one_node;
6467 for (;; ref = TREE_OPERAND (ref, 0))
6469 if (TREE_CODE (ref) == ARRAY_REF)
6471 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6475 step = array_ref_element_size (ref);
6476 if (TREE_CODE (step) != INTEGER_CST)
6481 if (! tree_int_cst_equal (step, s))
6486 /* Check whether delta is a multiple of step. */
6487 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6496 if (!handled_component_p (ref))
6500 /* We found a suitable array reference. Copy everything up to it,
6501 and replace the index. */
6503 pref = TREE_OPERAND (addr, 0);
6504 ret = copy_node (pref);
6509 pref = TREE_OPERAND (pref, 0);
6510 TREE_OPERAND (pos, 0) = copy_node (pref);
6511 pos = TREE_OPERAND (pos, 0);
6514 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6515 fold_convert (itype,
6516 TREE_OPERAND (pos, 1)),
6517 fold_convert (itype, delta));
6519 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6523 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6524 means A >= Y && A != MAX, but in this case we know that
6525 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6528 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6530 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6532 if (TREE_CODE (bound) == LT_EXPR)
6533 a = TREE_OPERAND (bound, 0);
6534 else if (TREE_CODE (bound) == GT_EXPR)
6535 a = TREE_OPERAND (bound, 1);
6539 typea = TREE_TYPE (a);
6540 if (!INTEGRAL_TYPE_P (typea)
6541 && !POINTER_TYPE_P (typea))
6544 if (TREE_CODE (ineq) == LT_EXPR)
6546 a1 = TREE_OPERAND (ineq, 1);
6547 y = TREE_OPERAND (ineq, 0);
6549 else if (TREE_CODE (ineq) == GT_EXPR)
6551 a1 = TREE_OPERAND (ineq, 0);
6552 y = TREE_OPERAND (ineq, 1);
6557 if (TREE_TYPE (a1) != typea)
6560 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6561 if (!integer_onep (diff))
6564 return fold_build2 (GE_EXPR, type, a, y);
6567 /* Fold a unary expression of code CODE and type TYPE with operand
6568 OP0. Return the folded expression if folding is successful.
6569 Otherwise, return NULL_TREE. */
6572 fold_unary (enum tree_code code, tree type, tree op0)
6576 enum tree_code_class kind = TREE_CODE_CLASS (code);
6578 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6579 && TREE_CODE_LENGTH (code) == 1);
6584 if (code == NOP_EXPR || code == CONVERT_EXPR
6585 || code == FLOAT_EXPR || code == ABS_EXPR)
6587 /* Don't use STRIP_NOPS, because signedness of argument type
6589 STRIP_SIGN_NOPS (arg0);
6593 /* Strip any conversions that don't change the mode. This
6594 is safe for every expression, except for a comparison
6595 expression because its signedness is derived from its
6598 Note that this is done as an internal manipulation within
6599 the constant folder, in order to find the simplest
6600 representation of the arguments so that their form can be
6601 studied. In any case, the appropriate type conversions
6602 should be put back in the tree that will get out of the
6608 if (TREE_CODE_CLASS (code) == tcc_unary)
6610 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6611 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6612 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6613 else if (TREE_CODE (arg0) == COND_EXPR)
6615 tree arg01 = TREE_OPERAND (arg0, 1);
6616 tree arg02 = TREE_OPERAND (arg0, 2);
6617 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6618 arg01 = fold_build1 (code, type, arg01);
6619 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6620 arg02 = fold_build1 (code, type, arg02);
6621 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6624 /* If this was a conversion, and all we did was to move it
6625 inside the COND_EXPR, bring it back out. But leave it if
6626 it is a conversion from integer to integer and the
6627 result precision is no wider than a word since such a
6628 conversion is cheap and may be optimized away by combine,
6629 while it couldn't if it were outside the COND_EXPR. Then return
6630 so we don't get into an infinite recursion loop taking the
6631 conversion out and then back in. */
6633 if ((code == NOP_EXPR || code == CONVERT_EXPR
6634 || code == NON_LVALUE_EXPR)
6635 && TREE_CODE (tem) == COND_EXPR
6636 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6637 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6638 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6639 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6640 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6641 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6642 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6644 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6645 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6646 || flag_syntax_only))
6647 tem = build1 (code, type,
6649 TREE_TYPE (TREE_OPERAND
6650 (TREE_OPERAND (tem, 1), 0)),
6651 TREE_OPERAND (tem, 0),
6652 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6653 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6656 else if (COMPARISON_CLASS_P (arg0))
6658 if (TREE_CODE (type) == BOOLEAN_TYPE)
6660 arg0 = copy_node (arg0);
6661 TREE_TYPE (arg0) = type;
6664 else if (TREE_CODE (type) != INTEGER_TYPE)
6665 return fold_build3 (COND_EXPR, type, arg0,
6666 fold_build1 (code, type,
6668 fold_build1 (code, type,
6669 integer_zero_node));
6678 case FIX_TRUNC_EXPR:
6680 case FIX_FLOOR_EXPR:
6681 case FIX_ROUND_EXPR:
6682 if (TREE_TYPE (op0) == type)
6685 /* Handle cases of two conversions in a row. */
6686 if (TREE_CODE (op0) == NOP_EXPR
6687 || TREE_CODE (op0) == CONVERT_EXPR)
6689 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6690 tree inter_type = TREE_TYPE (op0);
6691 int inside_int = INTEGRAL_TYPE_P (inside_type);
6692 int inside_ptr = POINTER_TYPE_P (inside_type);
6693 int inside_float = FLOAT_TYPE_P (inside_type);
6694 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6695 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6696 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6697 int inter_int = INTEGRAL_TYPE_P (inter_type);
6698 int inter_ptr = POINTER_TYPE_P (inter_type);
6699 int inter_float = FLOAT_TYPE_P (inter_type);
6700 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6701 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6702 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6703 int final_int = INTEGRAL_TYPE_P (type);
6704 int final_ptr = POINTER_TYPE_P (type);
6705 int final_float = FLOAT_TYPE_P (type);
6706 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6707 unsigned int final_prec = TYPE_PRECISION (type);
6708 int final_unsignedp = TYPE_UNSIGNED (type);
6710 /* In addition to the cases of two conversions in a row
6711 handled below, if we are converting something to its own
6712 type via an object of identical or wider precision, neither
6713 conversion is needed. */
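/* Illustrative example (not from the original source): (int) (long) i,
   where i has type int and long is at least as wide, needs neither
   conversion.  */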
6714 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6715 && ((inter_int && final_int) || (inter_float && final_float))
6716 && inter_prec >= final_prec)
6717 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6719 /* Likewise, if the intermediate and final types are either both
6720 float or both integer, we don't need the middle conversion if
6721 it is wider than the final type and doesn't change the signedness
6722 (for integers). Avoid this if the final type is a pointer
6723 since then we sometimes need the inner conversion. Likewise if
6724 the outer has a precision not equal to the size of its mode. */
6725 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6726 || (inter_float && inside_float)
6727 || (inter_vec && inside_vec))
6728 && inter_prec >= inside_prec
6729 && (inter_float || inter_vec
6730 || inter_unsignedp == inside_unsignedp)
6731 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6732 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6734 && (! final_vec || inter_prec == inside_prec))
6735 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6737 /* If we have a sign-extension of a zero-extended value, we can
6738 replace that by a single zero-extension. */
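/* Illustrative example (not from the original source): (int) (short) u,
   where u has type unsigned char: the widening from unsigned char to
   short is a zero-extension, so the outer sign-extension can be replaced
   by a single zero-extension to int.  */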
6739 if (inside_int && inter_int && final_int
6740 && inside_prec < inter_prec && inter_prec < final_prec
6741 && inside_unsignedp && !inter_unsignedp)
6742 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6744 /* Two conversions in a row are not needed unless:
6745 - some conversion is floating-point (overstrict for now), or
6746 - some conversion is a vector (overstrict for now), or
6747 - the intermediate type is narrower than both initial and
6749 - the intermediate type and innermost type differ in signedness,
6750 and the outermost type is wider than the intermediate, or
6751 - the initial type is a pointer type and the precisions of the
6752 intermediate and final types differ, or
6753 - the final type is a pointer type and the precisions of the
6754 initial and intermediate types differ. */
6755 if (! inside_float && ! inter_float && ! final_float
6756 && ! inside_vec && ! inter_vec && ! final_vec
6757 && (inter_prec > inside_prec || inter_prec > final_prec)
6758 && ! (inside_int && inter_int
6759 && inter_unsignedp != inside_unsignedp
6760 && inter_prec < final_prec)
6761 && ((inter_unsignedp && inter_prec > inside_prec)
6762 == (final_unsignedp && final_prec > inter_prec))
6763 && ! (inside_ptr && inter_prec != final_prec)
6764 && ! (final_ptr && inside_prec != inter_prec)
6765 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6766 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6768 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6771 /* Handle (T *)&A.B.C for A being of type T and B and C
6772 living at offset zero. This occurs frequently in
6773 C++ upcasting and then accessing the base. */
6774 if (TREE_CODE (op0) == ADDR_EXPR
6775 && POINTER_TYPE_P (type)
6776 && handled_component_p (TREE_OPERAND (op0, 0)))
6778 HOST_WIDE_INT bitsize, bitpos;
6780 enum machine_mode mode;
6781 int unsignedp, volatilep;
6782 tree base = TREE_OPERAND (op0, 0);
6783 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6784 &mode, &unsignedp, &volatilep, false);
6785 /* If the reference was to a (constant) zero offset, we can use
6786 the address of the base if it has the same base type
6787 as the result type. */
6788 if (! offset && bitpos == 0
6789 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6790 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6791 return fold_convert (type, build_fold_addr_expr (base));
6794 if (TREE_CODE (op0) == MODIFY_EXPR
6795 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6796 /* Detect assigning a bitfield. */
6797 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6798 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6800 /* Don't leave an assignment inside a conversion
6801 unless assigning a bitfield. */
6802 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6803 /* First do the assignment, then return converted constant. */
6804 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6805 TREE_NO_WARNING (tem) = 1;
6806 TREE_USED (tem) = 1;
6810 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6811 constant (if x has signed type, the sign bit cannot be set
6812 in c). This folds extension into the BIT_AND_EXPR. */
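/* Illustrative example (not from the original source):
   (unsigned long) (x & 0xff), with x of type unsigned int, folds to
   (unsigned long) x & 0xff, so the mask is applied in the wider type.  */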
6813 if (INTEGRAL_TYPE_P (type)
6814 && TREE_CODE (type) != BOOLEAN_TYPE
6815 && TREE_CODE (op0) == BIT_AND_EXPR
6816 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6819 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6822 if (TYPE_UNSIGNED (TREE_TYPE (and))
6823 || (TYPE_PRECISION (type)
6824 <= TYPE_PRECISION (TREE_TYPE (and))))
6826 else if (TYPE_PRECISION (TREE_TYPE (and1))
6827 <= HOST_BITS_PER_WIDE_INT
6828 && host_integerp (and1, 1))
6830 unsigned HOST_WIDE_INT cst;
6832 cst = tree_low_cst (and1, 1);
6833 cst &= (HOST_WIDE_INT) -1
6834 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6835 change = (cst == 0);
6836 #ifdef LOAD_EXTEND_OP
6838 && !flag_syntax_only
6839 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6842 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6843 and0 = fold_convert (uns, and0);
6844 and1 = fold_convert (uns, and1);
6850 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6851 TREE_INT_CST_HIGH (and1));
6852 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6853 TREE_CONSTANT_OVERFLOW (and1));
6854 return fold_build2 (BIT_AND_EXPR, type,
6855 fold_convert (type, and0), tem);
6859 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6860 T2 being pointers to types of the same size. */
6861 if (POINTER_TYPE_P (type)
6862 && BINARY_CLASS_P (arg0)
6863 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6864 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6866 tree arg00 = TREE_OPERAND (arg0, 0);
6868 tree t1 = TREE_TYPE (arg00);
6869 tree tt0 = TREE_TYPE (t0);
6870 tree tt1 = TREE_TYPE (t1);
6871 tree s0 = TYPE_SIZE (tt0);
6872 tree s1 = TYPE_SIZE (tt1);
6874 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6875 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6876 TREE_OPERAND (arg0, 1));
6879 tem = fold_convert_const (code, type, arg0);
6880 return tem ? tem : NULL_TREE;
6882 case VIEW_CONVERT_EXPR:
6883 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6884 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6888 if (negate_expr_p (arg0))
6889 return fold_convert (type, negate_expr (arg0));
6890 /* Convert - (~A) to A + 1. */
6891 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6892 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6893 build_int_cst (type, 1));
6897 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6898 return fold_abs_const (arg0, type);
6899 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6900 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6901 /* Convert fabs((double)float) into (double)fabsf(float). */
6902 else if (TREE_CODE (arg0) == NOP_EXPR
6903 && TREE_CODE (type) == REAL_TYPE)
6905 tree targ0 = strip_float_extensions (arg0);
6907 return fold_convert (type, fold_build1 (ABS_EXPR,
6911 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
6912 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
6915 /* Strip sign ops from argument. */
6916 if (TREE_CODE (type) == REAL_TYPE)
6918 tem = fold_strip_sign_ops (arg0);
6920 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6925 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6926 return fold_convert (type, arg0);
6927 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6928 return build2 (COMPLEX_EXPR, type,
6929 TREE_OPERAND (arg0, 0),
6930 negate_expr (TREE_OPERAND (arg0, 1)));
6931 else if (TREE_CODE (arg0) == COMPLEX_CST)
6932 return build_complex (type, TREE_REALPART (arg0),
6933 negate_expr (TREE_IMAGPART (arg0)));
6934 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6935 return fold_build2 (TREE_CODE (arg0), type,
6936 fold_build1 (CONJ_EXPR, type,
6937 TREE_OPERAND (arg0, 0)),
6938 fold_build1 (CONJ_EXPR, type,
6939 TREE_OPERAND (arg0, 1)));
6940 else if (TREE_CODE (arg0) == CONJ_EXPR)
6941 return TREE_OPERAND (arg0, 0);
6945 if (TREE_CODE (arg0) == INTEGER_CST)
6946 return fold_not_const (arg0, type);
6947 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6948 return TREE_OPERAND (arg0, 0);
6949 /* Convert ~ (-A) to A - 1. */
6950 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6951 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6952 build_int_cst (type, 1));
6953 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6954 else if (INTEGRAL_TYPE_P (type)
6955 && ((TREE_CODE (arg0) == MINUS_EXPR
6956 && integer_onep (TREE_OPERAND (arg0, 1)))
6957 || (TREE_CODE (arg0) == PLUS_EXPR
6958 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6959 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
6960 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
6961 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6962 && (tem = fold_unary (BIT_NOT_EXPR, type,
6964 TREE_OPERAND (arg0, 0)))))
6965 return fold_build2 (BIT_XOR_EXPR, type, tem,
6966 fold_convert (type, TREE_OPERAND (arg0, 1)));
6967 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6968 && (tem = fold_unary (BIT_NOT_EXPR, type,
6970 TREE_OPERAND (arg0, 1)))))
6971 return fold_build2 (BIT_XOR_EXPR, type,
6972 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
6976 case TRUTH_NOT_EXPR:
6977 /* The argument to invert_truthvalue must have Boolean type. */
6978 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6979 arg0 = fold_convert (boolean_type_node, arg0);
6981 /* Note that the operand of this must be an int
6982 and its values must be 0 or 1.
6983 ("true" is a fixed value perhaps depending on the language,
6984 but we don't handle values other than 1 correctly yet.) */
6985 tem = invert_truthvalue (arg0);
6986 /* Avoid infinite recursion. */
6987 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6989 return fold_convert (type, tem);
6992 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6994 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6995 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6996 TREE_OPERAND (arg0, 1));
6997 else if (TREE_CODE (arg0) == COMPLEX_CST)
6998 return TREE_REALPART (arg0);
6999 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7000 return fold_build2 (TREE_CODE (arg0), type,
7001 fold_build1 (REALPART_EXPR, type,
7002 TREE_OPERAND (arg0, 0)),
7003 fold_build1 (REALPART_EXPR, type,
7004 TREE_OPERAND (arg0, 1)));
7008 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7009 return fold_convert (type, integer_zero_node);
7010 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7011 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7012 TREE_OPERAND (arg0, 0));
7013 else if (TREE_CODE (arg0) == COMPLEX_CST)
7014 return TREE_IMAGPART (arg0);
7015 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7016 return fold_build2 (TREE_CODE (arg0), type,
7017 fold_build1 (IMAGPART_EXPR, type,
7018 TREE_OPERAND (arg0, 0)),
7019 fold_build1 (IMAGPART_EXPR, type,
7020 TREE_OPERAND (arg0, 1)));
7025 } /* switch (code) */
7028 /* Fold a binary expression of code CODE and type TYPE with operands
7029 OP0 and OP1. Return the folded expression if folding is
7030 successful. Otherwise, return NULL_TREE. */
7033 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7035 tree t1 = NULL_TREE;
7037 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7038 enum tree_code_class kind = TREE_CODE_CLASS (code);
7040 /* WINS will be nonzero when the switch is done
7041 if all operands are constant. */
7044 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7045 && TREE_CODE_LENGTH (code) == 2);
7054 /* Strip any conversions that don't change the mode. This is
7055 safe for every expression, except for a comparison expression
7056 because its signedness is derived from its operands. So, in
7057 the latter case, only strip conversions that don't change the
7060 Note that this is done as an internal manipulation within the
7061 constant folder, in order to find the simplest representation
7062 of the arguments so that their form can be studied. In any
7063 case, the appropriate type conversions should be put back in
7064 the tree that will get out of the constant folder. */
7065 if (kind == tcc_comparison)
7066 STRIP_SIGN_NOPS (arg0);
7070 if (TREE_CODE (arg0) == COMPLEX_CST)
7071 subop = TREE_REALPART (arg0);
7075 if (TREE_CODE (subop) != INTEGER_CST
7076 && TREE_CODE (subop) != REAL_CST)
7077 /* Note that TREE_CONSTANT isn't enough:
7078 static var addresses are constant but we can't
7079 do arithmetic on them. */
7087 /* Strip any conversions that don't change the mode. This is
7088 safe for every expression, except for a comparison expression
7089 because its signedness is derived from its operands. So, in
7090 the latter case, only strip conversions that don't change the
7093 Note that this is done as an internal manipulation within the
7094 constant folder, in order to find the simplest representation
7095 of the arguments so that their form can be studied. In any
7096 case, the appropriate type conversions should be put back in
7097 the tree that will get out of the constant folder. */
7098 if (kind == tcc_comparison)
7099 STRIP_SIGN_NOPS (arg1);
7103 if (TREE_CODE (arg1) == COMPLEX_CST)
7104 subop = TREE_REALPART (arg1);
7108 if (TREE_CODE (subop) != INTEGER_CST
7109 && TREE_CODE (subop) != REAL_CST)
7110 /* Note that TREE_CONSTANT isn't enough:
7111 static var addresses are constant but we can't
7112 do arithmetic on them. */
7116 /* If this is a commutative operation, and ARG0 is a constant, move it
7117 to ARG1 to reduce the number of tests below. */
7118 if (commutative_tree_code (code)
7119 && tree_swap_operands_p (arg0, arg1, true))
7120 return fold_build2 (code, type, op1, op0);
7122 /* Now WINS is set as described above,
7123 ARG0 is the first operand of EXPR,
7124 and ARG1 is the second operand (if it has more than one operand).
7126 First check for cases where an arithmetic operation is applied to a
7127 compound, conditional, or comparison operation. Push the arithmetic
7128 operation inside the compound or conditional to see if any folding
7129 can then be done. Convert comparison to conditional for this purpose.
7130 This also optimizes non-constant cases that used to be done in
7133 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
7134 where one of the operands is a comparison and the other is a comparison, a
7135 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7136 code below would make the expression more complex. Change it to a
7137 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7138 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7140 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7141 || code == EQ_EXPR || code == NE_EXPR)
7142 && ((truth_value_p (TREE_CODE (arg0))
7143 && (truth_value_p (TREE_CODE (arg1))
7144 || (TREE_CODE (arg1) == BIT_AND_EXPR
7145 && integer_onep (TREE_OPERAND (arg1, 1)))))
7146 || (truth_value_p (TREE_CODE (arg1))
7147 && (truth_value_p (TREE_CODE (arg0))
7148 || (TREE_CODE (arg0) == BIT_AND_EXPR
7149 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7151 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7152 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7155 fold_convert (boolean_type_node, arg0),
7156 fold_convert (boolean_type_node, arg1));
7158 if (code == EQ_EXPR)
7159 tem = invert_truthvalue (tem);
7161 return fold_convert (type, tem);
7164 if (TREE_CODE_CLASS (code) == tcc_binary
7165 || TREE_CODE_CLASS (code) == tcc_comparison)
7167 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7168 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7169 fold_build2 (code, type,
7170 TREE_OPERAND (arg0, 1), op1));
7171 if (TREE_CODE (arg1) == COMPOUND_EXPR
7172 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7173 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7174 fold_build2 (code, type,
7175 op0, TREE_OPERAND (arg1, 1)));
7177 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7179 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7181 /*cond_first_p=*/1);
7182 if (tem != NULL_TREE)
7186 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7188 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7190 /*cond_first_p=*/0);
7191 if (tem != NULL_TREE)
7199 /* A + (-B) -> A - B */
7200 if (TREE_CODE (arg1) == NEGATE_EXPR)
7201 return fold_build2 (MINUS_EXPR, type,
7202 fold_convert (type, arg0),
7203 fold_convert (type, TREE_OPERAND (arg1, 0)));
7204 /* (-A) + B -> B - A */
7205 if (TREE_CODE (arg0) == NEGATE_EXPR
7206 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7207 return fold_build2 (MINUS_EXPR, type,
7208 fold_convert (type, arg1),
7209 fold_convert (type, TREE_OPERAND (arg0, 0)));
7210 /* Convert ~A + 1 to -A. */
7211 if (INTEGRAL_TYPE_P (type)
7212 && TREE_CODE (arg0) == BIT_NOT_EXPR
7213 && integer_onep (arg1))
7214 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7216 if (! FLOAT_TYPE_P (type))
7218 if (integer_zerop (arg1))
7219 return non_lvalue (fold_convert (type, arg0));
7221 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7222 with a constant, and the two constants have no bits in common,
7223 we should treat this as a BIT_IOR_EXPR since this may produce more
7225 if (TREE_CODE (arg0) == BIT_AND_EXPR
7226 && TREE_CODE (arg1) == BIT_AND_EXPR
7227 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7228 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7229 && integer_zerop (const_binop (BIT_AND_EXPR,
7230 TREE_OPERAND (arg0, 1),
7231 TREE_OPERAND (arg1, 1), 0)))
7233 code = BIT_IOR_EXPR;
7237 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7238 (plus (plus (mult) (mult)) (foo)) so that we can
7239 take advantage of the factoring cases below. */
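/* Illustrative example (not from the original source): (a*c + b) + d*c
   is rebuilt as (a*c + d*c) + b, so the (A * C) + (B * C) -> (A + B) * C
   case below can then factor out c.  */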
7240 if (((TREE_CODE (arg0) == PLUS_EXPR
7241 || TREE_CODE (arg0) == MINUS_EXPR)
7242 && TREE_CODE (arg1) == MULT_EXPR)
7243 || ((TREE_CODE (arg1) == PLUS_EXPR
7244 || TREE_CODE (arg1) == MINUS_EXPR)
7245 && TREE_CODE (arg0) == MULT_EXPR))
7247 tree parg0, parg1, parg, marg;
7248 enum tree_code pcode;
7250 if (TREE_CODE (arg1) == MULT_EXPR)
7251 parg = arg0, marg = arg1;
7253 parg = arg1, marg = arg0;
7254 pcode = TREE_CODE (parg);
7255 parg0 = TREE_OPERAND (parg, 0);
7256 parg1 = TREE_OPERAND (parg, 1);
7260 if (TREE_CODE (parg0) == MULT_EXPR
7261 && TREE_CODE (parg1) != MULT_EXPR)
7262 return fold_build2 (pcode, type,
7263 fold_build2 (PLUS_EXPR, type,
7264 fold_convert (type, parg0),
7265 fold_convert (type, marg)),
7266 fold_convert (type, parg1));
7267 if (TREE_CODE (parg0) != MULT_EXPR
7268 && TREE_CODE (parg1) == MULT_EXPR)
7269 return fold_build2 (PLUS_EXPR, type,
7270 fold_convert (type, parg0),
7271 fold_build2 (pcode, type,
7272 fold_convert (type, marg),
7277 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7279 tree arg00, arg01, arg10, arg11;
7280 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7282 /* (A * C) + (B * C) -> (A+B) * C.
7283 We are most concerned about the case where C is a constant,
7284 but other combinations show up during loop reduction. Since
7285 it is not difficult, try all four possibilities. */
7287 arg00 = TREE_OPERAND (arg0, 0);
7288 arg01 = TREE_OPERAND (arg0, 1);
7289 arg10 = TREE_OPERAND (arg1, 0);
7290 arg11 = TREE_OPERAND (arg1, 1);
7293 if (operand_equal_p (arg01, arg11, 0))
7294 same = arg01, alt0 = arg00, alt1 = arg10;
7295 else if (operand_equal_p (arg00, arg10, 0))
7296 same = arg00, alt0 = arg01, alt1 = arg11;
7297 else if (operand_equal_p (arg00, arg11, 0))
7298 same = arg00, alt0 = arg01, alt1 = arg10;
7299 else if (operand_equal_p (arg01, arg10, 0))
7300 same = arg01, alt0 = arg00, alt1 = arg11;
7302 /* No identical multiplicands; see if we can find a common
7303 power-of-two factor in non-power-of-two multiplies. This
7304 can help in multi-dimensional array access. */
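/* Illustrative example (not from the original source): i*12 + j*4 can be
   rewritten as (i*3 + j) * 4, exposing the common power-of-two factor 4.  */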
7305 else if (TREE_CODE (arg01) == INTEGER_CST
7306 && TREE_CODE (arg11) == INTEGER_CST
7307 && TREE_INT_CST_HIGH (arg01) == 0
7308 && TREE_INT_CST_HIGH (arg11) == 0)
7310 HOST_WIDE_INT int01, int11, tmp;
7311 int01 = TREE_INT_CST_LOW (arg01);
7312 int11 = TREE_INT_CST_LOW (arg11);
7314 /* Move min of absolute values to int11. */
7315 if ((int01 >= 0 ? int01 : -int01)
7316 < (int11 >= 0 ? int11 : -int11))
7318 tmp = int01, int01 = int11, int11 = tmp;
7319 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7320 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7323 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7325 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7326 build_int_cst (NULL_TREE,
7334 return fold_build2 (MULT_EXPR, type,
7335 fold_build2 (PLUS_EXPR, type,
7336 fold_convert (type, alt0),
7337 fold_convert (type, alt1)),
7338 fold_convert (type, same));
7341 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7342 of the array. The loop optimizer sometimes produces this type of
7344 if (TREE_CODE (arg0) == ADDR_EXPR)
7346 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7348 return fold_convert (type, tem);
7350 else if (TREE_CODE (arg1) == ADDR_EXPR)
7352 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7354 return fold_convert (type, tem);
7359 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7360 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7361 return non_lvalue (fold_convert (type, arg0));
7363 /* Likewise if the operands are reversed. */
7364 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7365 return non_lvalue (fold_convert (type, arg1));
7367 /* Convert X + -C into X - C. */
7368 if (TREE_CODE (arg1) == REAL_CST
7369 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7371 tem = fold_negate_const (arg1, type);
7372 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7373 return fold_build2 (MINUS_EXPR, type,
7374 fold_convert (type, arg0),
7375 fold_convert (type, tem));
7378 if (flag_unsafe_math_optimizations
7379 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7380 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7381 && (tem = distribute_real_division (code, type, arg0, arg1)))
7384 /* Convert x+x into x*2.0. */
7385 if (operand_equal_p (arg0, arg1, 0)
7386 && SCALAR_FLOAT_TYPE_P (type))
7387 return fold_build2 (MULT_EXPR, type, arg0,
7388 build_real (type, dconst2));
7390 /* Convert x*c+x into x*(c+1). */
7391 if (flag_unsafe_math_optimizations
7392 && TREE_CODE (arg0) == MULT_EXPR
7393 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7394 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7395 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7399 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7400 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7401 return fold_build2 (MULT_EXPR, type, arg1,
7402 build_real (type, c));
7405 /* Convert x+x*c into x*(c+1). */
7406 if (flag_unsafe_math_optimizations
7407 && TREE_CODE (arg1) == MULT_EXPR
7408 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7409 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7410 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7414 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7415 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7416 return fold_build2 (MULT_EXPR, type, arg0,
7417 build_real (type, c));
7420 /* Convert x*c1+x*c2 into x*(c1+c2). */
7421 if (flag_unsafe_math_optimizations
7422 && TREE_CODE (arg0) == MULT_EXPR
7423 && TREE_CODE (arg1) == MULT_EXPR
7424 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7425 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7426 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7427 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7428 && operand_equal_p (TREE_OPERAND (arg0, 0),
7429 TREE_OPERAND (arg1, 0), 0))
7431 REAL_VALUE_TYPE c1, c2;
7433 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7434 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7435 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7436 return fold_build2 (MULT_EXPR, type,
7437 TREE_OPERAND (arg0, 0),
7438 build_real (type, c1));
7440 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7441 if (flag_unsafe_math_optimizations
7442 && TREE_CODE (arg1) == PLUS_EXPR
7443 && TREE_CODE (arg0) != MULT_EXPR)
7445 tree tree10 = TREE_OPERAND (arg1, 0);
7446 tree tree11 = TREE_OPERAND (arg1, 1);
7447 if (TREE_CODE (tree11) == MULT_EXPR
7448 && TREE_CODE (tree10) == MULT_EXPR)
7451 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7452 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7455 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7456 if (flag_unsafe_math_optimizations
7457 && TREE_CODE (arg0) == PLUS_EXPR
7458 && TREE_CODE (arg1) != MULT_EXPR)
7460 tree tree00 = TREE_OPERAND (arg0, 0);
7461 tree tree01 = TREE_OPERAND (arg0, 1);
7462 if (TREE_CODE (tree01) == MULT_EXPR
7463 && TREE_CODE (tree00) == MULT_EXPR)
7466 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7467 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7473 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
7474 is a rotate of A by C1 bits. */
7475 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
7476 is a rotate of A by B bits. */
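/* Illustrative example (not from the original source): with a 32-bit
   unsigned x, (x << 3) + (x >> 29) is recognized as a rotate of x left
   by 3, and (x << b) + (x >> (32 - b)) as a rotate of x left by b.  */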
7478 enum tree_code code0, code1;
7479 code0 = TREE_CODE (arg0);
7480 code1 = TREE_CODE (arg1);
7481 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7482 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7483 && operand_equal_p (TREE_OPERAND (arg0, 0),
7484 TREE_OPERAND (arg1, 0), 0)
7485 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7487 tree tree01, tree11;
7488 enum tree_code code01, code11;
7490 tree01 = TREE_OPERAND (arg0, 1);
7491 tree11 = TREE_OPERAND (arg1, 1);
7492 STRIP_NOPS (tree01);
7493 STRIP_NOPS (tree11);
7494 code01 = TREE_CODE (tree01);
7495 code11 = TREE_CODE (tree11);
7496 if (code01 == INTEGER_CST
7497 && code11 == INTEGER_CST
7498 && TREE_INT_CST_HIGH (tree01) == 0
7499 && TREE_INT_CST_HIGH (tree11) == 0
7500 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7501 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7502 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7503 code0 == LSHIFT_EXPR ? tree01 : tree11);
7504 else if (code11 == MINUS_EXPR)
7506 tree tree110, tree111;
7507 tree110 = TREE_OPERAND (tree11, 0);
7508 tree111 = TREE_OPERAND (tree11, 1);
7509 STRIP_NOPS (tree110);
7510 STRIP_NOPS (tree111);
7511 if (TREE_CODE (tree110) == INTEGER_CST
7512 && 0 == compare_tree_int (tree110,
7514 (TREE_TYPE (TREE_OPERAND
7516 && operand_equal_p (tree01, tree111, 0))
7517 return build2 ((code0 == LSHIFT_EXPR
7520 type, TREE_OPERAND (arg0, 0), tree01);
7522 else if (code01 == MINUS_EXPR)
7524 tree tree010, tree011;
7525 tree010 = TREE_OPERAND (tree01, 0);
7526 tree011 = TREE_OPERAND (tree01, 1);
7527 STRIP_NOPS (tree010);
7528 STRIP_NOPS (tree011);
7529 if (TREE_CODE (tree010) == INTEGER_CST
7530 && 0 == compare_tree_int (tree010,
7532 (TREE_TYPE (TREE_OPERAND
7534 && operand_equal_p (tree11, tree011, 0))
7535 return build2 ((code0 != LSHIFT_EXPR
7538 type, TREE_OPERAND (arg0, 0), tree11);
7544 /* In most languages, can't associate operations on floats through
7545 parentheses. Rather than remember where the parentheses were, we
7546 don't associate floats at all, unless the user has specified
7547 -funsafe-math-optimizations. */
7550 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7552 tree var0, con0, lit0, minus_lit0;
7553 tree var1, con1, lit1, minus_lit1;
7555 /* Split both trees into variables, constants, and literals. Then
7556 associate each group together, the constants with literals,
7557 then the result with variables. This increases the chances of
7558 literals being recombined later and of generating relocatable
7559 expressions for the sum of a constant and literal. */
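/* Illustrative example (not from the original source): (x + 1) + (y + 2)
   splits into the variables x and y and the literals 1 and 2, which
   reassociate to (x + y) + 3.  */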
7560 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7561 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7562 code == MINUS_EXPR);
7564 /* Only do something if we found more than two objects. Otherwise,
7565 nothing has changed and we risk infinite recursion. */
7566 if (2 < ((var0 != 0) + (var1 != 0)
7567 + (con0 != 0) + (con1 != 0)
7568 + (lit0 != 0) + (lit1 != 0)
7569 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7571 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7572 if (code == MINUS_EXPR)
7575 var0 = associate_trees (var0, var1, code, type);
7576 con0 = associate_trees (con0, con1, code, type);
7577 lit0 = associate_trees (lit0, lit1, code, type);
7578 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7580 /* Preserve the MINUS_EXPR if the negative part of the literal is
7581 greater than the positive part. Otherwise, the multiplicative
7582 folding code (i.e. extract_muldiv) may be fooled when
7583 unsigned constants are subtracted, as in the following
7584 example: ((X*2 + 4) - 8U)/2. */
7585 if (minus_lit0 && lit0)
7587 if (TREE_CODE (lit0) == INTEGER_CST
7588 && TREE_CODE (minus_lit0) == INTEGER_CST
7589 && tree_int_cst_lt (lit0, minus_lit0))
7591 minus_lit0 = associate_trees (minus_lit0, lit0,
7597 lit0 = associate_trees (lit0, minus_lit0,
7605 return fold_convert (type,
7606 associate_trees (var0, minus_lit0,
7610 con0 = associate_trees (con0, minus_lit0,
7612 return fold_convert (type,
7613 associate_trees (var0, con0,
7618 con0 = associate_trees (con0, lit0, code, type);
7619 return fold_convert (type, associate_trees (var0, con0,
7626 t1 = const_binop (code, arg0, arg1, 0);
7627 if (t1 != NULL_TREE)
7629 /* The return value should always have
7630 the same type as the original expression. */
7631 if (TREE_TYPE (t1) != type)
7632 t1 = fold_convert (type, t1);
7639 /* A - (-B) -> A + B */
7640 if (TREE_CODE (arg1) == NEGATE_EXPR)
7641 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7642 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7643 if (TREE_CODE (arg0) == NEGATE_EXPR
7644 && (FLOAT_TYPE_P (type)
7645 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7646 && negate_expr_p (arg1)
7647 && reorder_operands_p (arg0, arg1))
7648 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7649 TREE_OPERAND (arg0, 0));
7650 /* Convert -A - 1 to ~A. */
7651 if (INTEGRAL_TYPE_P (type)
7652 && TREE_CODE (arg0) == NEGATE_EXPR
7653 && integer_onep (arg1))
7654 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7656 /* Convert -1 - A to ~A. */
7657 if (INTEGRAL_TYPE_P (type)
7658 && integer_all_onesp (arg0))
7659 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7661 if (! FLOAT_TYPE_P (type))
7663 if (! wins && integer_zerop (arg0))
7664 return negate_expr (fold_convert (type, arg1));
7665 if (integer_zerop (arg1))
7666 return non_lvalue (fold_convert (type, arg0));
7668 /* Fold A - (A & B) into ~B & A. */
7669 if (!TREE_SIDE_EFFECTS (arg0)
7670 && TREE_CODE (arg1) == BIT_AND_EXPR)
7672 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7673 return fold_build2 (BIT_AND_EXPR, type,
7674 fold_build1 (BIT_NOT_EXPR, type,
7675 TREE_OPERAND (arg1, 0)),
7677 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7678 return fold_build2 (BIT_AND_EXPR, type,
7679 fold_build1 (BIT_NOT_EXPR, type,
7680 TREE_OPERAND (arg1, 1)),
7684 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7685 any power of 2 minus 1. */
7686 if (TREE_CODE (arg0) == BIT_AND_EXPR
7687 && TREE_CODE (arg1) == BIT_AND_EXPR
7688 && operand_equal_p (TREE_OPERAND (arg0, 0),
7689 TREE_OPERAND (arg1, 0), 0))
7691 tree mask0 = TREE_OPERAND (arg0, 1);
7692 tree mask1 = TREE_OPERAND (arg1, 1);
7693 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7695 if (operand_equal_p (tem, mask1, 0))
7697 tem = fold_build2 (BIT_XOR_EXPR, type,
7698 TREE_OPERAND (arg0, 0), mask1);
7699 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7704 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7705 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7706 return non_lvalue (fold_convert (type, arg0));
7708 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7709 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7710 (-ARG1 + ARG0) reduces to -ARG1. */
7711 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7712 return negate_expr (fold_convert (type, arg1));
7714 /* Fold &x - &x. This can happen from &x.foo - &x.
7715 This is unsafe for certain floats even in non-IEEE formats.
7716 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7717 Also note that operand_equal_p is always false if an operand
7720 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7721 && operand_equal_p (arg0, arg1, 0))
7722 return fold_convert (type, integer_zero_node);
7724 /* A - B -> A + (-B) if B is easily negatable. */
7725 if (!wins && negate_expr_p (arg1)
7726 && ((FLOAT_TYPE_P (type)
7727 /* Avoid this transformation if B is a positive REAL_CST. */
7728 && (TREE_CODE (arg1) != REAL_CST
7729 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7730 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7731 return fold_build2 (PLUS_EXPR, type,
7732 fold_convert (type, arg0),
7733 fold_convert (type, negate_expr (arg1)));
7735 /* Try folding difference of addresses. */
7739 if ((TREE_CODE (arg0) == ADDR_EXPR
7740 || TREE_CODE (arg1) == ADDR_EXPR)
7741 && ptr_difference_const (arg0, arg1, &diff))
7742 return build_int_cst_type (type, diff);
7745 /* Fold &a[i] - &a[j] to (i-j) scaled by the element size of a. */
7746 if (TREE_CODE (arg0) == ADDR_EXPR
7747 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7748 && TREE_CODE (arg1) == ADDR_EXPR
7749 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7751 tree aref0 = TREE_OPERAND (arg0, 0);
7752 tree aref1 = TREE_OPERAND (arg1, 0);
7753 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7754 TREE_OPERAND (aref1, 0), 0))
7756 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7757 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7758 tree esz = array_ref_element_size (aref0);
7759 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7760 return fold_build2 (MULT_EXPR, type, diff,
7761 fold_convert (type, esz));
7766 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7767 of the array. The loop optimizer sometimes produces this type of
7769 if (TREE_CODE (arg0) == ADDR_EXPR)
7771 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7773 return fold_convert (type, tem);
7776 if (flag_unsafe_math_optimizations
7777 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7778 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7779 && (tem = distribute_real_division (code, type, arg0, arg1)))
7782 if (TREE_CODE (arg0) == MULT_EXPR
7783 && TREE_CODE (arg1) == MULT_EXPR
7784 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7786 /* (A * C) - (B * C) -> (A-B) * C. */
7787 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7788 TREE_OPERAND (arg1, 1), 0))
7789 return fold_build2 (MULT_EXPR, type,
7790 fold_build2 (MINUS_EXPR, type,
7791 TREE_OPERAND (arg0, 0),
7792 TREE_OPERAND (arg1, 0)),
7793 TREE_OPERAND (arg0, 1));
7794 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7795 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7796 TREE_OPERAND (arg1, 0), 0))
7797 return fold_build2 (MULT_EXPR, type,
7798 TREE_OPERAND (arg0, 0),
7799 fold_build2 (MINUS_EXPR, type,
7800 TREE_OPERAND (arg0, 1),
7801 TREE_OPERAND (arg1, 1)));
7807 /* (-A) * (-B) -> A * B */
7808 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7809 return fold_build2 (MULT_EXPR, type,
7810 fold_convert (type, TREE_OPERAND (arg0, 0)),
7811 fold_convert (type, negate_expr (arg1)));
7812 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7813 return fold_build2 (MULT_EXPR, type,
7814 fold_convert (type, negate_expr (arg0)),
7815 fold_convert (type, TREE_OPERAND (arg1, 0)));
7817 if (! FLOAT_TYPE_P (type))
7819 if (integer_zerop (arg1))
7820 return omit_one_operand (type, arg1, arg0);
7821 if (integer_onep (arg1))
7822 return non_lvalue (fold_convert (type, arg0));
7823 /* Transform x * -1 into -x. */
7824 if (integer_all_onesp (arg1))
7825 return fold_convert (type, negate_expr (arg0));
7827 /* (a * (1 << b)) is (a << b) */
7828 if (TREE_CODE (arg1) == LSHIFT_EXPR
7829 && integer_onep (TREE_OPERAND (arg1, 0)))
7830 return fold_build2 (LSHIFT_EXPR, type, arg0,
7831 TREE_OPERAND (arg1, 1));
7832 if (TREE_CODE (arg0) == LSHIFT_EXPR
7833 && integer_onep (TREE_OPERAND (arg0, 0)))
7834 return fold_build2 (LSHIFT_EXPR, type, arg1,
7835 TREE_OPERAND (arg0, 1));
7837 if (TREE_CODE (arg1) == INTEGER_CST
7838 && 0 != (tem = extract_muldiv (op0,
7839 fold_convert (type, arg1),
7841 return fold_convert (type, tem);
7846 /* Maybe fold x * 0 to 0. The expressions aren't the same
7847 when x is NaN, since x * 0 is also NaN. Nor are they the
7848 same in modes with signed zeros, since multiplying a
7849 negative value by 0 gives -0, not +0. */
7850 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7851 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7852 && real_zerop (arg1))
7853 return omit_one_operand (type, arg1, arg0);
7854 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7855 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7856 && real_onep (arg1))
7857 return non_lvalue (fold_convert (type, arg0));
7859 /* Transform x * -1.0 into -x. */
7860 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7861 && real_minus_onep (arg1))
7862 return fold_convert (type, negate_expr (arg0));
7864 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7865 if (flag_unsafe_math_optimizations
7866 && TREE_CODE (arg0) == RDIV_EXPR
7867 && TREE_CODE (arg1) == REAL_CST
7868 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7870 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7873 return fold_build2 (RDIV_EXPR, type, tem,
7874 TREE_OPERAND (arg0, 1));
7877 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7878 if (operand_equal_p (arg0, arg1, 0))
7880 tree tem = fold_strip_sign_ops (arg0);
7881 if (tem != NULL_TREE)
7883 tem = fold_convert (type, tem);
7884 return fold_build2 (MULT_EXPR, type, tem, tem);
7888 if (flag_unsafe_math_optimizations)
7890 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7891 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7893 /* Optimizations of root(...)*root(...). */
7894 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7896 tree rootfn, arg, arglist;
7897 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7898 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7900 /* Optimize sqrt(x)*sqrt(x) as x. */
7901 if (BUILTIN_SQRT_P (fcode0)
7902 && operand_equal_p (arg00, arg10, 0)
7903 && ! HONOR_SNANS (TYPE_MODE (type)))
7906 /* Optimize root(x)*root(y) as root(x*y). */
7907 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7908 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7909 arglist = build_tree_list (NULL_TREE, arg);
7910 return build_function_call_expr (rootfn, arglist);
7913 /* Optimize expN(x)*expN(y) as expN(x+y). */
7914 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7916 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7917 tree arg = fold_build2 (PLUS_EXPR, type,
7918 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7919 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7920 tree arglist = build_tree_list (NULL_TREE, arg);
7921 return build_function_call_expr (expfn, arglist);
7924 /* Optimizations of pow(...)*pow(...). */
7925 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7926 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7927 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7929 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7930 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7932 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7933 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7936 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7937 if (operand_equal_p (arg01, arg11, 0))
7939 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7940 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7941 tree arglist = tree_cons (NULL_TREE, arg,
7942 build_tree_list (NULL_TREE,
7944 return build_function_call_expr (powfn, arglist);
7947 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7948 if (operand_equal_p (arg00, arg10, 0))
7950 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7951 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7952 tree arglist = tree_cons (NULL_TREE, arg00,
7953 build_tree_list (NULL_TREE,
7955 return build_function_call_expr (powfn, arglist);
7959 /* Optimize tan(x)*cos(x) as sin(x). */
7960 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7961 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7962 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7963 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7964 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7965 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7966 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7967 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7969 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7971 if (sinfn != NULL_TREE)
7972 return build_function_call_expr (sinfn,
7973 TREE_OPERAND (arg0, 1));
7976 /* Optimize x*pow(x,c) as pow(x,c+1). */
7977 if (fcode1 == BUILT_IN_POW
7978 || fcode1 == BUILT_IN_POWF
7979 || fcode1 == BUILT_IN_POWL)
7981 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7982 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7984 if (TREE_CODE (arg11) == REAL_CST
7985 && ! TREE_CONSTANT_OVERFLOW (arg11)
7986 && operand_equal_p (arg0, arg10, 0))
7988 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7992 c = TREE_REAL_CST (arg11);
7993 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7994 arg = build_real (type, c);
7995 arglist = build_tree_list (NULL_TREE, arg);
7996 arglist = tree_cons (NULL_TREE, arg0, arglist);
7997 return build_function_call_expr (powfn, arglist);
8001 /* Optimize pow(x,c)*x as pow(x,c+1). */
8002 if (fcode0 == BUILT_IN_POW
8003 || fcode0 == BUILT_IN_POWF
8004 || fcode0 == BUILT_IN_POWL)
8006 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8007 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8009 if (TREE_CODE (arg01) == REAL_CST
8010 && ! TREE_CONSTANT_OVERFLOW (arg01)
8011 && operand_equal_p (arg1, arg00, 0))
8013 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8017 c = TREE_REAL_CST (arg01);
8018 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8019 arg = build_real (type, c);
8020 arglist = build_tree_list (NULL_TREE, arg);
8021 arglist = tree_cons (NULL_TREE, arg1, arglist);
8022 return build_function_call_expr (powfn, arglist);
8026 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8028 && operand_equal_p (arg0, arg1, 0))
8030 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8034 tree arg = build_real (type, dconst2);
8035 tree arglist = build_tree_list (NULL_TREE, arg);
8036 arglist = tree_cons (NULL_TREE, arg0, arglist);
8037 return build_function_call_expr (powfn, arglist);
8046 if (integer_all_onesp (arg1))
8047 return omit_one_operand (type, arg1, arg0);
8048 if (integer_zerop (arg1))
8049 return non_lvalue (fold_convert (type, arg0));
8050 if (operand_equal_p (arg0, arg1, 0))
8051 return non_lvalue (fold_convert (type, arg0));
8054 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8055 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8057 t1 = build_int_cst (type, -1);
8058 t1 = force_fit_type (t1, 0, false, false);
8059 return omit_one_operand (type, t1, arg1);
8063 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8064 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8066 t1 = build_int_cst (type, -1);
8067 t1 = force_fit_type (t1, 0, false, false);
8068 return omit_one_operand (type, t1, arg0);
8071 t1 = distribute_bit_expr (code, type, arg0, arg1);
8072 if (t1 != NULL_TREE)
8075 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8077 This results in more efficient code for machines without a NAND
8078 instruction. Combine will canonicalize to the first form
8079 which will allow use of NAND instructions provided by the
8080 backend if they exist. */
8081 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8082 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8084 return fold_build1 (BIT_NOT_EXPR, type,
8085 build2 (BIT_AND_EXPR, type,
8086 TREE_OPERAND (arg0, 0),
8087 TREE_OPERAND (arg1, 0)));
8090 /* See if this can be simplified into a rotate first. If that
8091 is unsuccessful continue in the association code. */
8095 if (integer_zerop (arg1))
8096 return non_lvalue (fold_convert (type, arg0));
8097 if (integer_all_onesp (arg1))
8098 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8099 if (operand_equal_p (arg0, arg1, 0))
8100 return omit_one_operand (type, integer_zero_node, arg0);
8103 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8104 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8106 t1 = build_int_cst (type, -1);
8107 t1 = force_fit_type (t1, 0, false, false);
8108 return omit_one_operand (type, t1, arg1);
8112 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8113 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8115 t1 = build_int_cst (type, -1);
8116 t1 = force_fit_type (t1, 0, false, false);
8117 return omit_one_operand (type, t1, arg0);
8120 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8121 with a constant, and the two constants have no bits in common,
8122 we should treat this as a BIT_IOR_EXPR since this may produce more
     simplifications.  */
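/* For example, (a & 4) ^ (b & 3) is treated as (a & 4) | (b & 3),
   since the two masked values can never have a set bit in the same
   position.  */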
8124 if (TREE_CODE (arg0) == BIT_AND_EXPR
8125 && TREE_CODE (arg1) == BIT_AND_EXPR
8126 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8127 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8128 && integer_zerop (const_binop (BIT_AND_EXPR,
8129 TREE_OPERAND (arg0, 1),
8130 TREE_OPERAND (arg1, 1), 0)))
8132 code = BIT_IOR_EXPR;
8136 /* (X | Y) ^ X -> Y & ~X  */
8137 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8138 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8140 tree t2 = TREE_OPERAND (arg0, 1);
8141 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8143 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8144 fold_convert (type, t1));
8148 /* (Y | X) ^ X -> Y & ~X  */
8149 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8150 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8152 tree t2 = TREE_OPERAND (arg0, 0);
8153 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8155 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8156 fold_convert (type, t1));
8160 /* X ^ (X | Y) -> Y & ~X  */
8161 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8162 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8164 tree t2 = TREE_OPERAND (arg1, 1);
8165 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8167 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8168 fold_convert (type, t1));
8172 /* X ^ (Y | X) -> Y & ~X  */
8173 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8174 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8176 tree t2 = TREE_OPERAND (arg1, 0);
8177 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8179 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8180 fold_convert (type, t1));
8184 /* Convert ~X ^ ~Y to X ^ Y. */
8185 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8186 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8187 return fold_build2 (code, type,
8188 fold_convert (type, TREE_OPERAND (arg0, 0)),
8189 fold_convert (type, TREE_OPERAND (arg1, 0)));
8191 /* See if this can be simplified into a rotate first. If that
8192 is unsuccessful continue in the association code. */
8196 if (integer_all_onesp (arg1))
8197 return non_lvalue (fold_convert (type, arg0));
8198 if (integer_zerop (arg1))
8199 return omit_one_operand (type, arg1, arg0);
8200 if (operand_equal_p (arg0, arg1, 0))
8201 return non_lvalue (fold_convert (type, arg0));
8203 /* ~X & X is always zero. */
8204 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8205 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8206 return omit_one_operand (type, integer_zero_node, arg1);
8208 /* X & ~X is always zero. */
8209 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8210 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8211 return omit_one_operand (type, integer_zero_node, arg0);
8213 t1 = distribute_bit_expr (code, type, arg0, arg1);
8214 if (t1 != NULL_TREE)
8216 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8217 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8218 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8221 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8223 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8224 && (~TREE_INT_CST_LOW (arg1)
8225 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8226 return fold_convert (type, TREE_OPERAND (arg0, 0));
8229 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8231 This results in more efficient code for machines without a NOR
8232 instruction. Combine will canonicalize to the first form
8233 which will allow use of NOR instructions provided by the
8234 backend if they exist. */
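/* For example, ~a & ~b is rewritten here as ~(a | b).  */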
8235 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8236 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8238 return fold_build1 (BIT_NOT_EXPR, type,
8239 build2 (BIT_IOR_EXPR, type,
8240 TREE_OPERAND (arg0, 0),
8241 TREE_OPERAND (arg1, 0)));
8247 /* Don't touch a floating-point divide by zero unless the mode
8248 of the constant can represent infinity. */
8249 if (TREE_CODE (arg1) == REAL_CST
8250 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8251 && real_zerop (arg1))
8254 /* (-A) / (-B) -> A / B */
8255 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8256 return fold_build2 (RDIV_EXPR, type,
8257 TREE_OPERAND (arg0, 0),
8258 negate_expr (arg1));
8259 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8260 return fold_build2 (RDIV_EXPR, type,
8262 TREE_OPERAND (arg1, 0));
8264 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8265 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8266 && real_onep (arg1))
8267 return non_lvalue (fold_convert (type, arg0));
8269 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8270 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8271 && real_minus_onep (arg1))
8272 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8274 /* If ARG1 is a constant, we can convert this to a multiply by the
8275 reciprocal. This does not have the same rounding properties,
8276 so only do this if -funsafe-math-optimizations. We can actually
8277 always safely do it if ARG1 is a power of two, but it's hard to
8278 tell if it is or not in a portable manner. */
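/* For example, x / 5.0 becomes x * 0.2 only under
   -funsafe-math-optimizations, whereas x / 4.0 can become x * 0.25
   even without that flag (when optimizing), because 0.25 is the
   exact reciprocal and exact_real_inverse verifies this.  */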
8279 if (TREE_CODE (arg1) == REAL_CST)
8281 if (flag_unsafe_math_optimizations
8282 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8284 return fold_build2 (MULT_EXPR, type, arg0, tem);
8285 /* Find the reciprocal if optimizing and the result is exact. */
8289 r = TREE_REAL_CST (arg1);
8290 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8292 tem = build_real (type, r);
8293 return fold_build2 (MULT_EXPR, type,
8294 fold_convert (type, arg0), tem);
8298 /* Convert A/B/C to A/(B*C). */
8299 if (flag_unsafe_math_optimizations
8300 && TREE_CODE (arg0) == RDIV_EXPR)
8301 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8302 fold_build2 (MULT_EXPR, type,
8303 TREE_OPERAND (arg0, 1), arg1));
8305 /* Convert A/(B/C) to (A/B)*C. */
8306 if (flag_unsafe_math_optimizations
8307 && TREE_CODE (arg1) == RDIV_EXPR)
8308 return fold_build2 (MULT_EXPR, type,
8309 fold_build2 (RDIV_EXPR, type, arg0,
8310 TREE_OPERAND (arg1, 0)),
8311 TREE_OPERAND (arg1, 1));
8313 /* Convert C1/(X*C2) into (C1/C2)/X. */
8314 if (flag_unsafe_math_optimizations
8315 && TREE_CODE (arg1) == MULT_EXPR
8316 && TREE_CODE (arg0) == REAL_CST
8317 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8319 tree tem = const_binop (RDIV_EXPR, arg0,
8320 TREE_OPERAND (arg1, 1), 0);
8322 return fold_build2 (RDIV_EXPR, type, tem,
8323 TREE_OPERAND (arg1, 0));
8326 if (flag_unsafe_math_optimizations)
8328 enum built_in_function fcode = builtin_mathfn_code (arg1);
8329 /* Optimize x/expN(y) into x*expN(-y). */
8330 if (BUILTIN_EXPONENT_P (fcode))
8332 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8333 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8334 tree arglist = build_tree_list (NULL_TREE,
8335 fold_convert (type, arg));
8336 arg1 = build_function_call_expr (expfn, arglist);
8337 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8340 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8341 if (fcode == BUILT_IN_POW
8342 || fcode == BUILT_IN_POWF
8343 || fcode == BUILT_IN_POWL)
8345 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8346 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8347 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8348 tree neg11 = fold_convert (type, negate_expr (arg11));
8349 tree arglist = tree_cons(NULL_TREE, arg10,
8350 build_tree_list (NULL_TREE, neg11));
8351 arg1 = build_function_call_expr (powfn, arglist);
8352 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8356 if (flag_unsafe_math_optimizations)
8358 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8359 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8361 /* Optimize sin(x)/cos(x) as tan(x). */
8362 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8363 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8364 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8365 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8366 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8368 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8370 if (tanfn != NULL_TREE)
8371 return build_function_call_expr (tanfn,
8372 TREE_OPERAND (arg0, 1));
8375 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8376 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8377 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8378 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8379 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8380 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8382 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8384 if (tanfn != NULL_TREE)
8386 tree tmp = TREE_OPERAND (arg0, 1);
8387 tmp = build_function_call_expr (tanfn, tmp);
8388 return fold_build2 (RDIV_EXPR, type,
8389 build_real (type, dconst1), tmp);
8393 /* Optimize pow(x,c)/x as pow(x,c-1). */
8394 if (fcode0 == BUILT_IN_POW
8395 || fcode0 == BUILT_IN_POWF
8396 || fcode0 == BUILT_IN_POWL)
8398 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8399 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8400 if (TREE_CODE (arg01) == REAL_CST
8401 && ! TREE_CONSTANT_OVERFLOW (arg01)
8402 && operand_equal_p (arg1, arg00, 0))
8404 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8408 c = TREE_REAL_CST (arg01);
8409 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8410 arg = build_real (type, c);
8411 arglist = build_tree_list (NULL_TREE, arg);
8412 arglist = tree_cons (NULL_TREE, arg1, arglist);
8413 return build_function_call_expr (powfn, arglist);
8419 case TRUNC_DIV_EXPR:
8420 case ROUND_DIV_EXPR:
8421 case FLOOR_DIV_EXPR:
8423 case EXACT_DIV_EXPR:
8424 if (integer_onep (arg1))
8425 return non_lvalue (fold_convert (type, arg0));
8426 if (integer_zerop (arg1))
8429 if (!TYPE_UNSIGNED (type)
8430 && TREE_CODE (arg1) == INTEGER_CST
8431 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8432 && TREE_INT_CST_HIGH (arg1) == -1)
8433 return fold_convert (type, negate_expr (arg0));
8435 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8436 operation, EXACT_DIV_EXPR.
8438 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8439 At one time others generated faster code, but it's not clear whether they
8440 still do after the last round of changes to the DIV code in expmed.c.  */
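/* For example, if arg0 is known to be n * 8, then a CEIL_DIV_EXPR or
   FLOOR_DIV_EXPR of it by 8 is rewritten below as EXACT_DIV_EXPR,
   since multiple_of_p guarantees there is no remainder.  */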
8441 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8442 && multiple_of_p (type, arg0, arg1))
8443 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8445 if (TREE_CODE (arg1) == INTEGER_CST
8446 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8447 return fold_convert (type, tem);
8452 case FLOOR_MOD_EXPR:
8453 case ROUND_MOD_EXPR:
8454 case TRUNC_MOD_EXPR:
8455 /* X % 1 is always zero, but be sure to preserve any side effects in X.  */
8457 if (integer_onep (arg1))
8458 return omit_one_operand (type, integer_zero_node, arg0);
8460 /* X % 0, return X % 0 unchanged so that we can get the
8461 proper warnings and errors. */
8462 if (integer_zerop (arg1))
8465 /* 0 % X is always zero, but be sure to preserve any side
8466 effects in X. Place this after checking for X == 0. */
8467 if (integer_zerop (arg0))
8468 return omit_one_operand (type, integer_zero_node, arg1);
8470 /* X % -1 is zero. */
8471 if (!TYPE_UNSIGNED (type)
8472 && TREE_CODE (arg1) == INTEGER_CST
8473 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8474 && TREE_INT_CST_HIGH (arg1) == -1)
8475 return omit_one_operand (type, integer_zero_node, arg0);
8477 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8478 i.e. "X % C" into "X & C2", if X and C are positive. */
8479 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8480 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8481 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8483 unsigned HOST_WIDE_INT high, low;
8487 l = tree_log2 (arg1);
8488 if (l >= HOST_BITS_PER_WIDE_INT)
8490 high = ((unsigned HOST_WIDE_INT) 1
8491 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8497 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8500 mask = build_int_cst_wide (type, low, high);
8501 return fold_build2 (BIT_AND_EXPR, type,
8502 fold_convert (type, arg0), mask);
8505 /* X % -C is the same as X % C. */
8506 if (code == TRUNC_MOD_EXPR
8507 && !TYPE_UNSIGNED (type)
8508 && TREE_CODE (arg1) == INTEGER_CST
8509 && !TREE_CONSTANT_OVERFLOW (arg1)
8510 && TREE_INT_CST_HIGH (arg1) < 0
8512 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8513 && !sign_bit_p (arg1, arg1))
8514 return fold_build2 (code, type, fold_convert (type, arg0),
8515 fold_convert (type, negate_expr (arg1)));
8517 /* X % -Y is the same as X % Y. */
8518 if (code == TRUNC_MOD_EXPR
8519 && !TYPE_UNSIGNED (type)
8520 && TREE_CODE (arg1) == NEGATE_EXPR
8522 return fold_build2 (code, type, fold_convert (type, arg0),
8523 fold_convert (type, TREE_OPERAND (arg1, 0)));
8525 if (TREE_CODE (arg1) == INTEGER_CST
8526 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8527 return fold_convert (type, tem);
8533 if (integer_all_onesp (arg0))
8534 return omit_one_operand (type, arg0, arg1);
8538 /* Optimize -1 >> x for arithmetic right shifts. */
8539 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8540 return omit_one_operand (type, arg0, arg1);
8541 /* ... fall through ... */
8545 if (integer_zerop (arg1))
8546 return non_lvalue (fold_convert (type, arg0));
8547 if (integer_zerop (arg0))
8548 return omit_one_operand (type, arg0, arg1);
8550 /* Since negative shift count is not well-defined,
8551 don't try to compute it in the compiler. */
8552 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8555 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
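/* For example, (x << 3) << 4 becomes x << 7.  When c1 + c2 reaches
   or exceeds the type precision the cases below apply: rotate counts
   wrap modulo the precision, left shifts and unsigned right shifts
   fold to zero, and signed right shifts clamp to precision - 1.  */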
8556 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
8557 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8558 && host_integerp (TREE_OPERAND (arg0, 1), false)
8559 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8561 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8562 + TREE_INT_CST_LOW (arg1));
8564 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8565 being well defined. */
8566 if (low >= TYPE_PRECISION (type))
8568 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8569 low = low % TYPE_PRECISION (type);
8570 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8571 return build_int_cst (type, 0);
8573 low = TYPE_PRECISION (type) - 1;
8576 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8577 build_int_cst (type, low));
8580 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8581 into x & ((unsigned)-1 >> c) for unsigned types. */
8582 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8583 || (TYPE_UNSIGNED (type)
8584 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8585 && host_integerp (arg1, false)
8586 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8587 && host_integerp (TREE_OPERAND (arg0, 1), false)
8588 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8590 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8591 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8597 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8599 lshift = build_int_cst (type, -1);
8600 lshift = int_const_binop (code, lshift, arg1, 0);
8602 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8606 /* Rewrite an LROTATE_EXPR by a constant into an
8607 RROTATE_EXPR by a new constant. */
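/* For example, in a 32-bit mode a left rotate by 8 becomes a right
   rotate by 24, i.e. by the mode bitsize minus the original count.  */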
8608 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8610 tree tem = build_int_cst (NULL_TREE,
8611 GET_MODE_BITSIZE (TYPE_MODE (type)));
8612 tem = fold_convert (TREE_TYPE (arg1), tem);
8613 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8614 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8617 /* If we have a rotate of a bit operation with the rotate count and
8618 the second operand of the bit operation both constant,
8619 permute the two operations. */
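/* For example, a right rotate of (x & 0xFF00) by 8 becomes the
   right rotate of x by 8, masked with the right rotate of 0xFF00 by
   8; the rotate of the constant then folds at compile time.  */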
8620 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8621 && (TREE_CODE (arg0) == BIT_AND_EXPR
8622 || TREE_CODE (arg0) == BIT_IOR_EXPR
8623 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8624 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8625 return fold_build2 (TREE_CODE (arg0), type,
8626 fold_build2 (code, type,
8627 TREE_OPERAND (arg0, 0), arg1),
8628 fold_build2 (code, type,
8629 TREE_OPERAND (arg0, 1), arg1));
8631 /* Two consecutive rotates adding up to the width of the mode can be ignored.  */
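/* For example, in a 32-bit mode, rotating right by 8 and then right
   by 24 returns every bit to its original position, so the inner
   operand is returned unchanged.  */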
8633 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8634 && TREE_CODE (arg0) == RROTATE_EXPR
8635 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8636 && TREE_INT_CST_HIGH (arg1) == 0
8637 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8638 && ((TREE_INT_CST_LOW (arg1)
8639 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8640 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8641 return TREE_OPERAND (arg0, 0);
8646 if (operand_equal_p (arg0, arg1, 0))
8647 return omit_one_operand (type, arg0, arg1);
8648 if (INTEGRAL_TYPE_P (type)
8649 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8650 return omit_one_operand (type, arg1, arg0);
8654 if (operand_equal_p (arg0, arg1, 0))
8655 return omit_one_operand (type, arg0, arg1);
8656 if (INTEGRAL_TYPE_P (type)
8657 && TYPE_MAX_VALUE (type)
8658 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8659 return omit_one_operand (type, arg1, arg0);
8662 case TRUTH_ANDIF_EXPR:
8663 /* Note that the operands of this must be ints
8664 and their values must be 0 or 1.
8665 ("true" is a fixed value perhaps depending on the language.) */
8666 /* If first arg is constant zero, return it. */
8667 if (integer_zerop (arg0))
8668 return fold_convert (type, arg0);
8669 case TRUTH_AND_EXPR:
8670 /* If either arg is constant true, drop it. */
8671 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8672 return non_lvalue (fold_convert (type, arg1));
8673 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8674 /* Preserve sequence points. */
8675 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8676 return non_lvalue (fold_convert (type, arg0));
8677 /* If second arg is constant zero, result is zero, but first arg
8678 must be evaluated. */
8679 if (integer_zerop (arg1))
8680 return omit_one_operand (type, arg1, arg0);
8681 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8682 case will be handled here. */
8683 if (integer_zerop (arg0))
8684 return omit_one_operand (type, arg0, arg1);
8686 /* !X && X is always false. */
8687 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8688 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8689 return omit_one_operand (type, integer_zero_node, arg1);
8690 /* X && !X is always false. */
8691 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8692 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8693 return omit_one_operand (type, integer_zero_node, arg0);
8695 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8696 means A >= Y && A != MAX, but in this case we know that A < X <= MAX.  */
8699 if (!TREE_SIDE_EFFECTS (arg0)
8700 && !TREE_SIDE_EFFECTS (arg1))
8702 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8703 if (tem && !operand_equal_p (tem, arg0, 0))
8704 return fold_build2 (code, type, tem, arg1);
8706 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8707 if (tem && !operand_equal_p (tem, arg1, 0))
8708 return fold_build2 (code, type, arg0, tem);
8712 /* We only do these simplifications if we are optimizing. */
8716 /* Check for things like (A || B) && (A || C). We can convert this
8717 to A || (B && C). Note that either operator can be any of the four
8718 truth and/or operations and the transformation will still be
8719 valid. Also note that we only care about order for the
8720 ANDIF and ORIF operators. If B contains side effects, this
8721 might change the truth-value of A. */
8722 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8723 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8724 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8725 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8726 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8727 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8729 tree a00 = TREE_OPERAND (arg0, 0);
8730 tree a01 = TREE_OPERAND (arg0, 1);
8731 tree a10 = TREE_OPERAND (arg1, 0);
8732 tree a11 = TREE_OPERAND (arg1, 1);
8733 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8734 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8735 && (code == TRUTH_AND_EXPR
8736 || code == TRUTH_OR_EXPR));
8738 if (operand_equal_p (a00, a10, 0))
8739 return fold_build2 (TREE_CODE (arg0), type, a00,
8740 fold_build2 (code, type, a01, a11));
8741 else if (commutative && operand_equal_p (a00, a11, 0))
8742 return fold_build2 (TREE_CODE (arg0), type, a00,
8743 fold_build2 (code, type, a01, a10));
8744 else if (commutative && operand_equal_p (a01, a10, 0))
8745 return fold_build2 (TREE_CODE (arg0), type, a01,
8746 fold_build2 (code, type, a00, a11));
8748 /* This case is tricky because we must either have commutative
8749 operators or else A10 must not have side-effects. */
8751 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8752 && operand_equal_p (a01, a11, 0))
8753 return fold_build2 (TREE_CODE (arg0), type,
8754 fold_build2 (code, type, a00, a10),
8758 /* See if we can build a range comparison. */
8759 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8762 /* Check for the possibility of merging component references. If our
8763 lhs is another similar operation, try to merge its rhs with our
8764 rhs. Then try to merge our lhs and rhs. */
8765 if (TREE_CODE (arg0) == code
8766 && 0 != (tem = fold_truthop (code, type,
8767 TREE_OPERAND (arg0, 1), arg1)))
8768 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8770 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8775 case TRUTH_ORIF_EXPR:
8776 /* Note that the operands of this must be ints
8777 and their values must be 0 or true.
8778 ("true" is a fixed value perhaps depending on the language.) */
8779 /* If first arg is constant true, return it. */
8780 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8781 return fold_convert (type, arg0);
8783 /* If either arg is constant zero, drop it. */
8784 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8785 return non_lvalue (fold_convert (type, arg1));
8786 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8787 /* Preserve sequence points. */
8788 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8789 return non_lvalue (fold_convert (type, arg0));
8790 /* If second arg is constant true, result is true, but we must
8791 evaluate first arg. */
8792 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8793 return omit_one_operand (type, arg1, arg0);
8794 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR.  */
8796 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8797 return omit_one_operand (type, arg0, arg1);
8799 /* !X || X is always true. */
8800 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8801 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8802 return omit_one_operand (type, integer_one_node, arg1);
8803 /* X || !X is always true. */
8804 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8805 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8806 return omit_one_operand (type, integer_one_node, arg0);
8810 case TRUTH_XOR_EXPR:
8811 /* If the second arg is constant zero, drop it. */
8812 if (integer_zerop (arg1))
8813 return non_lvalue (fold_convert (type, arg0));
8814 /* If the second arg is constant true, this is a logical inversion. */
8815 if (integer_onep (arg1))
8817 /* Only call invert_truthvalue if operand is a truth value. */
8818 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8819 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8821 tem = invert_truthvalue (arg0);
8822 return non_lvalue (fold_convert (type, tem));
8824 /* Identical arguments cancel to zero. */
8825 if (operand_equal_p (arg0, arg1, 0))
8826 return omit_one_operand (type, integer_zero_node, arg0);
8828 /* !X ^ X is always true. */
8829 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8830 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8831 return omit_one_operand (type, integer_one_node, arg1);
8833 /* X ^ !X is always true. */
8834 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8835 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8836 return omit_one_operand (type, integer_one_node, arg0);
8846 /* If one arg is a real or integer constant, put it last. */
8847 if (tree_swap_operands_p (arg0, arg1, true))
8848 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8850 /* bool_var != 0 becomes bool_var. */
8851 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8853 return non_lvalue (fold_convert (type, arg0));
8855 /* bool_var == 1 becomes bool_var. */
8856 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8858 return non_lvalue (fold_convert (type, arg0));
8860 /* If this is an equality comparison of the address of a non-weak
8861 object against zero, then we know the result. */
8862 if ((code == EQ_EXPR || code == NE_EXPR)
8863 && TREE_CODE (arg0) == ADDR_EXPR
8864 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8865 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8866 && integer_zerop (arg1))
8867 return constant_boolean_node (code != EQ_EXPR, type);
8869 /* If this is an equality comparison of the address of two non-weak,
8870 unaliased symbols neither of which are extern (since we do not
8871 have access to attributes for externs), then we know the result. */
8872 if ((code == EQ_EXPR || code == NE_EXPR)
8873 && TREE_CODE (arg0) == ADDR_EXPR
8874 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8875 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8876 && ! lookup_attribute ("alias",
8877 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8878 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8879 && TREE_CODE (arg1) == ADDR_EXPR
8880 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8881 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8882 && ! lookup_attribute ("alias",
8883 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8884 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8886 /* We know that we're looking at the address of two
8887 non-weak, unaliased, static _DECL nodes.
8889 It is both wasteful and incorrect to call operand_equal_p
8890 to compare the two ADDR_EXPR nodes. It is wasteful in that
8891 all we need to do is test pointer equality for the arguments
8892 to the two ADDR_EXPR nodes. It is incorrect to use
8893 operand_equal_p as that function is NOT equivalent to a
8894 C equality test. It can in fact return false for two
8895 objects which would test as equal using the C equality operator.  */
8897 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8898 return constant_boolean_node (equal
8899 ? code == EQ_EXPR : code != EQ_EXPR,
8903 /* If this is a comparison of two exprs that look like an
8904 ARRAY_REF of the same object, then we can fold this to a
8905 comparison of the two offsets. This is only safe for
8906 EQ_EXPR and NE_EXPR because of overflow issues. */
8907 if (code == EQ_EXPR || code == NE_EXPR)
8909 tree base0, offset0, base1, offset1;
8911 if (extract_array_ref (arg0, &base0, &offset0)
8912 && extract_array_ref (arg1, &base1, &offset1)
8913 && operand_equal_p (base0, base1, 0))
8915 /* Handle no offsets on both sides specially. */
8916 if (offset0 == NULL_TREE
8917 && offset1 == NULL_TREE)
8918 return fold_build2 (code, type, integer_zero_node,
8921 if (!offset0 || !offset1
8922 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
8924 if (offset0 == NULL_TREE)
8925 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8926 if (offset1 == NULL_TREE)
8927 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8928 return fold_build2 (code, type, offset0, offset1);
8933 /* Transform comparisons of the form X +- C CMP X. */
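/* For example, for signed x without -fwrapv, x + 1 > x folds to
   true and x - 1 > x folds to false; (x - c) <= x with a
   nonnegative constant c likewise folds to true when NaNs need not
   be honored.  */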
8934 if ((code != EQ_EXPR && code != NE_EXPR)
8935 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8936 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8937 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8938 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8939 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8940 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8941 && !(flag_wrapv || flag_trapv))))
8943 tree arg01 = TREE_OPERAND (arg0, 1);
8944 enum tree_code code0 = TREE_CODE (arg0);
8947 if (TREE_CODE (arg01) == REAL_CST)
8948 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8950 is_positive = tree_int_cst_sgn (arg01);
8952 /* (X - c) > X becomes false. */
8954 && ((code0 == MINUS_EXPR && is_positive >= 0)
8955 || (code0 == PLUS_EXPR && is_positive <= 0)))
8956 return constant_boolean_node (0, type);
8958 /* Likewise (X + c) < X becomes false. */
8960 && ((code0 == PLUS_EXPR && is_positive >= 0)
8961 || (code0 == MINUS_EXPR && is_positive <= 0)))
8962 return constant_boolean_node (0, type);
8964 /* Convert (X - c) <= X to true. */
8965 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8967 && ((code0 == MINUS_EXPR && is_positive >= 0)
8968 || (code0 == PLUS_EXPR && is_positive <= 0)))
8969 return constant_boolean_node (1, type);
8971 /* Convert (X + c) >= X to true. */
8972 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8974 && ((code0 == PLUS_EXPR && is_positive >= 0)
8975 || (code0 == MINUS_EXPR && is_positive <= 0)))
8976 return constant_boolean_node (1, type);
8978 if (TREE_CODE (arg01) == INTEGER_CST)
8980 /* Convert X + c > X and X - c < X to true for integers. */
8982 && ((code0 == PLUS_EXPR && is_positive > 0)
8983 || (code0 == MINUS_EXPR && is_positive < 0)))
8984 return constant_boolean_node (1, type);
8987 && ((code0 == MINUS_EXPR && is_positive > 0)
8988 || (code0 == PLUS_EXPR && is_positive < 0)))
8989 return constant_boolean_node (1, type);
8991 /* Convert X + c <= X and X - c >= X to false for integers. */
8993 && ((code0 == PLUS_EXPR && is_positive > 0)
8994 || (code0 == MINUS_EXPR && is_positive < 0)))
8995 return constant_boolean_node (0, type);
8998 && ((code0 == MINUS_EXPR && is_positive > 0)
8999 || (code0 == PLUS_EXPR && is_positive < 0)))
9000 return constant_boolean_node (0, type);
9004 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
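/* For example, x + 4 < 10 is rewritten below as x < 6, provided the
   adjusted constant 10 - 4 does not overflow.  */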
9005 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9006 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9007 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9008 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9009 && !(flag_wrapv || flag_trapv))
9010 && (TREE_CODE (arg1) == INTEGER_CST
9011 && !TREE_OVERFLOW (arg1)))
9013 tree const1 = TREE_OPERAND (arg0, 1);
9015 tree variable = TREE_OPERAND (arg0, 0);
9018 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9020 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9021 TREE_TYPE (arg1), const2, const1);
9022 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9023 && (TREE_CODE (lhs) != INTEGER_CST
9024 || !TREE_OVERFLOW (lhs)))
9025 return fold_build2 (code, type, variable, lhs);
9028 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9030 tree targ0 = strip_float_extensions (arg0);
9031 tree targ1 = strip_float_extensions (arg1);
9032 tree newtype = TREE_TYPE (targ0);
9034 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9035 newtype = TREE_TYPE (targ1);
9037 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9038 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9039 return fold_build2 (code, type, fold_convert (newtype, targ0),
9040 fold_convert (newtype, targ1));
9042 /* (-a) CMP (-b) -> b CMP a */
9043 if (TREE_CODE (arg0) == NEGATE_EXPR
9044 && TREE_CODE (arg1) == NEGATE_EXPR)
9045 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9046 TREE_OPERAND (arg0, 0));
9048 if (TREE_CODE (arg1) == REAL_CST)
9050 REAL_VALUE_TYPE cst;
9051 cst = TREE_REAL_CST (arg1);
9053 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9054 if (TREE_CODE (arg0) == NEGATE_EXPR)
9056 fold_build2 (swap_tree_comparison (code), type,
9057 TREE_OPERAND (arg0, 0),
9058 build_real (TREE_TYPE (arg1),
9059 REAL_VALUE_NEGATE (cst)));
9061 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9062 /* a CMP (-0) -> a CMP 0 */
9063 if (REAL_VALUE_MINUS_ZERO (cst))
9064 return fold_build2 (code, type, arg0,
9065 build_real (TREE_TYPE (arg1), dconst0));
9067 /* x != NaN is always true, other ops are always false. */
9068 if (REAL_VALUE_ISNAN (cst)
9069 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9071 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9072 return omit_one_operand (type, tem, arg0);
9075 /* Fold comparisons against infinity. */
9076 if (REAL_VALUE_ISINF (cst))
9078 tem = fold_inf_compare (code, type, arg0, arg1);
9079 if (tem != NULL_TREE)
9084 /* If this is a comparison of a real constant with a PLUS_EXPR
9085 or a MINUS_EXPR of a real constant, we can convert it into a
9086 comparison with a revised real constant as long as no overflow
9087 occurs when unsafe_math_optimizations are enabled. */
9088 if (flag_unsafe_math_optimizations
9089 && TREE_CODE (arg1) == REAL_CST
9090 && (TREE_CODE (arg0) == PLUS_EXPR
9091 || TREE_CODE (arg0) == MINUS_EXPR)
9092 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9093 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9094 ? MINUS_EXPR : PLUS_EXPR,
9095 arg1, TREE_OPERAND (arg0, 1), 0))
9096 && ! TREE_CONSTANT_OVERFLOW (tem))
9097 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9099 /* Likewise, we can simplify a comparison of a real constant with
9100 a MINUS_EXPR whose first operand is also a real constant, i.e.
9101 (c1 - x) < c2 becomes x > c1-c2. */
9102 if (flag_unsafe_math_optimizations
9103 && TREE_CODE (arg1) == REAL_CST
9104 && TREE_CODE (arg0) == MINUS_EXPR
9105 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9106 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9108 && ! TREE_CONSTANT_OVERFLOW (tem))
9109 return fold_build2 (swap_tree_comparison (code), type,
9110 TREE_OPERAND (arg0, 1), tem);
9112 /* Fold comparisons against built-in math functions. */
9113 if (TREE_CODE (arg1) == REAL_CST
9114 && flag_unsafe_math_optimizations
9115 && ! flag_errno_math)
9117 enum built_in_function fcode = builtin_mathfn_code (arg0);
9119 if (fcode != END_BUILTINS)
9121 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9122 if (tem != NULL_TREE)
9128 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9129 if (TREE_CONSTANT (arg1)
9130 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9131 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9132 /* This optimization is invalid for ordered comparisons
9133 if CONST+INCR overflows or if foo+incr might overflow.
9134 This optimization is invalid for floating point due to rounding.
9135 For pointer types we assume overflow doesn't happen. */
9136 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9137 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9138 && (code == EQ_EXPR || code == NE_EXPR))))
9140 tree varop, newconst;
9142 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9144 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9145 arg1, TREE_OPERAND (arg0, 1));
9146 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9147 TREE_OPERAND (arg0, 0),
9148 TREE_OPERAND (arg0, 1));
9152 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9153 arg1, TREE_OPERAND (arg0, 1));
9154 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9155 TREE_OPERAND (arg0, 0),
9156 TREE_OPERAND (arg0, 1));
9160 /* If VAROP is a reference to a bitfield, we must mask
9161 the constant by the width of the field. */
9162 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9163 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9164 && host_integerp (DECL_SIZE (TREE_OPERAND
9165 (TREE_OPERAND (varop, 0), 1)), 1))
9167 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9168 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9169 tree folded_compare, shift;
9171 /* First check whether the comparison would come out
9172 always the same. If we don't do that we would
9173 change the meaning with the masking. */
9174 folded_compare = fold_build2 (code, type,
9175 TREE_OPERAND (varop, 0), arg1);
9176 if (integer_zerop (folded_compare)
9177 || integer_onep (folded_compare))
9178 return omit_one_operand (type, folded_compare, varop);
9180 shift = build_int_cst (NULL_TREE,
9181 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9182 shift = fold_convert (TREE_TYPE (varop), shift);
9183 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9185 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9189 return fold_build2 (code, type, varop, newconst);
9192 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9193 This transformation affects the cases which are handled in later
9194 optimizations involving comparisons with non-negative constants. */
9195 if (TREE_CODE (arg1) == INTEGER_CST
9196 && TREE_CODE (arg0) != INTEGER_CST
9197 && tree_int_cst_sgn (arg1) > 0)
9202 arg1 = const_binop (MINUS_EXPR, arg1,
9203 build_int_cst (TREE_TYPE (arg1), 1), 0);
9204 return fold_build2 (GT_EXPR, type, arg0,
9205 fold_convert (TREE_TYPE (arg0), arg1));
9208 arg1 = const_binop (MINUS_EXPR, arg1,
9209 build_int_cst (TREE_TYPE (arg1), 1), 0);
9210 return fold_build2 (LE_EXPR, type, arg0,
9211 fold_convert (TREE_TYPE (arg0), arg1));
9218 /* Comparisons with the highest or lowest possible integer of
9219 the specified size will have known values. */
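/* For example, when the constant is the maximum value of the
   comparison type, X > MAX folds to 0, X <= MAX folds to 1,
   X >= MAX becomes X == MAX, and X < MAX becomes X != MAX; the
   analogous folds against MAX - 1 and against the minimum value
   follow below.  */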
9221 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9223 if (TREE_CODE (arg1) == INTEGER_CST
9224 && ! TREE_CONSTANT_OVERFLOW (arg1)
9225 && width <= 2 * HOST_BITS_PER_WIDE_INT
9226 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9227 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9229 HOST_WIDE_INT signed_max_hi;
9230 unsigned HOST_WIDE_INT signed_max_lo;
9231 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9233 if (width <= HOST_BITS_PER_WIDE_INT)
9235 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9240 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9242 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9248 max_lo = signed_max_lo;
9249 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9255 width -= HOST_BITS_PER_WIDE_INT;
9257 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9262 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9264 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9269 max_hi = signed_max_hi;
9270 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9274 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9275 && TREE_INT_CST_LOW (arg1) == max_lo)
9279 return omit_one_operand (type, integer_zero_node, arg0);
9282 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9285 return omit_one_operand (type, integer_one_node, arg0);
9288 return fold_build2 (NE_EXPR, type, arg0, arg1);
9290 /* The GE_EXPR and LT_EXPR cases above are not normally
9291 reached because of previous transformations. */
9296 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9298 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9302 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9303 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9305 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9306 return fold_build2 (NE_EXPR, type, arg0, arg1);
9310 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9312 && TREE_INT_CST_LOW (arg1) == min_lo)
9316 return omit_one_operand (type, integer_zero_node, arg0);
9319 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9322 return omit_one_operand (type, integer_one_node, arg0);
9325 return fold_build2 (NE_EXPR, type, op0, op1);
9330 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9332 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9336 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9337 return fold_build2 (NE_EXPR, type, arg0, arg1);
9339 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9340 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9345 else if (!in_gimple_form
9346 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9347 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9348 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9349 /* signed_type does not work on pointer types. */
9350 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9352 /* The following case also applies to X < signed_max+1
9353 and X >= signed_max+1 because of previous transformations.  */
9354 if (code == LE_EXPR || code == GT_EXPR)
9357 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9358 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9359 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9360 type, fold_convert (st0, arg0),
9361 build_int_cst (st1, 0));
9367 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9368 a MINUS_EXPR of a constant, we can convert it into a comparison with
9369 a revised constant as long as no overflow occurs. */
9370 if ((code == EQ_EXPR || code == NE_EXPR)
9371 && TREE_CODE (arg1) == INTEGER_CST
9372 && (TREE_CODE (arg0) == PLUS_EXPR
9373 || TREE_CODE (arg0) == MINUS_EXPR)
9374 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9375 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9376 ? MINUS_EXPR : PLUS_EXPR,
9377 fold_convert (TREE_TYPE (arg0), arg1),
9378 TREE_OPERAND (arg0, 1), 0))
9379 && ! TREE_CONSTANT_OVERFLOW (tem))
9380 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9382 /* Similarly for a NEGATE_EXPR. */
9383 else if ((code == EQ_EXPR || code == NE_EXPR)
9384 && TREE_CODE (arg0) == NEGATE_EXPR
9385 && TREE_CODE (arg1) == INTEGER_CST
9386 && 0 != (tem = negate_expr (arg1))
9387 && TREE_CODE (tem) == INTEGER_CST
9388 && ! TREE_CONSTANT_OVERFLOW (tem))
9389 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9391 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9392 for !=. Don't do this for ordered comparisons due to overflow. */
9393 else if ((code == NE_EXPR || code == EQ_EXPR)
9394 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9395 return fold_build2 (code, type,
9396 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9398 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9399 && (TREE_CODE (arg0) == NOP_EXPR
9400 || TREE_CODE (arg0) == CONVERT_EXPR))
9402 /* If we are widening one operand of an integer comparison,
9403 see if the other operand is similarly being widened. Perhaps we
9404 can do the comparison in the narrower type. */
9405 tem = fold_widened_comparison (code, type, arg0, arg1);
9409 /* Or if we are changing signedness. */
9410 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9415 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9416 constant, we can simplify it. */
9417 else if (TREE_CODE (arg1) == INTEGER_CST
9418 && (TREE_CODE (arg0) == MIN_EXPR
9419 || TREE_CODE (arg0) == MAX_EXPR)
9420 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9422 tem = optimize_minmax_comparison (code, type, op0, op1);
9429 /* If we are comparing an ABS_EXPR with a constant, we can
9430 convert all the cases into explicit comparisons, but they may
9431 well not be faster than doing the ABS and one comparison.
9432 But ABS (X) <= C is a range comparison, which becomes a subtraction
9433 and a comparison, and is probably faster. */
9434 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9435 && TREE_CODE (arg0) == ABS_EXPR
9436 && ! TREE_SIDE_EFFECTS (arg0)
9437 && (0 != (tem = negate_expr (arg1)))
9438 && TREE_CODE (tem) == INTEGER_CST
9439 && ! TREE_CONSTANT_OVERFLOW (tem))
9440 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9441 build2 (GE_EXPR, type,
9442 TREE_OPERAND (arg0, 0), tem),
9443 build2 (LE_EXPR, type,
9444 TREE_OPERAND (arg0, 0), arg1));
9446 /* Convert ABS_EXPR<x> >= 0 to true. */
9447 else if (code == GE_EXPR
9448 && tree_expr_nonnegative_p (arg0)
9449 && (integer_zerop (arg1)
9450 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9451 && real_zerop (arg1))))
9452 return omit_one_operand (type, integer_one_node, arg0);
9454 /* Convert ABS_EXPR<x> < 0 to false. */
9455 else if (code == LT_EXPR
9456 && tree_expr_nonnegative_p (arg0)
9457 && (integer_zerop (arg1) || real_zerop (arg1)))
9458 return omit_one_operand (type, integer_zero_node, arg0);
9460 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9461 else if ((code == EQ_EXPR || code == NE_EXPR)
9462 && TREE_CODE (arg0) == ABS_EXPR
9463 && (integer_zerop (arg1) || real_zerop (arg1)))
9464 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9466 /* If this is an EQ or NE comparison with zero and ARG0 is
9467 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9468 two operations, but the latter can be done in one less insn
9469 on machines that have only two-operand insns or on which a
9470 constant cannot be the first operand. */
9471 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9472 && TREE_CODE (arg0) == BIT_AND_EXPR)
9474 tree arg00 = TREE_OPERAND (arg0, 0);
9475 tree arg01 = TREE_OPERAND (arg0, 1);
9476 if (TREE_CODE (arg00) == LSHIFT_EXPR
9477 && integer_onep (TREE_OPERAND (arg00, 0)))
9479 fold_build2 (code, type,
9480 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9481 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9482 arg01, TREE_OPERAND (arg00, 1)),
9483 fold_convert (TREE_TYPE (arg0),
9486 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9487 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9489 fold_build2 (code, type,
9490 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9491 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9492 arg00, TREE_OPERAND (arg01, 1)),
9493 fold_convert (TREE_TYPE (arg0),
9498 /* If this is an NE or EQ comparison of zero against the result of a
9499 signed MOD operation whose second operand is a power of 2, make
9500 the MOD operation unsigned since it is simpler and equivalent. */
9501 if ((code == NE_EXPR || code == EQ_EXPR)
9502 && integer_zerop (arg1)
9503 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9504 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9505 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9506 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9507 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9508 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9510 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9511 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9512 fold_convert (newtype,
9513 TREE_OPERAND (arg0, 0)),
9514 fold_convert (newtype,
9515 TREE_OPERAND (arg0, 1)));
9517 return fold_build2 (code, type, newmod,
9518 fold_convert (newtype, arg1));
9521 /* If this is an NE comparison of zero with an AND of one, remove the
9522 comparison since the AND will give the correct value. */
9523 if (code == NE_EXPR && integer_zerop (arg1)
9524 && TREE_CODE (arg0) == BIT_AND_EXPR
9525 && integer_onep (TREE_OPERAND (arg0, 1)))
9526 return fold_convert (type, arg0);
9528 /* If we have (A & C) == C where C is a power of 2, convert this into
9529 (A & C) != 0. Similarly for NE_EXPR. */
9530 if ((code == EQ_EXPR || code == NE_EXPR)
9531 && TREE_CODE (arg0) == BIT_AND_EXPR
9532 && integer_pow2p (TREE_OPERAND (arg0, 1))
9533 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9534 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9535 arg0, fold_convert (TREE_TYPE (arg0),
9536 integer_zero_node));
9538 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9539 bit, then fold the expression into A < 0 or A >= 0. */
9540 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9544 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9545 Similarly for NE_EXPR. */
9546 if ((code == EQ_EXPR || code == NE_EXPR)
9547 && TREE_CODE (arg0) == BIT_AND_EXPR
9548 && TREE_CODE (arg1) == INTEGER_CST
9549 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9551 tree notc = fold_build1 (BIT_NOT_EXPR,
9552 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9553 TREE_OPERAND (arg0, 1));
9554 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9556 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9557 if (integer_nonzerop (dandnotc))
9558 return omit_one_operand (type, rslt, arg0);
9561 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9562 Similarly for NE_EXPR. */
9563 if ((code == EQ_EXPR || code == NE_EXPR)
9564 && TREE_CODE (arg0) == BIT_IOR_EXPR
9565 && TREE_CODE (arg1) == INTEGER_CST
9566 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9568 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9569 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9570 TREE_OPERAND (arg0, 1), notd);
9571 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9572 if (integer_nonzerop (candnotd))
9573 return omit_one_operand (type, rslt, arg0);
9576 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9577 and similarly for >= into !=. */
9578 if ((code == LT_EXPR || code == GE_EXPR)
9579 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9580 && TREE_CODE (arg1) == LSHIFT_EXPR
9581 && integer_onep (TREE_OPERAND (arg1, 0)))
9582 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9583 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9584 TREE_OPERAND (arg1, 1)),
9585 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9587 else if ((code == LT_EXPR || code == GE_EXPR)
9588 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9589 && (TREE_CODE (arg1) == NOP_EXPR
9590 || TREE_CODE (arg1) == CONVERT_EXPR)
9591 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9592 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9594 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9595 fold_convert (TREE_TYPE (arg0),
9596 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9597 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9599 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9601 /* Simplify comparison of something with itself. (For IEEE
9602 floating-point, we can only do some of these simplifications.) */
9603 if (operand_equal_p (arg0, arg1, 0))
9608 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9609 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9610 return constant_boolean_node (1, type);
9615 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9616 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9617 return constant_boolean_node (1, type);
9618 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9621 /* For NE, we can only do this simplification if integer
9622 or we don't honor IEEE floating point NaNs. */
9623 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9624 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9626 /* ... fall through ... */
9629 return constant_boolean_node (0, type);
9635 /* If we are comparing an expression that just has comparisons
9636 of two integer values, arithmetic expressions of those comparisons,
9637 and constants, we can simplify it. There are only three cases
9638 to check: the two values can either be equal, the first can be
9639 greater, or the second can be greater. Fold the expression for
9640 those three values. Since each value must be 0 or 1, we have
9641 eight possibilities, each of which corresponds to the constant 0
9642 or 1 or one of the six possible comparisons.
9644 This handles common cases like (a > b) == 0 but also handles
9645 expressions like ((x > y) - (y > x)) > 0, which supposedly
9646 occur in macroized code. */
9648 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9650 tree cval1 = 0, cval2 = 0;
9653 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9654 /* Don't handle degenerate cases here; they should already
9655 have been handled anyway. */
9656 && cval1 != 0 && cval2 != 0
9657 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9658 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9659 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9660 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9661 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9662 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9663 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9665 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9666 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9668 /* We can't just pass T to eval_subst in case cval1 or cval2
9669 was the same as ARG1. */
9672 = fold_build2 (code, type,
9673 eval_subst (arg0, cval1, maxval,
9677 = fold_build2 (code, type,
9678 eval_subst (arg0, cval1, maxval,
9682 = fold_build2 (code, type,
9683 eval_subst (arg0, cval1, minval,
9687 /* All three of these results should be 0 or 1. Confirm they
9688 are.  Then use those values to select the proper code to use.  */
9691 if ((integer_zerop (high_result)
9692 || integer_onep (high_result))
9693 && (integer_zerop (equal_result)
9694 || integer_onep (equal_result))
9695 && (integer_zerop (low_result)
9696 || integer_onep (low_result)))
9698 /* Make a 3-bit mask with the high-order bit being the
9699 value for `>', the next for '=', and the low for '<'. */
9700 switch ((integer_onep (high_result) * 4)
9701 + (integer_onep (equal_result) * 2)
9702 + integer_onep (low_result))
9706 return omit_one_operand (type, integer_zero_node, arg0);
9727 return omit_one_operand (type, integer_one_node, arg0);
9731 return save_expr (build2 (code, type, cval1, cval2));
9733 return fold_build2 (code, type, cval1, cval2);
9738 /* If this is a comparison of a field, we may be able to simplify it. */
9739 if (((TREE_CODE (arg0) == COMPONENT_REF
9740 && lang_hooks.can_use_bit_fields_p ())
9741 || TREE_CODE (arg0) == BIT_FIELD_REF)
9742 && (code == EQ_EXPR || code == NE_EXPR)
9743 /* Handle the constant case even without -O
9744 to make sure the warnings are given. */
9745 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9747 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9752 /* Fold a comparison of the address of COMPONENT_REFs with the same
9753 type and component to a comparison of the address of the base
9754 object. In short, &x->a OP &y->a to x OP y and
9755 &x->a OP &y.a to x OP &y */
9756 if (TREE_CODE (arg0) == ADDR_EXPR
9757 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9758 && TREE_CODE (arg1) == ADDR_EXPR
9759 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9761 tree cref0 = TREE_OPERAND (arg0, 0);
9762 tree cref1 = TREE_OPERAND (arg1, 0);
9763 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9765 tree op0 = TREE_OPERAND (cref0, 0);
9766 tree op1 = TREE_OPERAND (cref1, 0);
9767 return fold_build2 (code, type,
9768 build_fold_addr_expr (op0),
9769 build_fold_addr_expr (op1));
9773 /* Optimize comparisons of strlen vs zero to a compare of the
9774 first character of the string vs zero. To wit,
9775 strlen(ptr) == 0 => *ptr == 0
9776 strlen(ptr) != 0 => *ptr != 0
9777 Other cases should reduce to one of these two (or a constant)
9778 due to the return value of strlen being unsigned. */
9779 if ((code == EQ_EXPR || code == NE_EXPR)
9780 && integer_zerop (arg1)
9781 && TREE_CODE (arg0) == CALL_EXPR)
9783 tree fndecl = get_callee_fndecl (arg0);
9787 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9788 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9789 && (arglist = TREE_OPERAND (arg0, 1))
9790 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9791 && ! TREE_CHAIN (arglist))
9793 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9794 return fold_build2 (code, type, iref,
9795 build_int_cst (TREE_TYPE (iref), 0));
9799 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9800 into a single range test. */
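/* For example, for unsigned x, x / 4 == 2 can be folded by
   fold_div_compare into a single range test equivalent to
   8 <= x && x <= 11.  */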
9801 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9802 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9803 && TREE_CODE (arg1) == INTEGER_CST
9804 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9805 && !integer_zerop (TREE_OPERAND (arg0, 1))
9806 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9807 && !TREE_OVERFLOW (arg1))
9809 t1 = fold_div_compare (code, type, arg0, arg1);
9810 if (t1 != NULL_TREE)
9814 if ((code == EQ_EXPR || code == NE_EXPR)
9815 && integer_zerop (arg1)
9816 && tree_expr_nonzero_p (arg0))
9818 tree res = constant_boolean_node (code==NE_EXPR, type);
9819 return omit_one_operand (type, res, arg0);
9822 t1 = fold_relational_const (code, type, arg0, arg1);
9823 return t1 == NULL_TREE ? NULL_TREE : t1;
9825 case UNORDERED_EXPR:
9833 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9835 t1 = fold_relational_const (code, type, arg0, arg1);
9836 if (t1 != NULL_TREE)
9840 /* If the first operand is NaN, the result is constant. */
9841 if (TREE_CODE (arg0) == REAL_CST
9842 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9843 && (code != LTGT_EXPR || ! flag_trapping_math))
9845 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9848 return omit_one_operand (type, t1, arg1);
9851 /* If the second operand is NaN, the result is constant. */
9852 if (TREE_CODE (arg1) == REAL_CST
9853 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9854 && (code != LTGT_EXPR || ! flag_trapping_math))
9856 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9859 return omit_one_operand (type, t1, arg0);
9862 /* Simplify unordered comparison of something with itself. */
9863 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9864 && operand_equal_p (arg0, arg1, 0))
9865 return constant_boolean_node (1, type);
9867 if (code == LTGT_EXPR
9868 && !flag_trapping_math
9869 && operand_equal_p (arg0, arg1, 0))
9870 return constant_boolean_node (0, type);
9872 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9874 tree targ0 = strip_float_extensions (arg0);
9875 tree targ1 = strip_float_extensions (arg1);
9876 tree newtype = TREE_TYPE (targ0);
9878 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9879 newtype = TREE_TYPE (targ1);
9881 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9882 return fold_build2 (code, type, fold_convert (newtype, targ0),
9883 fold_convert (newtype, targ1));
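/* Illustrative sketch, not part of this file: the float-extension strip
   above is safe because converting a float to a wider floating type is
   exact, so the comparison result is unchanged.  The function name is
   hypothetical.  */

static int
strip_float_extension_sketch (float a, float b)
{
  return ((double) a < (double) b) == (a < b);   /* always 1 */
}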
9889 /* When pedantic, a compound expression can be neither an lvalue
9890 nor an integer constant expression. */
9891 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9893 /* Don't let (0, 0) be null pointer constant. */
9894 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9895 : fold_convert (type, arg1);
9896 return pedantic_non_lvalue (tem);
9900 return build_complex (type, arg0, arg1);
9904 /* An ASSERT_EXPR should never be passed to fold_binary. */
9909 } /* switch (code) */
9912 /* Callback for walk_tree, looking for LABEL_EXPR.
9913 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
9914 Do not check the sub-tree of GOTO_EXPR. */
9917 contains_label_1 (tree *tp,
9919 void *data ATTRIBUTE_UNUSED)
9921 switch (TREE_CODE (*tp))
9933 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
9934 accessible from outside the sub-tree. Returns false if no
9935 such label is found.
9938 contains_label_p (tree st)
9940 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9943 /* Fold a ternary expression of code CODE and type TYPE with operands
9944 OP0, OP1, and OP2. Return the folded expression if folding is
9945 successful. Otherwise, return NULL_TREE. */
9948 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9951 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9952 enum tree_code_class kind = TREE_CODE_CLASS (code);
9954 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9955 && TREE_CODE_LENGTH (code) == 3);
9957 /* Strip any conversions that don't change the mode. This is safe
9958 for every expression, except for a comparison expression because
9959 its signedness is derived from its operands. So, in the latter
9960 case, only strip conversions that don't change the signedness.
9962 Note that this is done as an internal manipulation within the
9963 constant folder, in order to find the simplest representation of
9964 the arguments so that their form can be studied. In any case,
9965 the appropriate type conversions should be put back in the tree
9966 that will get out of the constant folder. */
9982 if (TREE_CODE (arg0) == CONSTRUCTOR
9983 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9985 unsigned HOST_WIDE_INT idx;
9987 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
9994 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9995 so all simple results must be passed through pedantic_non_lvalue. */
9996 if (TREE_CODE (arg0) == INTEGER_CST)
9998 tree unused_op = integer_zerop (arg0) ? op1 : op2;
9999 tem = integer_zerop (arg0) ? op2 : op1;
10000 /* Only optimize constant conditions when the selected branch
10001 has the same type as the COND_EXPR. This avoids optimizing
10002 away "c ? x : throw", where the throw has a void type.
10003 Avoid throwing away the operand that contains a label. */
10004 if ((!TREE_SIDE_EFFECTS (unused_op)
10005 || !contains_label_p (unused_op))
10006 && (! VOID_TYPE_P (TREE_TYPE (tem))
10007 || VOID_TYPE_P (type)))
10008 return pedantic_non_lvalue (tem);
10011 if (operand_equal_p (arg1, op2, 0))
10012 return pedantic_omit_one_operand (type, arg1, arg0);
10014 /* If we have A op B ? A : C, we may be able to convert this to a
10015 simpler expression, depending on the operation and the values
10016 of B and C. Signed zeros prevent all of these transformations,
10017 for reasons given above each one.
10019 Also try swapping the arguments and inverting the conditional. */
10020 if (COMPARISON_CLASS_P (arg0)
10021 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10022 arg1, TREE_OPERAND (arg0, 1))
10023 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10025 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10030 if (COMPARISON_CLASS_P (arg0)
10031 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10033 TREE_OPERAND (arg0, 1))
10034 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10036 tem = invert_truthvalue (arg0);
10037 if (COMPARISON_CLASS_P (tem))
10039 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10045 /* If the second operand is simpler than the third, swap them
10046 since that produces better jump optimization results. */
10047 if (truth_value_p (TREE_CODE (arg0))
10048 && tree_swap_operands_p (op1, op2, false))
10050 /* See if this can be inverted. If it can't, possibly because
10051 it was a floating-point inequality comparison, don't do
10053 tem = invert_truthvalue (arg0);
10055 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10056 return fold_build3 (code, type, tem, op2, op1);
10059 /* Convert A ? 1 : 0 to simply A. */
10060 if (integer_onep (op1)
10061 && integer_zerop (op2)
10062 /* If we try to convert OP0 to our type, the
10063 call to fold will try to move the conversion inside
10064 a COND, which will recurse. In that case, the COND_EXPR
10065 is probably the best choice, so leave it alone. */
10066 && type == TREE_TYPE (arg0))
10067 return pedantic_non_lvalue (arg0);
10069 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10070 over COND_EXPR in cases such as floating point comparisons. */
10071 if (integer_zerop (op1)
10072 && integer_onep (op2)
10073 && truth_value_p (TREE_CODE (arg0)))
10074 return pedantic_non_lvalue (fold_convert (type,
10075 invert_truthvalue (arg0)));
10077 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10078 if (TREE_CODE (arg0) == LT_EXPR
10079 && integer_zerop (TREE_OPERAND (arg0, 1))
10080 && integer_zerop (op2)
10081 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10083 /* sign_bit_p only checks ARG1 bits within A's precision.
10084 If <sign bit of A> has wider type than A, bits outside
10085 of A's precision in <sign bit of A> need to be checked.
10086 If they are all 0, this optimization needs to be done
10087 in unsigned A's type; if they are all 1, in signed A's type;
10088 otherwise this can't be done. */
10089 if (TYPE_PRECISION (TREE_TYPE (tem))
10090 < TYPE_PRECISION (TREE_TYPE (arg1))
10091 && TYPE_PRECISION (TREE_TYPE (tem))
10092 < TYPE_PRECISION (type))
10094 unsigned HOST_WIDE_INT mask_lo;
10095 HOST_WIDE_INT mask_hi;
10096 int inner_width, outer_width;
10099 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
10100 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
10101 if (outer_width > TYPE_PRECISION (type))
10102 outer_width = TYPE_PRECISION (type);
10104 if (outer_width > HOST_BITS_PER_WIDE_INT)
10106 mask_hi = ((unsigned HOST_WIDE_INT) -1
10107 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
10113 mask_lo = ((unsigned HOST_WIDE_INT) -1
10114 >> (HOST_BITS_PER_WIDE_INT - outer_width));
10116 if (inner_width > HOST_BITS_PER_WIDE_INT)
10118 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
10119 >> (HOST_BITS_PER_WIDE_INT - inner_width));
10123 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
10124 >> (HOST_BITS_PER_WIDE_INT - inner_width));
10126 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
10127 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
10129 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
10130 tem = fold_convert (tem_type, tem);
10132 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
10133 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
10135 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
10136 tem = fold_convert (tem_type, tem);
10143 return fold_convert (type,
10144 fold_build2 (BIT_AND_EXPR,
10145 TREE_TYPE (tem), tem,
10146 fold_convert (TREE_TYPE (tem),
10150 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10151 already handled above. */
10152 if (TREE_CODE (arg0) == BIT_AND_EXPR
10153 && integer_onep (TREE_OPERAND (arg0, 1))
10154 && integer_zerop (op2)
10155 && integer_pow2p (arg1))
10157 tree tem = TREE_OPERAND (arg0, 0);
10159 if (TREE_CODE (tem) == RSHIFT_EXPR
10160 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10161 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10162 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10163 return fold_build2 (BIT_AND_EXPR, type,
10164 TREE_OPERAND (tem, 0), arg1);
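/* Illustrative sketch, not part of this file: the fold above in plain C,
   with N = 3.  Testing bit N via a shift and selecting 1 << N is the same
   as masking with 1 << N directly.  The function name is hypothetical.  */

static int
shift_test_select_sketch (unsigned int a)
{
  unsigned int n = 3;
  unsigned int cond_form = ((a >> n) & 1u) ? (1u << n) : 0u;
  unsigned int masked_form = a & (1u << n);
  return cond_form == masked_form;   /* always 1 */
}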
10167 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10168 is probably obsolete because the first operand should be a
10169 truth value (that's why we have the two cases above), but let's
10170 leave it in until we can confirm this for all front-ends. */
10171 if (integer_zerop (op2)
10172 && TREE_CODE (arg0) == NE_EXPR
10173 && integer_zerop (TREE_OPERAND (arg0, 1))
10174 && integer_pow2p (arg1)
10175 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10176 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10177 arg1, OEP_ONLY_CONST))
10178 return pedantic_non_lvalue (fold_convert (type,
10179 TREE_OPERAND (arg0, 0)));
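/* Illustrative sketch, not part of this file: the A & N ? N : 0 fold above
   for a power-of-two N (here N = 4).  Since A & 4 is either 0 or 4, the
   conditional adds nothing.  The function name is hypothetical.  */

static int
bit_test_select_sketch (unsigned int a)
{
  unsigned int n = 4u;
  return (((a & n) != 0u ? n : 0u) == (a & n));   /* always 1 */
}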
10181 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10182 if (integer_zerop (op2)
10183 && truth_value_p (TREE_CODE (arg0))
10184 && truth_value_p (TREE_CODE (arg1)))
10185 return fold_build2 (TRUTH_ANDIF_EXPR, type,
10186 fold_convert (type, arg0),
10189 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10190 if (integer_onep (op2)
10191 && truth_value_p (TREE_CODE (arg0))
10192 && truth_value_p (TREE_CODE (arg1)))
10194 /* Only perform the transformation if ARG0 is easily inverted. */
10195 tem = invert_truthvalue (arg0);
10196 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10197 return fold_build2 (TRUTH_ORIF_EXPR, type,
10198 fold_convert (type, tem),
10202 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10203 if (integer_zerop (arg1)
10204 && truth_value_p (TREE_CODE (arg0))
10205 && truth_value_p (TREE_CODE (op2)))
10207 /* Only perform the transformation if ARG0 is easily inverted. */
10208 tem = invert_truthvalue (arg0);
10209 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10210 return fold_build2 (TRUTH_ANDIF_EXPR, type,
10211 fold_convert (type, tem),
10215 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10216 if (integer_onep (arg1)
10217 && truth_value_p (TREE_CODE (arg0))
10218 && truth_value_p (TREE_CODE (op2)))
10219 return fold_build2 (TRUTH_ORIF_EXPR, type,
10220 fold_convert (type, arg0),
10226 /* Check for a built-in function. */
10227 if (TREE_CODE (op0) == ADDR_EXPR
10228 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10229 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10230 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10233 case BIT_FIELD_REF:
10234 if (TREE_CODE (arg0) == VECTOR_CST
10235 && type == TREE_TYPE (TREE_TYPE (arg0))
10236 && host_integerp (arg1, 1)
10237 && host_integerp (op2, 1))
10239 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10240 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10243 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10244 && (idx % width) == 0
10245 && (idx = idx / width)
10246 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10248 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10249 while (idx-- > 0 && elements)
10250 elements = TREE_CHAIN (elements);
10252 return TREE_VALUE (elements);
10254 return fold_convert (type, integer_zero_node);
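/* Illustrative sketch, not part of this file: the BIT_FIELD_REF extraction
   above in plain terms.  When the reference width equals the element size
   and the bit offset is a multiple of it, the reference simply picks
   element offset/width; here 32-bit elements and bit offset 64 select
   element 2.  The function name is hypothetical.  */

static int
vector_bit_field_sketch (void)
{
  int elements[4] = { 10, 11, 12, 13 };
  unsigned int width = 32, offset = 64;
  return elements[offset / width] == 12;   /* always 1 */
}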
10261 } /* switch (code) */
10264 /* Perform constant folding and related simplification of EXPR.
10265 The related simplifications include x*1 => x, x*0 => 0, etc.,
10266 and application of the associative law.
10267 NOP_EXPR conversions may be removed freely (as long as we
10268 are careful not to change the type of the overall expression).
10269 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10270 but we can constant-fold them if they have constant operands. */
10272 #ifdef ENABLE_FOLD_CHECKING
10273 # define fold(x) fold_1 (x)
10274 static tree fold_1 (tree);
10280 const tree t = expr;
10281 enum tree_code code = TREE_CODE (t);
10282 enum tree_code_class kind = TREE_CODE_CLASS (code);
10285 /* Return right away if a constant. */
10286 if (kind == tcc_constant)
10289 if (IS_EXPR_CODE_CLASS (kind))
10291 tree type = TREE_TYPE (t);
10292 tree op0, op1, op2;
10294 switch (TREE_CODE_LENGTH (code))
10297 op0 = TREE_OPERAND (t, 0);
10298 tem = fold_unary (code, type, op0);
10299 return tem ? tem : expr;
10301 op0 = TREE_OPERAND (t, 0);
10302 op1 = TREE_OPERAND (t, 1);
10303 tem = fold_binary (code, type, op0, op1);
10304 return tem ? tem : expr;
10306 op0 = TREE_OPERAND (t, 0);
10307 op1 = TREE_OPERAND (t, 1);
10308 op2 = TREE_OPERAND (t, 2);
10309 tem = fold_ternary (code, type, op0, op1, op2);
10310 return tem ? tem : expr;
10319 return fold (DECL_INITIAL (t));
10323 } /* switch (code) */
10326 #ifdef ENABLE_FOLD_CHECKING
10329 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10330 static void fold_check_failed (tree, tree);
10331 void print_fold_checksum (tree);
10333 /* When --enable-checking=fold, compute a digest of EXPR before
10334 and after the actual fold call to verify that fold did not accidentally
10335 change the original expression. */
10341 struct md5_ctx ctx;
10342 unsigned char checksum_before[16], checksum_after[16];
10345 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10346 md5_init_ctx (&ctx);
10347 fold_checksum_tree (expr, &ctx, ht);
10348 md5_finish_ctx (&ctx, checksum_before);
10351 ret = fold_1 (expr);
10353 md5_init_ctx (&ctx);
10354 fold_checksum_tree (expr, &ctx, ht);
10355 md5_finish_ctx (&ctx, checksum_after);
10358 if (memcmp (checksum_before, checksum_after, 16))
10359 fold_check_failed (expr, ret);
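/* Illustrative sketch, not part of this file: the checking idiom above in a
   self-contained form.  Hash a buffer before and after calling a routine
   that must leave it untouched, and compare the digests.  A trivial FNV-1a
   hash stands in for MD5 here; all names are hypothetical.  */

static unsigned int
fnv1a_sketch (const unsigned char *p, unsigned long n)
{
  unsigned int h = 2166136261u;
  while (n--)
    h = (h ^ *p++) * 16777619u;
  return h;
}

static int
call_preserves_buffer_sketch (void (*fn) (unsigned char *, unsigned long),
                              unsigned char *buf, unsigned long n)
{
  unsigned int before = fnv1a_sketch (buf, n);
  fn (buf, n);
  return before == fnv1a_sketch (buf, n);   /* 1 iff FN left BUF unchanged */
}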
10365 print_fold_checksum (tree expr)
10367 struct md5_ctx ctx;
10368 unsigned char checksum[16], cnt;
10371 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10372 md5_init_ctx (&ctx);
10373 fold_checksum_tree (expr, &ctx, ht);
10374 md5_finish_ctx (&ctx, checksum);
10376 for (cnt = 0; cnt < 16; ++cnt)
10377 fprintf (stderr, "%02x", checksum[cnt]);
10378 putc ('\n', stderr);
10382 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10384 internal_error ("fold check: original tree changed by fold");
10388 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10391 enum tree_code code;
10392 struct tree_function_decl buf;
10397 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10398 <= sizeof (struct tree_function_decl))
10399 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10402 slot = htab_find_slot (ht, expr, INSERT);
10406 code = TREE_CODE (expr);
10407 if (TREE_CODE_CLASS (code) == tcc_declaration
10408 && DECL_ASSEMBLER_NAME_SET_P (expr))
10410 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10411 memcpy ((char *) &buf, expr, tree_size (expr));
10412 expr = (tree) &buf;
10413 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10415 else if (TREE_CODE_CLASS (code) == tcc_type
10416 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10417 || TYPE_CACHED_VALUES_P (expr)
10418 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10420 /* Allow these fields to be modified. */
10421 memcpy ((char *) &buf, expr, tree_size (expr));
10422 expr = (tree) &buf;
10423 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10424 TYPE_POINTER_TO (expr) = NULL;
10425 TYPE_REFERENCE_TO (expr) = NULL;
10426 if (TYPE_CACHED_VALUES_P (expr))
10428 TYPE_CACHED_VALUES_P (expr) = 0;
10429 TYPE_CACHED_VALUES (expr) = NULL;
10432 md5_process_bytes (expr, tree_size (expr), ctx);
10433 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10434 if (TREE_CODE_CLASS (code) != tcc_type
10435 && TREE_CODE_CLASS (code) != tcc_declaration
10436 && code != TREE_LIST)
10437 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10438 switch (TREE_CODE_CLASS (code))
10444 md5_process_bytes (TREE_STRING_POINTER (expr),
10445 TREE_STRING_LENGTH (expr), ctx);
10448 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10449 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10452 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10458 case tcc_exceptional:
10462 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10463 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10464 expr = TREE_CHAIN (expr);
10465 goto recursive_label;
10468 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10469 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10475 case tcc_expression:
10476 case tcc_reference:
10477 case tcc_comparison:
10480 case tcc_statement:
10481 len = TREE_CODE_LENGTH (code);
10482 for (i = 0; i < len; ++i)
10483 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10485 case tcc_declaration:
10486 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10487 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10488 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10489 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10490 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10491 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10492 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10493 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10494 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10496 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10498 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10499 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10500 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10504 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10505 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10506 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10507 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10508 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10509 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10510 if (INTEGRAL_TYPE_P (expr)
10511 || SCALAR_FLOAT_TYPE_P (expr))
10513 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10514 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10516 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10517 if (TREE_CODE (expr) == RECORD_TYPE
10518 || TREE_CODE (expr) == UNION_TYPE
10519 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10520 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10521 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10530 /* Fold a unary tree expression with code CODE of type TYPE with an
10531 operand OP0. Return a folded expression if successful. Otherwise,
10532 return a tree expression with code CODE of type TYPE with an
10536 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10539 #ifdef ENABLE_FOLD_CHECKING
10540 unsigned char checksum_before[16], checksum_after[16];
10541 struct md5_ctx ctx;
10544 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10545 md5_init_ctx (&ctx);
10546 fold_checksum_tree (op0, &ctx, ht);
10547 md5_finish_ctx (&ctx, checksum_before);
10551 tem = fold_unary (code, type, op0);
10553 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10555 #ifdef ENABLE_FOLD_CHECKING
10556 md5_init_ctx (&ctx);
10557 fold_checksum_tree (op0, &ctx, ht);
10558 md5_finish_ctx (&ctx, checksum_after);
10561 if (memcmp (checksum_before, checksum_after, 16))
10562 fold_check_failed (op0, tem);
10567 /* Fold a binary tree expression with code CODE of type TYPE with
10568 operands OP0 and OP1. Return a folded expression if successful.
10569 Otherwise, return a tree expression with code CODE of type TYPE
10570 with operands OP0 and OP1. */
10573 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10577 #ifdef ENABLE_FOLD_CHECKING
10578 unsigned char checksum_before_op0[16],
10579 checksum_before_op1[16],
10580 checksum_after_op0[16],
10581 checksum_after_op1[16];
10582 struct md5_ctx ctx;
10585 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10586 md5_init_ctx (&ctx);
10587 fold_checksum_tree (op0, &ctx, ht);
10588 md5_finish_ctx (&ctx, checksum_before_op0);
10591 md5_init_ctx (&ctx);
10592 fold_checksum_tree (op1, &ctx, ht);
10593 md5_finish_ctx (&ctx, checksum_before_op1);
10597 tem = fold_binary (code, type, op0, op1);
10599 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10601 #ifdef ENABLE_FOLD_CHECKING
10602 md5_init_ctx (&ctx);
10603 fold_checksum_tree (op0, &ctx, ht);
10604 md5_finish_ctx (&ctx, checksum_after_op0);
10607 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10608 fold_check_failed (op0, tem);
10610 md5_init_ctx (&ctx);
10611 fold_checksum_tree (op1, &ctx, ht);
10612 md5_finish_ctx (&ctx, checksum_after_op1);
10615 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10616 fold_check_failed (op1, tem);
10621 /* Fold a ternary tree expression with code CODE of type TYPE with
10622 operands OP0, OP1, and OP2. Return a folded expression if
10623 successful. Otherwise, return a tree expression with code CODE of
10624 type TYPE with operands OP0, OP1, and OP2. */
10627 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10631 #ifdef ENABLE_FOLD_CHECKING
10632 unsigned char checksum_before_op0[16],
10633 checksum_before_op1[16],
10634 checksum_before_op2[16],
10635 checksum_after_op0[16],
10636 checksum_after_op1[16],
10637 checksum_after_op2[16];
10638 struct md5_ctx ctx;
10641 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10642 md5_init_ctx (&ctx);
10643 fold_checksum_tree (op0, &ctx, ht);
10644 md5_finish_ctx (&ctx, checksum_before_op0);
10647 md5_init_ctx (&ctx);
10648 fold_checksum_tree (op1, &ctx, ht);
10649 md5_finish_ctx (&ctx, checksum_before_op1);
10652 md5_init_ctx (&ctx);
10653 fold_checksum_tree (op2, &ctx, ht);
10654 md5_finish_ctx (&ctx, checksum_before_op2);
10658 tem = fold_ternary (code, type, op0, op1, op2);
10660 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10662 #ifdef ENABLE_FOLD_CHECKING
10663 md5_init_ctx (&ctx);
10664 fold_checksum_tree (op0, &ctx, ht);
10665 md5_finish_ctx (&ctx, checksum_after_op0);
10668 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10669 fold_check_failed (op0, tem);
10671 md5_init_ctx (&ctx);
10672 fold_checksum_tree (op1, &ctx, ht);
10673 md5_finish_ctx (&ctx, checksum_after_op1);
10676 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10677 fold_check_failed (op1, tem);
10679 md5_init_ctx (&ctx);
10680 fold_checksum_tree (op2, &ctx, ht);
10681 md5_finish_ctx (&ctx, checksum_after_op2);
10684 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10685 fold_check_failed (op2, tem);
10690 /* Perform constant folding and related simplification of initializer
10691 expression EXPR. These behave identically to "fold_buildN" but ignore
10692 potential run-time traps and exceptions that fold must preserve. */
10694 #define START_FOLD_INIT \
10695 int saved_signaling_nans = flag_signaling_nans;\
10696 int saved_trapping_math = flag_trapping_math;\
10697 int saved_rounding_math = flag_rounding_math;\
10698 int saved_trapv = flag_trapv;\
10699 flag_signaling_nans = 0;\
10700 flag_trapping_math = 0;\
10701 flag_rounding_math = 0;\
10704 #define END_FOLD_INIT \
10705 flag_signaling_nans = saved_signaling_nans;\
10706 flag_trapping_math = saved_trapping_math;\
10707 flag_rounding_math = saved_rounding_math;\
10708 flag_trapv = saved_trapv
10711 fold_build1_initializer (enum tree_code code, tree type, tree op)
10716 result = fold_build1 (code, type, op);
10723 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10728 result = fold_build2 (code, type, op0, op1);
10735 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10741 result = fold_build3 (code, type, op0, op1, op2);
10747 #undef START_FOLD_INIT
10748 #undef END_FOLD_INIT
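/* Illustrative sketch, not part of this file: the save/clear/restore pattern
   that START_FOLD_INIT and END_FOLD_INIT implement, reduced to a single
   flag.  Initializers cannot trap at run time, so trap-preserving behavior
   is switched off around the fold and restored afterwards.  All names are
   hypothetical.  */

static int sketch_flag_trapping_math = 1;

static int
fold_initializer_sketch (int (*fold_fn) (int), int operand)
{
  int saved_trapping_math = sketch_flag_trapping_math;
  int result;

  sketch_flag_trapping_math = 0;
  result = fold_fn (operand);
  sketch_flag_trapping_math = saved_trapping_math;
  return result;
}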
10750 /* Determine if first argument is a multiple of second argument. Return 0 if
10751 it is not, or we cannot easily determine it to be.
10753 An example of the sort of thing we care about (at this point; this routine
10754 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10755 fold cases do now) is discovering that
10757 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10763 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10765 This code also handles discovering that
10767 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10769 is a multiple of 8 so we don't have to worry about dealing with a
10770 possible remainder.
10772 Note that we *look* inside a SAVE_EXPR only to determine how it was
10773 calculated; it is not safe for fold to do much of anything else with the
10774 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10775 at run time. For example, the latter example above *cannot* be implemented
10776 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10777 evaluation time of the original SAVE_EXPR is not necessarily the same at
10778 the time the new expression is evaluated. The only optimization of this
10779 sort that would be valid is changing
10781 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10785 SAVE_EXPR (I) * SAVE_EXPR (J)
10787 (where the same SAVE_EXPR (J) is used in the original and the
10788 transformed version). */
10791 multiple_of_p (tree type, tree top, tree bottom)
10793 if (operand_equal_p (top, bottom, 0))
10796 if (TREE_CODE (type) != INTEGER_TYPE)
10799 switch (TREE_CODE (top))
10802 /* Bitwise and provides a power of two multiple. If the mask is
10803 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10804 if (!integer_pow2p (bottom))
10809 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10810 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10814 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10815 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
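/* Illustrative sketch, not part of this file: the arithmetic fact behind the
   MULT_EXPR case above, in plain modular integer terms.  If one factor is a
   multiple of the divisor, so is the product, regardless of the other
   factor.  The function name is hypothetical.  */

static unsigned int
product_multiple_sketch (unsigned int i, unsigned int j)
{
  return (i * (j * 8u)) % 8u;   /* always 0, whatever I and J are */
}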
10818 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10822 op1 = TREE_OPERAND (top, 1);
10823 /* const_binop may not detect overflow correctly,
10824 so check for it explicitly here. */
10825 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10826 > TREE_INT_CST_LOW (op1)
10827 && TREE_INT_CST_HIGH (op1) == 0
10828 && 0 != (t1 = fold_convert (type,
10829 const_binop (LSHIFT_EXPR,
10832 && ! TREE_OVERFLOW (t1))
10833 return multiple_of_p (type, t1, bottom);
10838 /* Can't handle conversions from non-integral or wider integral type. */
10839 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10840 || (TYPE_PRECISION (type)
10841 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10844 /* .. fall through ... */
10847 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10850 if (TREE_CODE (bottom) != INTEGER_CST
10851 || (TYPE_UNSIGNED (type)
10852 && (tree_int_cst_sgn (top) < 0
10853 || tree_int_cst_sgn (bottom) < 0)))
10855 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10863 /* Return true if `t' is known to be non-negative. */
10866 tree_expr_nonnegative_p (tree t)
10868 if (t == error_mark_node)
10871 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10874 switch (TREE_CODE (t))
10877 /* We can't return 1 if flag_wrapv is set because
10878 ABS_EXPR<INT_MIN> = INT_MIN. */
10879 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10884 return tree_int_cst_sgn (t) >= 0;
10887 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10890 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10891 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10892 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10894 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10895 both unsigned and at least 2 bits shorter than the result. */
10896 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10897 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10898 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10900 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10901 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10902 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10903 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10905 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10906 TYPE_PRECISION (inner2)) + 1;
10907 return prec < TYPE_PRECISION (TREE_TYPE (t));
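/* Illustrative sketch, not part of this file: the zero-extension argument
   above with concrete widths.  Two 8-bit unsigned values zero-extended into
   a 32-bit signed type sum to at most 510, so the result can never be
   negative.  The function name is hypothetical.  */

static int
zero_extend_sum_sketch (unsigned char x, unsigned char y)
{
  int sum = (int) x + (int) y;
  return sum >= 0;   /* always 1 */
}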
10913 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10915 /* x * x for floating point x is always non-negative. */
10916 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10918 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10919 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10922 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10923 both unsigned and their combined precision is less than that of the result. */
10924 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10925 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10926 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10928 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10929 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10930 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10931 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10932 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10933 < TYPE_PRECISION (TREE_TYPE (t));
10939 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10940 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10946 case TRUNC_DIV_EXPR:
10947 case CEIL_DIV_EXPR:
10948 case FLOOR_DIV_EXPR:
10949 case ROUND_DIV_EXPR:
10950 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10951 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10953 case TRUNC_MOD_EXPR:
10954 case CEIL_MOD_EXPR:
10955 case FLOOR_MOD_EXPR:
10956 case ROUND_MOD_EXPR:
10958 case NON_LVALUE_EXPR:
10960 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10962 case COMPOUND_EXPR:
10964 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10967 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10970 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10971 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10975 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10976 tree outer_type = TREE_TYPE (t);
10978 if (TREE_CODE (outer_type) == REAL_TYPE)
10980 if (TREE_CODE (inner_type) == REAL_TYPE)
10981 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10982 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10984 if (TYPE_UNSIGNED (inner_type))
10986 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10989 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10991 if (TREE_CODE (inner_type) == REAL_TYPE)
10992 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10993 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10994 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10995 && TYPE_UNSIGNED (inner_type);
11002 tree temp = TARGET_EXPR_SLOT (t);
11003 t = TARGET_EXPR_INITIAL (t);
11005 /* If the initializer is non-void, then it's a normal expression
11006 that will be assigned to the slot. */
11007 if (!VOID_TYPE_P (t))
11008 return tree_expr_nonnegative_p (t);
11010 /* Otherwise, the initializer sets the slot in some way. One common
11011 way is an assignment statement at the end of the initializer. */
11014 if (TREE_CODE (t) == BIND_EXPR)
11015 t = expr_last (BIND_EXPR_BODY (t));
11016 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
11017 || TREE_CODE (t) == TRY_CATCH_EXPR)
11018 t = expr_last (TREE_OPERAND (t, 0));
11019 else if (TREE_CODE (t) == STATEMENT_LIST)
11024 if (TREE_CODE (t) == MODIFY_EXPR
11025 && TREE_OPERAND (t, 0) == temp)
11026 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11033 tree fndecl = get_callee_fndecl (t);
11034 tree arglist = TREE_OPERAND (t, 1);
11035 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
11036 switch (DECL_FUNCTION_CODE (fndecl))
11038 #define CASE_BUILTIN_F(BUILT_IN_FN) \
11039 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
11040 #define CASE_BUILTIN_I(BUILT_IN_FN) \
11041 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
11043 CASE_BUILTIN_F (BUILT_IN_ACOS)
11044 CASE_BUILTIN_F (BUILT_IN_ACOSH)
11045 CASE_BUILTIN_F (BUILT_IN_CABS)
11046 CASE_BUILTIN_F (BUILT_IN_COSH)
11047 CASE_BUILTIN_F (BUILT_IN_ERFC)
11048 CASE_BUILTIN_F (BUILT_IN_EXP)
11049 CASE_BUILTIN_F (BUILT_IN_EXP10)
11050 CASE_BUILTIN_F (BUILT_IN_EXP2)
11051 CASE_BUILTIN_F (BUILT_IN_FABS)
11052 CASE_BUILTIN_F (BUILT_IN_FDIM)
11053 CASE_BUILTIN_F (BUILT_IN_HYPOT)
11054 CASE_BUILTIN_F (BUILT_IN_POW10)
11055 CASE_BUILTIN_I (BUILT_IN_FFS)
11056 CASE_BUILTIN_I (BUILT_IN_PARITY)
11057 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
11061 CASE_BUILTIN_F (BUILT_IN_SQRT)
11062 /* sqrt(-0.0) is -0.0. */
11063 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11065 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11067 CASE_BUILTIN_F (BUILT_IN_ASINH)
11068 CASE_BUILTIN_F (BUILT_IN_ATAN)
11069 CASE_BUILTIN_F (BUILT_IN_ATANH)
11070 CASE_BUILTIN_F (BUILT_IN_CBRT)
11071 CASE_BUILTIN_F (BUILT_IN_CEIL)
11072 CASE_BUILTIN_F (BUILT_IN_ERF)
11073 CASE_BUILTIN_F (BUILT_IN_EXPM1)
11074 CASE_BUILTIN_F (BUILT_IN_FLOOR)
11075 CASE_BUILTIN_F (BUILT_IN_FMOD)
11076 CASE_BUILTIN_F (BUILT_IN_FREXP)
11077 CASE_BUILTIN_F (BUILT_IN_LCEIL)
11078 CASE_BUILTIN_F (BUILT_IN_LDEXP)
11079 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
11080 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
11081 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
11082 CASE_BUILTIN_F (BUILT_IN_LLRINT)
11083 CASE_BUILTIN_F (BUILT_IN_LLROUND)
11084 CASE_BUILTIN_F (BUILT_IN_LRINT)
11085 CASE_BUILTIN_F (BUILT_IN_LROUND)
11086 CASE_BUILTIN_F (BUILT_IN_MODF)
11087 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
11088 CASE_BUILTIN_F (BUILT_IN_POW)
11089 CASE_BUILTIN_F (BUILT_IN_RINT)
11090 CASE_BUILTIN_F (BUILT_IN_ROUND)
11091 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
11092 CASE_BUILTIN_F (BUILT_IN_SINH)
11093 CASE_BUILTIN_F (BUILT_IN_TANH)
11094 CASE_BUILTIN_F (BUILT_IN_TRUNC)
11095 /* True if the 1st argument is nonnegative. */
11096 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11098 CASE_BUILTIN_F (BUILT_IN_FMAX)
11099 /* True if the 1st OR 2nd arguments are nonnegative. */
11100 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11101 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11103 CASE_BUILTIN_F (BUILT_IN_FMIN)
11104 /* True if the 1st AND 2nd arguments are nonnegative. */
11105 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11106 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11108 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
11109 /* True if the 2nd argument is nonnegative. */
11110 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11114 #undef CASE_BUILTIN_F
11115 #undef CASE_BUILTIN_I
11119 /* ... fall through ... */
11122 if (truth_value_p (TREE_CODE (t)))
11123 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11127 /* We don't know the sign of `t', so be conservative and return false. */
11131 /* Return true when T is an address and is known to be nonzero.
11132 For floating point we further ensure that T is not denormal.
11133 Similar logic is present in nonzero_address in rtlanal.h. */
11136 tree_expr_nonzero_p (tree t)
11138 tree type = TREE_TYPE (t);
11140 /* Doing something useful for floating point would need more work. */
11141 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11144 switch (TREE_CODE (t))
11147 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11150 /* We used to test for !integer_zerop here. This does not work correctly
11151 if TREE_CONSTANT_OVERFLOW (t). */
11152 return (TREE_INT_CST_LOW (t) != 0
11153 || TREE_INT_CST_HIGH (t) != 0);
11156 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11158 /* In the presence of negative values it is hard
11159 to say anything definite. */
11160 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11161 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11163 /* One of the operands must be positive and the other non-negative. */
11164 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11165 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11170 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11172 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11173 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11179 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11180 tree outer_type = TREE_TYPE (t);
11182 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11183 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11189 tree base = get_base_address (TREE_OPERAND (t, 0));
11194 /* Weak declarations may link to NULL. */
11195 if (VAR_OR_FUNCTION_DECL_P (base))
11196 return !DECL_WEAK (base);
11198 /* Constants are never weak. */
11199 if (CONSTANT_CLASS_P (base))
11206 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11207 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11210 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11211 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11214 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11216 /* When both operands are nonzero, then MAX must be too. */
11217 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11220 /* MAX where operand 0 is positive is positive. */
11221 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11223 /* MAX where operand 1 is positive is positive. */
11224 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11225 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11229 case COMPOUND_EXPR:
11232 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11235 case NON_LVALUE_EXPR:
11236 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11239 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11240 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11243 return alloca_call_p (t);
11251 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11252 attempt to fold the expression to a constant without modifying TYPE,
11255 If the expression could be simplified to a constant, then return
11256 the constant. If the expression would not be simplified to a
11257 constant, then return NULL_TREE. */
11260 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11262 tree tem = fold_binary (code, type, op0, op1);
11263 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11266 /* Given the components of a unary expression CODE, TYPE and OP0,
11267 attempt to fold the expression to a constant without modifying
11270 If the expression could be simplified to a constant, then return
11271 the constant. If the expression would not be simplified to a
11272 constant, then return NULL_TREE. */
11275 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11277 tree tem = fold_unary (code, type, op0);
11278 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11281 /* If EXP represents referencing an element in a constant string
11282 (either via pointer arithmetic or array indexing), return the
11283 tree representing the value accessed, otherwise return NULL. */
11286 fold_read_from_constant_string (tree exp)
11288 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11290 tree exp1 = TREE_OPERAND (exp, 0);
11294 if (TREE_CODE (exp) == INDIRECT_REF)
11295 string = string_constant (exp1, &index);
11298 tree low_bound = array_ref_low_bound (exp);
11299 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11301 /* Optimize the special-case of a zero lower bound.
11303 We convert the low_bound to sizetype to avoid some problems
11304 with constant folding. (E.g. suppose the lower bound is 1,
11305 and its mode is QI. Without the conversion, (ARRAY
11306 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11307 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11308 if (! integer_zerop (low_bound))
11309 index = size_diffop (index, fold_convert (sizetype, low_bound));
11315 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11316 && TREE_CODE (string) == STRING_CST
11317 && TREE_CODE (index) == INTEGER_CST
11318 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11319 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11321 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11322 return fold_convert (TREE_TYPE (exp),
11323 build_int_cst (NULL_TREE,
11324 (TREE_STRING_POINTER (string)
11325 [TREE_INT_CST_LOW (index)])));
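/* Illustrative sketch, not part of this file: the kind of access the routine
   above folds.  Indexing a string literal with a constant index can be
   replaced by the character itself, whether written as an array reference or
   as pointer arithmetic.  The function name is hypothetical.  */

static int
constant_string_read_sketch (void)
{
  return "abc"[1] == 'b' && *("abc" + 1) == 'b';   /* always 1 */
}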
11330 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11331 an integer constant or real constant.
11333 TYPE is the type of the result. */
11336 fold_negate_const (tree arg0, tree type)
11338 tree t = NULL_TREE;
11340 switch (TREE_CODE (arg0))
11344 unsigned HOST_WIDE_INT low;
11345 HOST_WIDE_INT high;
11346 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11347 TREE_INT_CST_HIGH (arg0),
11349 t = build_int_cst_wide (type, low, high);
11350 t = force_fit_type (t, 1,
11351 (overflow | TREE_OVERFLOW (arg0))
11352 && !TYPE_UNSIGNED (type),
11353 TREE_CONSTANT_OVERFLOW (arg0));
11358 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11362 gcc_unreachable ();
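/* Illustrative sketch, not part of this file: why negating an INTEGER_CST
   can overflow, shown with the 32-bit minimum value.  In two's complement
   (modular) arithmetic, negating the most negative value yields itself, so
   fold_negate_const and fold_abs_const must track the overflow bit.  The
   function name is hypothetical.  */

static int
negate_min_overflow_sketch (void)
{
  unsigned int min_pattern = 0x80000000u;   /* bit pattern of 32-bit INT_MIN */
  unsigned int negated = 0u - min_pattern;  /* well-defined modular negation */
  return negated == min_pattern;            /* always 1 */
}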
11368 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11369 an integer constant or real constant.
11371 TYPE is the type of the result. */
11374 fold_abs_const (tree arg0, tree type)
11376 tree t = NULL_TREE;
11378 switch (TREE_CODE (arg0))
11381 /* If the value is unsigned, then the absolute value is
11382 the same as the ordinary value. */
11383 if (TYPE_UNSIGNED (type))
11385 /* Similarly, if the value is non-negative. */
11386 else if (INT_CST_LT (integer_minus_one_node, arg0))
11388 /* If the value is negative, then the absolute value is
11392 unsigned HOST_WIDE_INT low;
11393 HOST_WIDE_INT high;
11394 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11395 TREE_INT_CST_HIGH (arg0),
11397 t = build_int_cst_wide (type, low, high);
11398 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11399 TREE_CONSTANT_OVERFLOW (arg0));
11404 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11405 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11411 gcc_unreachable ();
11417 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11418 constant. TYPE is the type of the result. */
11421 fold_not_const (tree arg0, tree type)
11423 tree t = NULL_TREE;
11425 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11427 t = build_int_cst_wide (type,
11428 ~ TREE_INT_CST_LOW (arg0),
11429 ~ TREE_INT_CST_HIGH (arg0));
11430 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11431 TREE_CONSTANT_OVERFLOW (arg0));
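/* Illustrative sketch, not part of this file: why complementing the low and
   high words separately, as fold_not_const does, equals complementing the
   full double-word value.  Assumes a 64-bit unsigned long long; the function
   name is hypothetical.  */

static int
not_double_word_sketch (unsigned int low, unsigned int high)
{
  unsigned long long wide = ((unsigned long long) high << 32) | low;
  unsigned long long by_halves = ((unsigned long long) ~high << 32)
                                 | (unsigned int) ~low;
  return ~wide == by_halves;   /* always 1 */
}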
11436 /* Given CODE, a relational operator, the target type, TYPE and two
11437 constant operands OP0 and OP1, return the result of the
11438 relational operation. If the result is not a compile time
11439 constant, then return NULL_TREE. */
11442 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11444 int result, invert;
11446 /* From here on, the only cases we handle are when the result is
11447 known to be a constant. */
11449 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11451 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11452 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11454 /* Handle the cases where either operand is a NaN. */
11455 if (real_isnan (c0) || real_isnan (c1))
11465 case UNORDERED_EXPR:
11479 if (flag_trapping_math)
11485 gcc_unreachable ();
11488 return constant_boolean_node (result, type);
11491 return constant_boolean_node (real_compare (code, c0, c1), type);
11494 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11496 To compute GT, swap the arguments and do LT.
11497 To compute GE, do LT and invert the result.
11498 To compute LE, swap the arguments, do LT and invert the result.
11499 To compute NE, do EQ and invert the result.
11501 Therefore, the code below must handle only EQ and LT. */
11503 if (code == LE_EXPR || code == GT_EXPR)
11508 code = swap_tree_comparison (code);
11511 /* Note that it is safe to invert for real values here because we
11512 have already handled the one case where it matters. */
11515 if (code == NE_EXPR || code == GE_EXPR)
11518 code = invert_tree_comparison (code, false);
11521 /* Compute a result for LT or EQ if args permit;
11522 Otherwise return T. */
11523 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11525 if (code == EQ_EXPR)
11526 result = tree_int_cst_equal (op0, op1);
11527 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11528 result = INT_CST_LT_UNSIGNED (op0, op1);
11530 result = INT_CST_LT (op0, op1);
11537 return constant_boolean_node (result, type);
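/* Illustrative sketch, not part of this file: the canonicalization above,
   checked on ordinary ints.  Every relation reduces to `<' and `==' by
   swapping the operands and/or inverting the result.  The function name is
   hypothetical.  */

static int
relational_canonicalization_sketch (int a, int b)
{
  int lt = a < b, eq = a == b, swapped_lt = b < a;
  return (a > b) == swapped_lt        /* GT: swap, do LT          */
         && (a >= b) == !lt           /* GE: do LT, invert        */
         && (a <= b) == !swapped_lt   /* LE: swap, do LT, invert  */
         && (a != b) == !eq;          /* NE: do EQ, invert        */
  /* always 1 */
}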
11540 /* Build an expression for a cleanup point containing EXPR with type TYPE.
11541 Don't build a cleanup point expression for EXPR if it doesn't have side
11545 fold_build_cleanup_point_expr (tree type, tree expr)
11547 /* If the expression does not have side effects then we don't have to wrap
11548 it with a cleanup point expression. */
11549 if (!TREE_SIDE_EFFECTS (expr))
11552 /* If the expression is a return, check whether the expression inside the
11553 return, or the right-hand side of the modify expression inside the
11554 return, has no side effects. If either has none, we don't need to
11555 wrap the expression in a cleanup point expression. Note we don't check the
11556 left-hand side of the modify because it should always be a return decl. */
11557 if (TREE_CODE (expr) == RETURN_EXPR)
11559 tree op = TREE_OPERAND (expr, 0);
11560 if (!op || !TREE_SIDE_EFFECTS (op))
11562 op = TREE_OPERAND (op, 1);
11563 if (!TREE_SIDE_EFFECTS (op))
11567 return build1 (CLEANUP_POINT_EXPR, type, expr);
11570 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11571 avoid confusing the gimplify process. */
11574 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11576 /* The size of the object is not relevant when talking about its address. */
11577 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11578 t = TREE_OPERAND (t, 0);
11580 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11581 if (TREE_CODE (t) == INDIRECT_REF
11582 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11584 t = TREE_OPERAND (t, 0);
11585 if (TREE_TYPE (t) != ptrtype)
11586 t = build1 (NOP_EXPR, ptrtype, t);
11592 while (handled_component_p (base))
11593 base = TREE_OPERAND (base, 0);
11595 TREE_ADDRESSABLE (base) = 1;
11597 t = build1 (ADDR_EXPR, ptrtype, t);
11604 build_fold_addr_expr (tree t)
11606 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11609 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11610 of an indirection through OP0, or NULL_TREE if no simplification is
11614 fold_indirect_ref_1 (tree type, tree op0)
11620 subtype = TREE_TYPE (sub);
11621 if (!POINTER_TYPE_P (subtype))
11624 if (TREE_CODE (sub) == ADDR_EXPR)
11626 tree op = TREE_OPERAND (sub, 0);
11627 tree optype = TREE_TYPE (op);
11628 /* *&CONST_DECL -> to the value of the const decl. */
11629 if (TREE_CODE (op) == CONST_DECL)
11630 return DECL_INITIAL (op);
11632 if (type == optype)
11634 /* *(foo *)&fooarray => fooarray[0] */
11635 else if (TREE_CODE (optype) == ARRAY_TYPE
11636 && type == TREE_TYPE (optype))
11638 tree type_domain = TYPE_DOMAIN (optype);
11639 tree min_val = size_zero_node;
11640 if (type_domain && TYPE_MIN_VALUE (type_domain))
11641 min_val = TYPE_MIN_VALUE (type_domain);
11642 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
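/* Illustrative sketch, not part of this file: the *(foo *)&fooarray =>
   fooarray[0] identity above, on a concrete array.  The function name is
   hypothetical.  */

static int
indirect_ref_of_array_sketch (void)
{
  int fooarray[3] = { 7, 8, 9 };
  return *(int *) &fooarray == fooarray[0];   /* always 1 */
}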
11646 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11647 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11648 && type == TREE_TYPE (TREE_TYPE (subtype)))
11651 tree min_val = size_zero_node;
11652 sub = build_fold_indirect_ref (sub);
11653 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11654 if (type_domain && TYPE_MIN_VALUE (type_domain))
11655 min_val = TYPE_MIN_VALUE (type_domain);
11656 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11662 /* Builds an expression for an indirection through T, simplifying some
11666 build_fold_indirect_ref (tree t)
11668 tree type = TREE_TYPE (TREE_TYPE (t));
11669 tree sub = fold_indirect_ref_1 (type, t);
11674 return build1 (INDIRECT_REF, type, t);
11677 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11680 fold_indirect_ref (tree t)
11682 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11690 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11691 whose result is ignored. The type of the returned tree need not be
11692 the same as that of the original expression. */
11695 fold_ignored_result (tree t)
11697 if (!TREE_SIDE_EFFECTS (t))
11698 return integer_zero_node;
11701 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11704 t = TREE_OPERAND (t, 0);
11708 case tcc_comparison:
11709 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11710 t = TREE_OPERAND (t, 0);
11711 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11712 t = TREE_OPERAND (t, 1);
11717 case tcc_expression:
11718 switch (TREE_CODE (t))
11720 case COMPOUND_EXPR:
11721 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11723 t = TREE_OPERAND (t, 0);
11727 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11728 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11730 t = TREE_OPERAND (t, 0);
11743 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11744 This can only be applied to objects of a sizetype. */
11747 round_up (tree value, int divisor)
11749 tree div = NULL_TREE;
11751 gcc_assert (divisor > 0);
11755 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11756 have to do anything. Only do this when we are not given a const,
11757 because in that case, this check is more expensive than just
11759 if (TREE_CODE (value) != INTEGER_CST)
11761 div = build_int_cst (TREE_TYPE (value), divisor);
11763 if (multiple_of_p (TREE_TYPE (value), value, div))
11767 /* If divisor is a power of two, simplify this to bit manipulation. */
11768 if (divisor == (divisor & -divisor))
11772 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11773 value = size_binop (PLUS_EXPR, value, t);
11774 t = build_int_cst (TREE_TYPE (value), -divisor);
11775 value = size_binop (BIT_AND_EXPR, value, t);
11780 div = build_int_cst (TREE_TYPE (value), divisor);
11781 value = size_binop (CEIL_DIV_EXPR, value, div);
11782 value = size_binop (MULT_EXPR, value, div);
11788 /* Likewise, but round down. */
11791 round_down (tree value, int divisor)
11793 tree div = NULL_TREE;
11795 gcc_assert (divisor > 0);
11799 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11800 have to do anything. Only do this when we are not given a const,
11801 because in that case, this check is more expensive than just
11803 if (TREE_CODE (value) != INTEGER_CST)
11805 div = build_int_cst (TREE_TYPE (value), divisor);
11807 if (multiple_of_p (TREE_TYPE (value), value, div))
11811 /* If divisor is a power of two, simplify this to bit manipulation. */
11812 if (divisor == (divisor & -divisor))
11816 t = build_int_cst (TREE_TYPE (value), -divisor);
11817 value = size_binop (BIT_AND_EXPR, value, t);
11822 div = build_int_cst (TREE_TYPE (value), divisor);
11823 value = size_binop (FLOOR_DIV_EXPR, value, div);
11824 value = size_binop (MULT_EXPR, value, div);
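/* Illustrative sketch, not part of this file: the power-of-two shortcut used
   by round_up and round_down above, with divisor 8.  Rounding down masks off
   the low bits; rounding up first adds divisor - 1.  The function name is
   hypothetical.  */

static unsigned int
round_to_multiple_sketch (unsigned int value, int up)
{
  unsigned int mask = (unsigned int) -8;            /* clears the low 3 bits */
  return up ? (value + 7u) & mask : value & mask;
}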
11830 /* Returns the pointer to the base of the object addressed by EXP and
11831 extracts the information about the offset of the access, storing it
11832 in PBITPOS and POFFSET. */
11835 split_address_to_core_and_offset (tree exp,
11836 HOST_WIDE_INT *pbitpos, tree *poffset)
11839 enum machine_mode mode;
11840 int unsignedp, volatilep;
11841 HOST_WIDE_INT bitsize;
11843 if (TREE_CODE (exp) == ADDR_EXPR)
11845 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11846 poffset, &mode, &unsignedp, &volatilep,
11848 core = build_fold_addr_expr (core);
11854 *poffset = NULL_TREE;
11860 /* Returns true if the addresses of E1 and E2 differ by a constant, false
11861 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11864 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11867 HOST_WIDE_INT bitpos1, bitpos2;
11868 tree toffset1, toffset2, tdiff, type;
11870 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11871 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11873 if (bitpos1 % BITS_PER_UNIT != 0
11874 || bitpos2 % BITS_PER_UNIT != 0
11875 || !operand_equal_p (core1, core2, 0))
11878 if (toffset1 && toffset2)
11880 type = TREE_TYPE (toffset1);
11881 if (type != TREE_TYPE (toffset2))
11882 toffset2 = fold_convert (type, toffset2);
11884 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11885 if (!cst_and_fits_in_hwi (tdiff))
11888 *diff = int_cst_value (tdiff);
11890 else if (toffset1 || toffset2)
11892 /* If only one of the offsets is non-constant, the difference cannot
11899 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
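/* Illustrative sketch, not part of this file: the decomposition that
   ptr_difference_const relies on.  Two addresses built from the same core
   object differ only by their constant offsets, so their difference is a
   compile-time constant.  The struct and function names are hypothetical.  */

struct core_and_offset_sketch
{
  int first;
  int second;
};

static long
ptr_difference_sketch (struct core_and_offset_sketch *p)
{
  char *addr1 = (char *) &p->second;
  char *addr2 = (char *) &p->first;
  return (long) (addr1 - addr2);   /* the constant offset of `second' */
}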
11903 /* Simplify the floating point expression EXP when the sign of the
11904 result is not significant. Return NULL_TREE if no simplification
11908 fold_strip_sign_ops (tree exp)
11912 switch (TREE_CODE (exp))
11916 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11917 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11921 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11923 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11924 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11925 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11926 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11927 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11928 arg1 ? arg1 : TREE_OPERAND (exp, 1));