/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
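/* For illustration only (not in the original sources): a caller that
   needs the byte size of a hypothetical 10-element array of 4-byte
   elements could write

       tree bytes = size_binop (MULT_EXPR, size_int (10), size_int (4));

   Both operands and the result are `sizetype' constants, so the
   multiplication folds immediately to a constant tree for 40.  */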
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
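/* For example, COMPCODE_LE is COMPCODE_LT | COMPCODE_EQ (1 | 2 == 3)
   and COMPCODE_NE is COMPCODE_LT | COMPCODE_GT (1 | 4 == 5), so
   bitwise operations on the codes compose the predicates directly:
   COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ, and
   COMPCODE_LT | COMPCODE_GT == COMPCODE_NE.  */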
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
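/* A worked example of the test above, using 8-bit values for brevity:
   a = 0x7f, b = 0x01, sum = 0x80.  Then ~(a ^ b) = ~0x7e = 0x81, whose
   sign bit is set (A and B have the same sign), and a ^ sum = 0xff,
   whose sign bit is also set (A and SUM differ in sign).  Their AND,
   0x81, is negative, so the macro reports the overflow of 127 + 1 in
   signed 8-bit arithmetic.  */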
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
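/* For example, with a 64-bit HOST_WIDE_INT (so BASE == 1 << 32), the
   value x == 0x123456789abcdef0 has HIGHPART (x) == 0x12345678 and
   LOWPART (x) == 0x9abcdef0, and x == LOWPART (x) + HIGHPART (x) * BASE.  */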
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
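/* encode and decode are inverses; a sketch of the round trip, assuming
   a 64-bit HOST_WIDE_INT:

       HOST_WIDE_INT w[4];
       unsigned HOST_WIDE_INT lo;
       HOST_WIDE_INT hi;

       encode (w, 0xdeadbeefcafebabe, 0x0123456789abcdef);
       decode (w, &lo, &hi);
       => lo == 0xdeadbeefcafebabe, hi == 0x0123456789abcdef

   Each w[i] holds one 32-bit digit of the 128-bit value, stored as a
   nonnegative number, which is what the multiword arithmetic below
   relies on.  */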
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
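/* The carry out of the low word is recovered without needing a wider
   type: the unsigned sum l = l1 + l2 wraps modulo
   2**HOST_BITS_PER_WIDE_INT, so l < l1 holds exactly when the addition
   wrapped, and (l < l1) is the carry propagated into the high word.
   E.g. with 8-bit words, 0xf0 + 0x20 yields l == 0x10 with
   0x10 < 0xf0, i.e. a carry of 1.  */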
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
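/* The check works because a product that fits in two signed words must
   have a top half (prod[4] through prod[7]) that is a pure sign
   extension of the low half: after the corrections above for negative
   multiplicands, TOPLOW and TOPHIGH must be all zero bits when *HV is
   nonnegative and all one bits when *HV is negative.  Any other
   pattern makes the return value nonzero.  */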
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
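/* The rounding adjustments above, illustrated on -7 / 2 (exact ratio
   -3.5, trial quotient -3, trial remainder -1):

     TRUNC_DIV_EXPR  ->  -3  (round toward zero; no adjustment)
     FLOOR_DIV_EXPR  ->  -4  (negative ratio with nonzero remainder)
     CEIL_DIV_EXPR   ->  -3  (only positive ratios are bumped up)
     ROUND_DIV_EXPR  ->  -4  (2 * |rem| >= |den|; ties go away from zero)

   After any adjustment the true remainder is recomputed, so that
   num == quo * den + rem always holds on return.  */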
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
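      /* Concretely: for a 32-bit signed type, the only constant rejected
         here is 0x80000000, since negating INT_MIN is not representable;
         VAL is reduced to the type's precision and compared against
         1 << (prec - 1).  */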
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return t;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (t),
                                   TREE_INT_CST_HIGH (t),
                                   &low, &high);
        tem = build_int_2 (low, high);
        TREE_TYPE (tem) = type;
        TREE_OVERFLOW (tem)
          = (TREE_OVERFLOW (t)
             | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
        TREE_CONSTANT_OVERFLOW (tem)
          = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);

        if (! TREE_OVERFLOW (tem)
            || TREE_UNSIGNED (type)
            || ! flag_trapv)
          return tem;
      }
      break;

    case REAL_CST:
      tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
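/* A small example of the decomposition: splitting IN == X - 5 with
   CODE == PLUS_EXPR returns X as the variable part and sets
   *MINUS_LITP to 5; the subtracted literal is recorded rather than
   negated, so a caller can rebuild X - 5 instead of X + (-5).  */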
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t2),
                          fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t1),
                          fold_convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, fold_convert (type, t1),
                    fold_convert (type, t2));
    }

  return fold (build (code, type, fold_convert (type, t1),
                      fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow && ! is_sizetype))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  tree t;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
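/* The complex cases above implement the usual componentwise formulas
   for addition and subtraction and, for the last two cases,

     (r1 + i1*i) * (r2 + i2*i) == (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i

     (r1 + i1*i) / (r2 + i2*i)
       == ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2)

   with MAGSQUARED holding the denominator r2*r2 + i2*i2.  */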
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code of X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, which is also an INTEGER_CST tree node.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}

/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}
/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code ATTRIBUTE_UNUSED, tree type,
                    tree arg1)
{
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;
          int overflow = 0;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          /* If x is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (x))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
              if (! REAL_VALUES_LESS (l, x))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
                  if (! REAL_VALUES_LESS (x, u))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, x);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

static tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold (build1 (NOP_EXPR, type, arg));

  if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (FLOAT_EXPR, type, arg));
      if (TREE_CODE (orig) == REAL_TYPE)
        return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                             type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
    }
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      if (INTEGRAL_TYPE_P (orig)
          || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == REAL_TYPE)
        return build (COMPLEX_EXPR, type,
                      fold_convert (TREE_TYPE (type), arg),
                      fold_convert (TREE_TYPE (type), integer_zero_node));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tree rpart, ipart;

          if (TREE_CODE (arg) == COMPLEX_EXPR)
            {
              rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
              ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
              return fold (build (COMPLEX_EXPR, type, rpart, ipart));
            }

          arg = save_expr (arg);
          rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
          rpart = fold_convert (TREE_TYPE (type), rpart);
          ipart = fold_convert (TREE_TYPE (type), ipart);
          return fold (build (COMPLEX_EXPR, type, rpart, ipart));
        }
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (VOID_TYPE_P (type))
    return fold (build1 (CONVERT_EXPR, type, arg));
  abort ();
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */
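/* For example, LT_EXPR inverts to GE_EXPR, which is only valid when
   neither operand can be a NaN: under IEEE semantics both a < b and
   a >= b are false when either operand is a NaN, so the "inverse"
   would change the result.  NE_EXPR and EQ_EXPR are safe because
   a != b is the exact complement of a == b even for NaNs.  */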
static enum tree_code
invert_tree_comparison (enum tree_code code)

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (enum tree_code code)

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (int code)

/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.

   If ONLY_CONST is nonzero, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence a zero value for
   ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
   If comparing arbitrary expression trees, such as from different
   statements, ONLY_CONST must usually be nonzero.  */
2094 operand_equal_p (tree arg0, tree arg1, int only_const)
2098 /* If both types don't have the same signedness, then we can't consider
2099 them equal. We must check this before the STRIP_NOPS calls
2100 because they may change the signedness of the arguments. */
2101 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2107 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2108 /* This is needed for conversions and for COMPONENT_REF.
2109 Might as well play it safe and always test this. */
2110 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2111 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2112 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2115 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2116 We don't care about side effects in that case because the SAVE_EXPR
2117 takes care of that for us. In all other cases, two expressions are
2118 equal if they have no side effects. If we have two identical
2119 expressions with side effects that should be treated the same due
2120 to the only side effects being identical SAVE_EXPR's, that will
2121 be detected in the recursive calls below. */
2122 if (arg0 == arg1 && ! only_const
2123 && (TREE_CODE (arg0) == SAVE_EXPR
2124 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2127 /* Next handle constant cases, those for which we can return 1 even
2128 if ONLY_CONST is set. */
2129 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2130 switch (TREE_CODE (arg0))
2133 return (! TREE_CONSTANT_OVERFLOW (arg0)
2134 && ! TREE_CONSTANT_OVERFLOW (arg1)
2135 && tree_int_cst_equal (arg0, arg1));
2138 return (! TREE_CONSTANT_OVERFLOW (arg0)
2139 && ! TREE_CONSTANT_OVERFLOW (arg1)
2140 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2141 TREE_REAL_CST (arg1)));
2147 if (TREE_CONSTANT_OVERFLOW (arg0)
2148 || TREE_CONSTANT_OVERFLOW (arg1))
2151 v1 = TREE_VECTOR_CST_ELTS (arg0);
2152 v2 = TREE_VECTOR_CST_ELTS (arg1);
2155 if (!operand_equal_p (v1, v2, only_const))
2157 v1 = TREE_CHAIN (v1);
2158 v2 = TREE_CHAIN (v2);
2165 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2167 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2171 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2172 && ! memcmp (TREE_STRING_POINTER (arg0),
2173 TREE_STRING_POINTER (arg1),
2174 TREE_STRING_LENGTH (arg0)));
2177 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2186 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2189 /* Two conversions are equal only if signedness and modes match. */
2190 switch (TREE_CODE (arg0))
2195 case FIX_TRUNC_EXPR:
2196 case FIX_FLOOR_EXPR:
2197 case FIX_ROUND_EXPR:
2198 if (TREE_UNSIGNED (TREE_TYPE (arg0))
2199 != TREE_UNSIGNED (TREE_TYPE (arg1)))
2206 return operand_equal_p (TREE_OPERAND (arg0, 0),
2207 TREE_OPERAND (arg1, 0), 0);
2211 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2212 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2216 /* For commutative ops, allow the other order. */
2217 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
2218 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
2219 || TREE_CODE (arg0) == BIT_IOR_EXPR
2220 || TREE_CODE (arg0) == BIT_XOR_EXPR
2221 || TREE_CODE (arg0) == BIT_AND_EXPR
2222 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
2223 && operand_equal_p (TREE_OPERAND (arg0, 0),
2224 TREE_OPERAND (arg1, 1), 0)
2225 && operand_equal_p (TREE_OPERAND (arg0, 1),
2226 TREE_OPERAND (arg1, 0), 0));
2229 /* If either of the pointer (or reference) expressions we are
2230 dereferencing contain a side effect, these cannot be equal. */
2231 if (TREE_SIDE_EFFECTS (arg0)
2232 || TREE_SIDE_EFFECTS (arg1))
2235 switch (TREE_CODE (arg0))
2238 return operand_equal_p (TREE_OPERAND (arg0, 0),
2239 TREE_OPERAND (arg1, 0), 0);
2243 case ARRAY_RANGE_REF:
2244 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2245 TREE_OPERAND (arg1, 0), 0)
2246 && operand_equal_p (TREE_OPERAND (arg0, 1),
2247 TREE_OPERAND (arg1, 1), 0));
2250 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2251 TREE_OPERAND (arg1, 0), 0)
2252 && operand_equal_p (TREE_OPERAND (arg0, 1),
2253 TREE_OPERAND (arg1, 1), 0)
2254 && operand_equal_p (TREE_OPERAND (arg0, 2),
2255 TREE_OPERAND (arg1, 2), 0));
2261 switch (TREE_CODE (arg0))
2264 case TRUTH_NOT_EXPR:
2265 return operand_equal_p (TREE_OPERAND (arg0, 0),
2266 TREE_OPERAND (arg1, 0), 0);
2269 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2272 /* If the CALL_EXPRs call different functions, then they
2273 clearly can not be equal. */
2274 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2275 TREE_OPERAND (arg1, 0), 0))
2278 /* Only consider const functions equivalent. */
2279 fndecl = get_callee_fndecl (arg0);
2280 if (fndecl == NULL_TREE
2281 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2284 /* Now see if all the arguments are the same. operand_equal_p
2285 does not handle TREE_LIST, so we walk the operands here
2286 feeding them to operand_equal_p. */
2287 arg0 = TREE_OPERAND (arg0, 1);
2288 arg1 = TREE_OPERAND (arg1, 1);
2289 while (arg0 && arg1)
2291 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2294 arg0 = TREE_CHAIN (arg0);
2295 arg1 = TREE_CHAIN (arg1);
2298 /* If we get here and both argument lists are exhausted
2299 then the CALL_EXPRs are equal. */
2300 return ! (arg0 || arg1);
2307 /* Consider __builtin_sqrt equal to sqrt. */
2308 return TREE_CODE (arg0) == FUNCTION_DECL
2309 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2310 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2311 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
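/* A minimal standalone sketch of the commutative matching done above,
   using a toy expression struct instead of real trees; every name here
   is illustrative only, and every code is assumed commutative for
   brevity.  */

struct toy_expr { int code; struct toy_expr *op0, *op1; };

static int
toy_equal_p (struct toy_expr *a, struct toy_expr *b)
{
  if (a == b)
    return 1;
  if (a == 0 || b == 0 || a->code != b->code)
    return 0;
  /* Try the straight pairing first, then the crossed pairing, just as
     operand_equal_p does for PLUS_EXPR, MULT_EXPR, EQ_EXPR, etc.  */
  return ((toy_equal_p (a->op0, b->op0) && toy_equal_p (a->op1, b->op1))
	  || (toy_equal_p (a->op0, b->op1) && toy_equal_p (a->op1, b->op0)));
}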
2318 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2319 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2321 When in doubt, return 0. */
2324 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2326 int unsignedp1, unsignedpo;
2327 tree primarg0, primarg1, primother;
2328 unsigned int correct_width;
2330 if (operand_equal_p (arg0, arg1, 0))
2333 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2334 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2337 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2338 and see if the inner values are the same. This removes any
2339 signedness comparison, which doesn't matter here. */
2340 primarg0 = arg0, primarg1 = arg1;
2341 STRIP_NOPS (primarg0);
2342 STRIP_NOPS (primarg1);
2343 if (operand_equal_p (primarg0, primarg1, 0))
2346 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2347 actual comparison operand, ARG0.
2349 First throw away any conversions to wider types
2350 already present in the operands. */
2352 primarg1 = get_narrower (arg1, &unsignedp1);
2353 primother = get_narrower (other, &unsignedpo);
2355 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2356 if (unsignedp1 == unsignedpo
2357 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2358 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2360 tree type = TREE_TYPE (arg0);
2362 /* Make sure shorter operand is extended the right way
2363 to match the longer operand. */
2364 primarg1 = fold_convert ((*lang_hooks.types.signed_or_unsigned_type)
2365 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2367 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2374 /* See if ARG is an expression that is either a comparison or is performing
2375 arithmetic on comparisons. The comparisons must only be comparing
2376 two different values, which will be stored in *CVAL1 and *CVAL2; if
2377 they are nonzero it means that some operands have already been found.
2378 No variables may be used anywhere else in the expression except in the
2379 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2380 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2382 If this is true, return 1. Otherwise, return zero. */
2385 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2387 enum tree_code code = TREE_CODE (arg);
2388 char class = TREE_CODE_CLASS (code);
2390 /* We can handle some of the 'e' cases here. */
2391 if (class == 'e' && code == TRUTH_NOT_EXPR)
2393 else if (class == 'e'
2394 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2395 || code == COMPOUND_EXPR))
2398 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2399 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2401 /* If we've already found a CVAL1 or CVAL2, this expression is
2402 too complex to handle. */
2403 if (*cval1 || *cval2)
2413 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2416 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2417 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2418 cval1, cval2, save_p));
2424 if (code == COND_EXPR)
2425 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2426 cval1, cval2, save_p)
2427 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2428 cval1, cval2, save_p)
2429 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2430 cval1, cval2, save_p));
2434 /* First see if we can handle the first operand, then the second. For
2435 the second operand, we know *CVAL1 can't be zero. It must be that
2436 one side of the comparison is each of the values; test for the
2437 case where this isn't true by failing if the two operands are the same. */
2440 if (operand_equal_p (TREE_OPERAND (arg, 0),
2441 TREE_OPERAND (arg, 1), 0))
2445 *cval1 = TREE_OPERAND (arg, 0);
2446 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2448 else if (*cval2 == 0)
2449 *cval2 = TREE_OPERAND (arg, 0);
2450 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2455 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2457 else if (*cval2 == 0)
2458 *cval2 = TREE_OPERAND (arg, 1);
2459 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2471 /* ARG is a tree that is known to contain just arithmetic operations and
2472 comparisons. Evaluate the operations in the tree substituting NEW0 for
2473 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
2477 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2479 tree type = TREE_TYPE (arg);
2480 enum tree_code code = TREE_CODE (arg);
2481 char class = TREE_CODE_CLASS (code);
2483 /* We can handle some of the 'e' cases here. */
2484 if (class == 'e' && code == TRUTH_NOT_EXPR)
2486 else if (class == 'e'
2487 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2493 return fold (build1 (code, type,
2494 eval_subst (TREE_OPERAND (arg, 0),
2495 old0, new0, old1, new1)));
2498 return fold (build (code, type,
2499 eval_subst (TREE_OPERAND (arg, 0),
2500 old0, new0, old1, new1),
2501 eval_subst (TREE_OPERAND (arg, 1),
2502 old0, new0, old1, new1)));
2508 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2511 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2514 return fold (build (code, type,
2515 eval_subst (TREE_OPERAND (arg, 0),
2516 old0, new0, old1, new1),
2517 eval_subst (TREE_OPERAND (arg, 1),
2518 old0, new0, old1, new1),
2519 eval_subst (TREE_OPERAND (arg, 2),
2520 old0, new0, old1, new1)));
2524 /* Fall through - ??? */
2528 tree arg0 = TREE_OPERAND (arg, 0);
2529 tree arg1 = TREE_OPERAND (arg, 1);
2531 /* We need to check both for exact equality and tree equality. The
2532 former will be true if the operand has a side-effect. In that
2533 case, we know the operand occurred exactly once. */
2535 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2537 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2540 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2542 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2545 return fold (build (code, type, arg0, arg1));
2553 /* Return a tree for the case when the result of an expression is RESULT
2554 converted to TYPE and OMITTED was previously an operand of the expression
2555 but is now not needed (e.g., we folded OMITTED * 0).
2557 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2558 the conversion of RESULT to TYPE. */
2561 omit_one_operand (tree type, tree result, tree omitted)
2563 tree t = fold_convert (type, result);
2565 if (TREE_SIDE_EFFECTS (omitted))
2566 return build (COMPOUND_EXPR, type, omitted, t);
2568 return non_lvalue (t);
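/* A standalone sketch, in C source terms, of why the COMPOUND_EXPR
   above is needed: folding "f () * 0" may drop the multiplication but
   must still evaluate f for its side effects, which the comma operator
   expresses.  The function f is hypothetical.  */

static int call_count;
static int f (void) { return ++call_count; }

static int
omit_example (void)
{
  return (f (), 0);	/* what "f () * 0" becomes: evaluate f, yield 0 */
}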
2571 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2574 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2576 tree t = fold_convert (type, result);
2578 if (TREE_SIDE_EFFECTS (omitted))
2579 return build (COMPOUND_EXPR, type, omitted, t);
2581 return pedantic_non_lvalue (t);
2584 /* Return a simplified tree node for the truth-negation of ARG. This
2585 never alters ARG itself. We assume that ARG is an operation that
2586 returns a truth value (0 or 1). */
2589 invert_truthvalue (tree arg)
2591 tree type = TREE_TYPE (arg);
2592 enum tree_code code = TREE_CODE (arg);
2594 if (code == ERROR_MARK)
2597 /* If this is a comparison, we can simply invert it, except for
2598 floating-point non-equality comparisons, in which case we just
2599 enclose a TRUTH_NOT_EXPR around what we have. */
2601 if (TREE_CODE_CLASS (code) == '<')
2603 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2604 && !flag_unsafe_math_optimizations
2607 return build1 (TRUTH_NOT_EXPR, type, arg);
2609 return build (invert_tree_comparison (code), type,
2610 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2616 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2618 case TRUTH_AND_EXPR:
2619 return build (TRUTH_OR_EXPR, type,
2620 invert_truthvalue (TREE_OPERAND (arg, 0)),
2621 invert_truthvalue (TREE_OPERAND (arg, 1)));
2624 return build (TRUTH_AND_EXPR, type,
2625 invert_truthvalue (TREE_OPERAND (arg, 0)),
2626 invert_truthvalue (TREE_OPERAND (arg, 1)));
2628 case TRUTH_XOR_EXPR:
2629 /* Here we can invert either operand. We invert the first operand
2630 unless the second operand is a TRUTH_NOT_EXPR in which case our
2631 result is the XOR of the first operand with the inside of the
2632 negation of the second operand. */
2634 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2635 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2636 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2638 return build (TRUTH_XOR_EXPR, type,
2639 invert_truthvalue (TREE_OPERAND (arg, 0)),
2640 TREE_OPERAND (arg, 1));
2642 case TRUTH_ANDIF_EXPR:
2643 return build (TRUTH_ORIF_EXPR, type,
2644 invert_truthvalue (TREE_OPERAND (arg, 0)),
2645 invert_truthvalue (TREE_OPERAND (arg, 1)));
2647 case TRUTH_ORIF_EXPR:
2648 return build (TRUTH_ANDIF_EXPR, type,
2649 invert_truthvalue (TREE_OPERAND (arg, 0)),
2650 invert_truthvalue (TREE_OPERAND (arg, 1)));
2652 case TRUTH_NOT_EXPR:
2653 return TREE_OPERAND (arg, 0);
2656 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2657 invert_truthvalue (TREE_OPERAND (arg, 1)),
2658 invert_truthvalue (TREE_OPERAND (arg, 2)));
2661 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2662 invert_truthvalue (TREE_OPERAND (arg, 1)));
2664 case WITH_RECORD_EXPR:
2665 return build (WITH_RECORD_EXPR, type,
2666 invert_truthvalue (TREE_OPERAND (arg, 0)),
2667 TREE_OPERAND (arg, 1));
2669 case NON_LVALUE_EXPR:
2670 return invert_truthvalue (TREE_OPERAND (arg, 0));
2675 return build1 (TREE_CODE (arg), type,
2676 invert_truthvalue (TREE_OPERAND (arg, 0)));
2679 if (!integer_onep (TREE_OPERAND (arg, 1)))
2681 return build (EQ_EXPR, type, arg,
2682 fold_convert (type, integer_zero_node));
2685 return build1 (TRUTH_NOT_EXPR, type, arg);
2687 case CLEANUP_POINT_EXPR:
2688 return build1 (CLEANUP_POINT_EXPR, type,
2689 invert_truthvalue (TREE_OPERAND (arg, 0)));
2694 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2696 return build1 (TRUTH_NOT_EXPR, type, arg);
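/* A standalone sketch of two of the inversions above, in C source
   terms, with plain ints standing in for trees.  */

static int
invert_andif_example (int a, int b)
{
  /* !(a && b) becomes (!a || !b), the TRUTH_ANDIF_EXPR case.  */
  return !a || !b;
}

static int
invert_comparison_example (int x, int y)
{
  /* !(x < y) becomes (x >= y) for integral operands, the '<' class
     case; for floating point the possibility of NaN blocks this.  */
  return x >= y;
}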
2699 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2700 operands are another bit-wise operation with a common input. If so,
2701 distribute the bit operations to save an operation and possibly two if
2702 constants are involved. For example, convert
2703 (A | B) & (A | C) into A | (B & C)
2704 Further simplification will occur if B and C are constants.
2706 If this optimization cannot be done, 0 will be returned. */
2709 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2714 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2715 || TREE_CODE (arg0) == code
2716 || (TREE_CODE (arg0) != BIT_AND_EXPR
2717 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2720 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2722 common = TREE_OPERAND (arg0, 0);
2723 left = TREE_OPERAND (arg0, 1);
2724 right = TREE_OPERAND (arg1, 1);
2726 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2728 common = TREE_OPERAND (arg0, 0);
2729 left = TREE_OPERAND (arg0, 1);
2730 right = TREE_OPERAND (arg1, 0);
2732 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2734 common = TREE_OPERAND (arg0, 1);
2735 left = TREE_OPERAND (arg0, 0);
2736 right = TREE_OPERAND (arg1, 1);
2738 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2740 common = TREE_OPERAND (arg0, 1);
2741 left = TREE_OPERAND (arg0, 0);
2742 right = TREE_OPERAND (arg1, 0);
2747 return fold (build (TREE_CODE (arg0), type, common,
2748 fold (build (code, type, left, right))));
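/* A standalone sketch of the distribution above on machine words: the
   bitwise identity (a | b) & (a | c) == a | (b & c) trades two ORs and
   an AND for an OR and an AND, and if b and c are constants the AND
   folds away at compile time.  */

static unsigned
distribute_example (unsigned a, unsigned b, unsigned c)
{
  return a | (b & c);	/* equivalent to (a | b) & (a | c) */
}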
2751 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2752 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2755 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2758 tree result = build (BIT_FIELD_REF, type, inner,
2759 size_int (bitsize), bitsize_int (bitpos));
2761 TREE_UNSIGNED (result) = unsignedp;
2766 /* Optimize a bit-field compare.
2768 There are two cases: First is a compare against a constant and the
2769 second is a comparison of two items where the fields are at the same
2770 bit position relative to the start of a chunk (byte, halfword, word)
2771 large enough to contain it. In these cases we can avoid the shift
2772 implicit in bitfield extractions.
2774 For constants, we emit a compare of the shifted constant with the
2775 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2776 compared. For two fields at the same position, we do the ANDs with the
2777 similar mask and compare the result of the ANDs.
2779 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2780 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2781 are the left and right operands of the comparison, respectively.
2783 If the optimization described above can be done, we return the resulting
2784 tree. Otherwise we return zero. */
2787 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2790 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2791 tree type = TREE_TYPE (lhs);
2792 tree signed_type, unsigned_type;
2793 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2794 enum machine_mode lmode, rmode, nmode;
2795 int lunsignedp, runsignedp;
2796 int lvolatilep = 0, rvolatilep = 0;
2797 tree linner, rinner = NULL_TREE;
2801 /* Get all the information about the extractions being done. If the bit size
2802 is the same as the size of the underlying object, we aren't doing an
2803 extraction at all and so can do nothing. We also don't want to
2804 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2805 then will no longer be able to replace it. */
2806 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2807 &lunsignedp, &lvolatilep);
2808 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2809 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2814 /* If this is not a constant, we can only do something if bit positions,
2815 sizes, and signedness are the same. */
2816 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2817 &runsignedp, &rvolatilep);
2819 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2820 || lunsignedp != runsignedp || offset != 0
2821 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2825 /* See if we can find a mode to refer to this field. We should be able to,
2826 but fail if we can't. */
2827 nmode = get_best_mode (lbitsize, lbitpos,
2828 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2829 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2830 TYPE_ALIGN (TREE_TYPE (rinner))),
2831 word_mode, lvolatilep || rvolatilep);
2832 if (nmode == VOIDmode)
2835 /* Set signed and unsigned types of the precision of this mode for the shifts below. */
2837 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2838 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2840 /* Compute the bit position and size for the new reference and our offset
2841 within it. If the new reference is the same size as the original, we
2842 won't optimize anything, so return zero. */
2843 nbitsize = GET_MODE_BITSIZE (nmode);
2844 nbitpos = lbitpos & ~ (nbitsize - 1);
2846 if (nbitsize == lbitsize)
2849 if (BYTES_BIG_ENDIAN)
2850 lbitpos = nbitsize - lbitsize - lbitpos;
2852 /* Make the mask to be used against the extracted field. */
2853 mask = build_int_2 (~0, ~0);
2854 TREE_TYPE (mask) = unsigned_type;
2855 force_fit_type (mask, 0);
2856 mask = fold_convert (unsigned_type, mask);
2857 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2858 mask = const_binop (RSHIFT_EXPR, mask,
2859 size_int (nbitsize - lbitsize - lbitpos), 0);
2862 /* If not comparing with constant, just rework the comparison and return. */
2864 return build (code, compare_type,
2865 build (BIT_AND_EXPR, unsigned_type,
2866 make_bit_field_ref (linner, unsigned_type,
2867 nbitsize, nbitpos, 1),
2869 build (BIT_AND_EXPR, unsigned_type,
2870 make_bit_field_ref (rinner, unsigned_type,
2871 nbitsize, nbitpos, 1),
2874 /* Otherwise, we are handling the constant case. See if the constant is too
2875 big for the field. Warn and return a tree for 0 (false) if so. We do
2876 this not only for its own sake, but to avoid having to test for this
2877 error case below. If we didn't, we might generate wrong code.
2879 For unsigned fields, the constant shifted right by the field length should
2880 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
2885 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2886 fold_convert (unsigned_type, rhs),
2887 size_int (lbitsize), 0)))
2889 warning ("comparison is always %d due to width of bit-field",
2891 return fold_convert (compare_type,
2893 ? integer_one_node : integer_zero_node));
2898 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2899 size_int (lbitsize - 1), 0);
2900 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2902 warning ("comparison is always %d due to width of bit-field",
2904 return fold_convert (compare_type,
2906 ? integer_one_node : integer_zero_node));
2910 /* Single-bit compares should always be against zero. */
2911 if (lbitsize == 1 && ! integer_zerop (rhs))
2913 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2914 rhs = fold_convert (type, integer_zero_node);
2917 /* Make a new bitfield reference, shift the constant over the
2918 appropriate number of bits and mask it with the computed mask
2919 (in case this was a signed field). If we changed it, make a new one. */
2920 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2923 TREE_SIDE_EFFECTS (lhs) = 1;
2924 TREE_THIS_VOLATILE (lhs) = 1;
2927 rhs = fold (const_binop (BIT_AND_EXPR,
2928 const_binop (LSHIFT_EXPR,
2929 fold_convert (unsigned_type, rhs),
2930 size_int (lbitpos), 0),
2933 return build (code, compare_type,
2934 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
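/* A standalone sketch, in C source terms, of the constant case above:
   comparing a bit-field against a constant without the extraction
   shift.  The field position (bit 3) and width (4 bits) are
   assumptions for illustration.  */

static int
bit_field_compare_example (unsigned word)
{
  unsigned mask = 0xfu << 3;	/* the field's bits within the word */
  unsigned cst = 5u << 3;	/* the constant shifted into place */
  /* Equivalent to extracting the 4-bit field and comparing it with 5,
     but with no right shift in the generated code.  */
  return (word & mask) == cst;
}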
2938 /* Subroutine for fold_truthop: decode a field reference.
2940 If EXP is a comparison reference, we return the innermost reference.
2942 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2943 set to the starting bit number.
2945 If the innermost field can be completely contained in a mode-sized
2946 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2948 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2949 otherwise it is not changed.
2951 *PUNSIGNEDP is set to the signedness of the field.
2953 *PMASK is set to the mask used. This is either contained in a
2954 BIT_AND_EXPR or derived from the width of the field.
2956 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2958 Return 0 if this is not a component reference or is one that we can't
2959 do anything with. */
2962 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2963 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2964 int *punsignedp, int *pvolatilep,
2965 tree *pmask, tree *pand_mask)
2967 tree outer_type = 0;
2969 tree mask, inner, offset;
2971 unsigned int precision;
2973 /* All the optimizations using this function assume integer fields.
2974 There are problems with FP fields since the type_for_size call
2975 below can fail for, e.g., XFmode. */
2976 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2979 /* We are interested in the bare arrangement of bits, so strip everything
2980 that doesn't affect the machine mode. However, record the type of the
2981 outermost expression if it may matter below. */
2982 if (TREE_CODE (exp) == NOP_EXPR
2983 || TREE_CODE (exp) == CONVERT_EXPR
2984 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2985 outer_type = TREE_TYPE (exp);
2988 if (TREE_CODE (exp) == BIT_AND_EXPR)
2990 and_mask = TREE_OPERAND (exp, 1);
2991 exp = TREE_OPERAND (exp, 0);
2992 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2993 if (TREE_CODE (and_mask) != INTEGER_CST)
2997 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2998 punsignedp, pvolatilep);
2999 if ((inner == exp && and_mask == 0)
3000 || *pbitsize < 0 || offset != 0
3001 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3004 /* If the number of bits in the reference is the same as the bitsize of
3005 the outer type, then the outer type gives the signedness. Otherwise
3006 (in case of a small bitfield) the signedness is unchanged. */
3007 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3008 *punsignedp = TREE_UNSIGNED (outer_type);
3010 /* Compute the mask to access the bitfield. */
3011 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
3012 precision = TYPE_PRECISION (unsigned_type);
3014 mask = build_int_2 (~0, ~0);
3015 TREE_TYPE (mask) = unsigned_type;
3016 force_fit_type (mask, 0);
3017 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3018 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3020 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3022 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3023 fold_convert (unsigned_type, and_mask), mask));
3026 *pand_mask = and_mask;
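/* A standalone sketch of the mask computation above: shifting all-ones
   left and then right by the same amount leaves exactly BITSIZE ones in
   the low-order bits.  Assumes a 32-bit unsigned int and
   0 < bitsize <= 32 for illustration.  */

static unsigned
low_mask_example (int bitsize)
{
  int precision = 32;
  unsigned mask = ~0u;
  mask <<= precision - bitsize;	/* clear the low bits */
  mask >>= precision - bitsize;	/* shift the ones back down */
  return mask;			/* e.g. bitsize == 5 yields 0x1f */
}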
3030 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bits. */
3034 all_ones_mask_p (tree mask, int size)
3036 tree type = TREE_TYPE (mask);
3037 unsigned int precision = TYPE_PRECISION (type);
3040 tmask = build_int_2 (~0, ~0);
3041 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
3042 force_fit_type (tmask, 0);
3044 tree_int_cst_equal (mask,
3045 const_binop (RSHIFT_EXPR,
3046 const_binop (LSHIFT_EXPR, tmask,
3047 size_int (precision - size),
3049 size_int (precision - size), 0));
3052 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3053 represents the sign bit of EXP's type. If EXP represents a sign
3054 or zero extension, also test VAL against the unextended type.
3055 The return value is the (sub)expression whose sign bit is VAL,
3056 or NULL_TREE otherwise. */
3059 sign_bit_p (tree exp, tree val)
3061 unsigned HOST_WIDE_INT mask_lo, lo;
3062 HOST_WIDE_INT mask_hi, hi;
3066 /* Tree EXP must have an integral type. */
3067 t = TREE_TYPE (exp);
3068 if (! INTEGRAL_TYPE_P (t))
3071 /* Tree VAL must be an integer constant. */
3072 if (TREE_CODE (val) != INTEGER_CST
3073 || TREE_CONSTANT_OVERFLOW (val))
3076 width = TYPE_PRECISION (t);
3077 if (width > HOST_BITS_PER_WIDE_INT)
3079 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3082 mask_hi = ((unsigned HOST_WIDE_INT) -1
3083 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3089 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3092 mask_lo = ((unsigned HOST_WIDE_INT) -1
3093 >> (HOST_BITS_PER_WIDE_INT - width));
3096 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3097 treat VAL as if it were unsigned. */
3098 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3099 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3102 /* Handle extension from a narrower type. */
3103 if (TREE_CODE (exp) == NOP_EXPR
3104 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3105 return sign_bit_p (TREE_OPERAND (exp, 0), val);
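/* A standalone sketch of the test above for widths of at most one host
   word: VAL is the sign bit of a WIDTH-bit type exactly when, reduced
   to WIDTH bits, it equals 1 << (WIDTH - 1).  Assumes
   0 < width <= 32 for illustration.  */

static int
sign_bit_example (unsigned val, int width)
{
  unsigned mask = width < 32 ? (1u << width) - 1 : ~0u;
  return (val & mask) == 1u << (width - 1);
}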
3110 /* Subroutine for fold_truthop: determine if an operand is simple enough
3111 to be evaluated unconditionally. */
3114 simple_operand_p (tree exp)
3116 /* Strip any conversions that don't change the machine mode. */
3117 while ((TREE_CODE (exp) == NOP_EXPR
3118 || TREE_CODE (exp) == CONVERT_EXPR)
3119 && (TYPE_MODE (TREE_TYPE (exp))
3120 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3121 exp = TREE_OPERAND (exp, 0);
3123 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3125 && ! TREE_ADDRESSABLE (exp)
3126 && ! TREE_THIS_VOLATILE (exp)
3127 && ! DECL_NONLOCAL (exp)
3128 /* Don't regard global variables as simple. They may be
3129 allocated in ways unknown to the compiler (shared memory,
3130 #pragma weak, etc). */
3131 && ! TREE_PUBLIC (exp)
3132 && ! DECL_EXTERNAL (exp)
3133 /* Loading a static variable is unduly expensive, but global
3134 registers aren't expensive. */
3135 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3138 /* The following functions are subroutines to fold_range_test and allow it to
3139 try to change a logical combination of comparisons into a range test.
3142 For example, both X == 2 || X == 3 || X == 4 || X == 5
3146 and X >= 2 && X <= 5 are converted to (unsigned) (X - 2) <= 3.
3148 We describe each set of comparisons as being either inside or outside
3149 a range, using a variable named like IN_P, and then describe the
3150 range with a lower and upper bound. If one of the bounds is omitted,
3151 it represents either the highest or lowest value of the type.
3153 In the comments below, we represent a range by two numbers in brackets
3154 preceded by a "+" to designate being inside that range, or a "-" to
3155 designate being outside that range, so the condition can be inverted by
3156 flipping the prefix. An omitted bound is represented by a "-". For
3157 example, "- [-, 10]" means being outside the range starting at the lowest
3158 possible value and ending at 10, in other words, being greater than 10.
3159 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3162 We set up things so that the missing bounds are handled in a consistent
3163 manner so neither a missing bound nor "true" and "false" need to be
3164 handled using a special case. */
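/* A standalone sketch of the example above: both forms below are
   equivalent to the single unsigned comparison in range_test_example;
   the subtraction biases the range to start at zero, and values below
   the low bound wrap to large unsigned numbers that fail the test.  */

static int
chained_test_example (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
range_test_example (int x)
{
  return (unsigned) (x - 2) <= 3;	/* + [2, 5] as one compare */
}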
3166 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3167 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3168 and UPPER1_P are nonzero if the respective argument is an upper bound
3169 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3170 must be specified for a comparison. ARG1 will be converted to ARG0's
3171 type if both are specified. */
3174 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3175 tree arg1, int upper1_p)
3181 /* If neither arg represents infinity, do the normal operation.
3182 Else, if not a comparison, return infinity. Else handle the special
3183 comparison rules. Note that most of the cases below won't occur, but
3184 are handled for consistency. */
3186 if (arg0 != 0 && arg1 != 0)
3188 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3189 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3191 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3194 if (TREE_CODE_CLASS (code) != '<')
3197 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3198 for neither. In real maths, we cannot assume open ended ranges are
3199 the same. But, this is computer arithmetic, where numbers are finite.
3200 We can therefore make the transformation of any unbounded range with
3201 the value Z, Z being greater than any representable number. This permits
3202 us to treat unbounded ranges as equal. */
3203 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3204 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3208 result = sgn0 == sgn1;
3211 result = sgn0 != sgn1;
3214 result = sgn0 < sgn1;
3217 result = sgn0 <= sgn1;
3220 result = sgn0 > sgn1;
3223 result = sgn0 >= sgn1;
3229 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3232 /* Given EXP, a logical expression, set the range it is testing into
3233 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3234 actually being tested. *PLOW and *PHIGH will be made of the same type
3235 as the returned expression. If EXP is not a comparison, we will most
3236 likely not be returning a useful value and range. */
3239 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3241 enum tree_code code;
3242 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3243 tree orig_type = NULL_TREE;
3245 tree low, high, n_low, n_high;
3247 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3248 and see if we can refine the range. Some of the cases below may not
3249 happen, but it doesn't seem worth worrying about this. We "continue"
3250 the outer loop when we've changed something; otherwise we "break"
3251 the switch, which will "break" the while. */
3254 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3258 code = TREE_CODE (exp);
3260 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3262 if (first_rtl_op (code) > 0)
3263 arg0 = TREE_OPERAND (exp, 0);
3264 if (TREE_CODE_CLASS (code) == '<'
3265 || TREE_CODE_CLASS (code) == '1'
3266 || TREE_CODE_CLASS (code) == '2')
3267 type = TREE_TYPE (arg0);
3268 if (TREE_CODE_CLASS (code) == '2'
3269 || TREE_CODE_CLASS (code) == '<'
3270 || (TREE_CODE_CLASS (code) == 'e'
3271 && TREE_CODE_LENGTH (code) > 1))
3272 arg1 = TREE_OPERAND (exp, 1);
3275 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3276 lose a cast by accident. */
3277 if (type != NULL_TREE && orig_type == NULL_TREE)
3282 case TRUTH_NOT_EXPR:
3283 in_p = ! in_p, exp = arg0;
3286 case EQ_EXPR: case NE_EXPR:
3287 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3288 /* We can only do something if the range is testing for zero
3289 and if the second operand is an integer constant. Note that
3290 saying something is "in" the range we make is done by
3291 complementing IN_P since it will set in the initial case of
3292 being not equal to zero; "out" is leaving it alone. */
3293 if (low == 0 || high == 0
3294 || ! integer_zerop (low) || ! integer_zerop (high)
3295 || TREE_CODE (arg1) != INTEGER_CST)
3300 case NE_EXPR: /* - [c, c] */
3303 case EQ_EXPR: /* + [c, c] */
3304 in_p = ! in_p, low = high = arg1;
3306 case GT_EXPR: /* - [-, c] */
3307 low = 0, high = arg1;
3309 case GE_EXPR: /* + [c, -] */
3310 in_p = ! in_p, low = arg1, high = 0;
3312 case LT_EXPR: /* - [c, -] */
3313 low = arg1, high = 0;
3315 case LE_EXPR: /* + [-, c] */
3316 in_p = ! in_p, low = 0, high = arg1;
3324 /* If this is an unsigned comparison, we also know that EXP is
3325 greater than or equal to zero. We base the range tests we make
3326 on that fact, so we record it here so we can parse existing range tests. */
3328 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3330 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3331 1, fold_convert (type, integer_zero_node),
3335 in_p = n_in_p, low = n_low, high = n_high;
3337 /* If the high bound is missing, but we have a nonzero low
3338 bound, reverse the range so it goes from zero to the low bound minus 1. */
3340 if (high == 0 && low && ! integer_zerop (low))
3343 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3344 integer_one_node, 0);
3345 low = fold_convert (type, integer_zero_node);
3351 /* (-x) IN [a,b] -> x in [-b, -a] */
3352 n_low = range_binop (MINUS_EXPR, type,
3353 fold_convert (type, integer_zero_node),
3355 n_high = range_binop (MINUS_EXPR, type,
3356 fold_convert (type, integer_zero_node),
3358 low = n_low, high = n_high;
3364 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3365 fold_convert (type, integer_one_node));
3368 case PLUS_EXPR: case MINUS_EXPR:
3369 if (TREE_CODE (arg1) != INTEGER_CST)
3372 /* If EXP is signed, any overflow in the computation is undefined,
3373 so we don't worry about it so long as our computations on
3374 the bounds don't overflow. For unsigned, overflow is defined
3375 and this is exactly the right thing. */
3376 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3377 type, low, 0, arg1, 0);
3378 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3379 type, high, 1, arg1, 0);
3380 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3381 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3384 /* Check for an unsigned range which has wrapped around the maximum
3385 value thus making n_high < n_low, and normalize it. */
3386 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3388 low = range_binop (PLUS_EXPR, type, n_high, 0,
3389 integer_one_node, 0);
3390 high = range_binop (MINUS_EXPR, type, n_low, 0,
3391 integer_one_node, 0);
3393 /* If the range is of the form +/- [ x+1, x ], we won't
3394 be able to normalize it. But then, it represents the
3395 whole range or the empty set, so make it +/- [ -, - ]. */
3397 if (tree_int_cst_equal (n_low, low)
3398 && tree_int_cst_equal (n_high, high))
3404 low = n_low, high = n_high;
3409 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3410 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3413 if (! INTEGRAL_TYPE_P (type)
3414 || (low != 0 && ! int_fits_type_p (low, type))
3415 || (high != 0 && ! int_fits_type_p (high, type)))
3418 n_low = low, n_high = high;
3421 n_low = fold_convert (type, n_low);
3424 n_high = fold_convert (type, n_high);
3426 /* If we're converting from an unsigned to a signed type,
3427 we will be doing the comparison as unsigned. The tests above
3428 have already verified that LOW and HIGH are both positive.
3430 So we have to make sure that the original unsigned value will
3431 be interpreted as positive. */
3432 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3434 tree equiv_type = (*lang_hooks.types.type_for_mode)
3435 (TYPE_MODE (type), 1);
3438 /* A range without an upper bound is, naturally, unbounded.
3439 Since convert would have cropped a very large value, use
3440 the max value for the destination type. */
3442 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3443 : TYPE_MAX_VALUE (type);
3445 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3446 high_positive = fold (build (RSHIFT_EXPR, type,
3450 integer_one_node)));
3452 /* If the low bound is specified, "and" the range with the
3453 range for which the original unsigned value will be positive. */
3457 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3458 1, n_low, n_high, 1,
3459 fold_convert (type, integer_zero_node),
3463 in_p = (n_in_p == in_p);
3467 /* Otherwise, "or" the range with the range of the input
3468 that will be interpreted as negative. */
3469 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3470 0, n_low, n_high, 1,
3471 fold_convert (type, integer_zero_node),
3475 in_p = (in_p != n_in_p);
3480 low = n_low, high = n_high;
3490 /* If EXP is a constant, we can evaluate whether this is true or false. */
3491 if (TREE_CODE (exp) == INTEGER_CST)
3493 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3495 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3501 *pin_p = in_p, *plow = low, *phigh = high;
3505 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3506 type, TYPE, return an expression to test if EXP is in (or out of, depending
3507 on IN_P) the range. */
3510 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3512 tree etype = TREE_TYPE (exp);
3516 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3517 return invert_truthvalue (value);
3519 if (low == 0 && high == 0)
3520 return fold_convert (type, integer_one_node);
3523 return fold (build (LE_EXPR, type, exp, high));
3526 return fold (build (GE_EXPR, type, exp, low));
3528 if (operand_equal_p (low, high, 0))
3529 return fold (build (EQ_EXPR, type, exp, low));
3531 if (integer_zerop (low))
3533 if (! TREE_UNSIGNED (etype))
3535 etype = (*lang_hooks.types.unsigned_type) (etype);
3536 high = fold_convert (etype, high);
3537 exp = fold_convert (etype, exp);
3539 return build_range_check (type, exp, 1, 0, high);
3542 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3543 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3545 unsigned HOST_WIDE_INT lo;
3549 /* For enums the comparison will be done in the underlying type,
3550 so using enum's precision is wrong here.
3551 Consider e.g. enum { A, B, C, D, E }, low == B and high == D. */
3552 if (TREE_CODE (etype) == ENUMERAL_TYPE)
3553 prec = GET_MODE_BITSIZE (TYPE_MODE (etype));
3555 prec = TYPE_PRECISION (etype);
3556 if (prec <= HOST_BITS_PER_WIDE_INT)
3559 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3563 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3564 lo = (unsigned HOST_WIDE_INT) -1;
3567 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3569 if (TREE_UNSIGNED (etype))
3571 etype = (*lang_hooks.types.signed_type) (etype);
3572 exp = fold_convert (etype, exp);
3574 return fold (build (GT_EXPR, type, exp,
3575 fold_convert (etype, integer_zero_node)));
3579 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3580 && ! TREE_OVERFLOW (value))
3581 return build_range_check (type,
3582 fold (build (MINUS_EXPR, etype, exp, low)),
3583 1, fold_convert (etype, integer_zero_node),
3589 /* Given two ranges, see if we can merge them into one. Return 1 if we
3590 can, 0 if we can't. Set the output range into the specified parameters. */
3593 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3594 tree high0, int in1_p, tree low1, tree high1)
3602 int lowequal = ((low0 == 0 && low1 == 0)
3603 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3604 low0, 0, low1, 0)));
3605 int highequal = ((high0 == 0 && high1 == 0)
3606 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3607 high0, 1, high1, 1)));
3609 /* Make range 0 be the range that starts first, or ends last if they
3610 start at the same value. Swap them if it isn't. */
3611 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3614 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3615 high1, 1, high0, 1))))
3617 temp = in0_p, in0_p = in1_p, in1_p = temp;
3618 tem = low0, low0 = low1, low1 = tem;
3619 tem = high0, high0 = high1, high1 = tem;
3622 /* Now flag two cases, whether the ranges are disjoint or whether the
3623 second range is totally subsumed in the first. Note that the tests
3624 below are simplified by the ones above. */
3625 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3626 high0, 1, low1, 0));
3627 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3628 high1, 1, high0, 1));
3630 /* We now have four cases, depending on whether we are including or
3631 excluding the two ranges. */
3634 /* If they don't overlap, the result is false. If the second range
3635 is a subset it is the result. Otherwise, the range is from the start
3636 of the second to the end of the first. */
3638 in_p = 0, low = high = 0;
3640 in_p = 1, low = low1, high = high1;
3642 in_p = 1, low = low1, high = high0;
3645 else if (in0_p && ! in1_p)
3647 /* If they don't overlap, the result is the first range. If they are
3648 equal, the result is false. If the second range is a subset of the
3649 first, and the ranges begin at the same place, we go from just after
3650 the end of the first range to the end of the second. If the second
3651 range is not a subset of the first, or if it is a subset and both
3652 ranges end at the same place, the range starts at the start of the
3653 first range and ends just before the second range.
3654 Otherwise, we can't describe this as a single range. */
3656 in_p = 1, low = low0, high = high0;
3657 else if (lowequal && highequal)
3658 in_p = 0, low = high = 0;
3659 else if (subset && lowequal)
3661 in_p = 1, high = high0;
3662 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3663 integer_one_node, 0);
3665 else if (! subset || highequal)
3667 in_p = 1, low = low0;
3668 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3669 integer_one_node, 0);
3675 else if (! in0_p && in1_p)
3677 /* If they don't overlap, the result is the second range. If the second
3678 is a subset of the first, the result is false. Otherwise,
3679 the range starts just after the first range and ends at the
3680 end of the second. */
3682 in_p = 1, low = low1, high = high1;
3683 else if (subset || highequal)
3684 in_p = 0, low = high = 0;
3687 in_p = 1, high = high1;
3688 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3689 integer_one_node, 0);
3695 /* The case where we are excluding both ranges. Here the complex case
3696 is if they don't overlap. In that case, the only time we have a
3697 range is if they are adjacent. If the second is a subset of the
3698 first, the result is the first. Otherwise, the range to exclude
3699 starts at the beginning of the first range and ends at the end of the
3703 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3704 range_binop (PLUS_EXPR, NULL_TREE,
3706 integer_one_node, 1),
3708 in_p = 0, low = low0, high = high1;
3713 in_p = 0, low = low0, high = high0;
3715 in_p = 0, low = low0, high = high1;
3718 *pin_p = in_p, *plow = low, *phigh = high;
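/* A standalone sketch of the in0_p && in1_p case above: two inclusive
   ranges that overlap merge to the span from the later start to the
   earlier end.  The concrete bounds are illustrative.  */

static int
merged_range_example (int x)
{
  /* (x >= 2 && x <= 8) && (x >= 4 && x <= 10) merges to + [4, 8].  */
  return x >= 4 && x <= 8;
}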
3722 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3723 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3726 /* EXP is some logical combination of boolean tests. See if we can
3727 merge it into some range test. Return the new tree if so. */
3730 fold_range_test (tree exp)
3732 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3733 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3734 int in0_p, in1_p, in_p;
3735 tree low0, low1, low, high0, high1, high;
3736 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3737 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3740 /* If this is an OR operation, invert both sides; we will invert
3741 again at the end. */
3743 in0_p = ! in0_p, in1_p = ! in1_p;
3745 /* If both expressions are the same, if we can merge the ranges, and we
3746 can build the range test, return it or it inverted. If one of the
3747 ranges is always true or always false, consider it to be the same
3748 expression as the other. */
3749 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3750 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3752 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3754 : rhs != 0 ? rhs : integer_zero_node,
3756 return or_op ? invert_truthvalue (tem) : tem;
3758 /* On machines where the branch cost is expensive, if this is a
3759 short-circuited branch and the underlying object on both sides
3760 is the same, make a non-short-circuit operation. */
3761 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3762 && lhs != 0 && rhs != 0
3763 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3764 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3765 && operand_equal_p (lhs, rhs, 0))
3767 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3768 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3769 which cases we can't do this. */
3770 if (simple_operand_p (lhs))
3771 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3772 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3773 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3774 TREE_OPERAND (exp, 1));
3776 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3777 && ! CONTAINS_PLACEHOLDER_P (lhs))
3779 tree common = save_expr (lhs);
3781 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3782 or_op ? ! in0_p : in0_p,
3784 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3785 or_op ? ! in1_p : in1_p,
3787 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3788 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3789 TREE_TYPE (exp), lhs, rhs);
3796 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3797 bit value. Arrange things so the extra bits will be set to zero if and
3798 only if C is sign-extended to its full width. If MASK is nonzero,
3799 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3802 unextend (tree c, int p, int unsignedp, tree mask)
3804 tree type = TREE_TYPE (c);
3805 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3808 if (p == modesize || unsignedp)
3811 /* We work by getting just the sign bit into the low-order bit, then
3812 into the high-order bit, then sign-extend. We then XOR that value with C. */
3814 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3815 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3817 /* We must use a signed type in order to get an arithmetic right shift.
3818 However, we must also avoid introducing accidental overflows, so that
3819 a subsequent call to integer_zerop will work. Hence we must
3820 do the type conversion here. At this point, the constant is either
3821 zero or one, and the conversion to a signed type can never overflow.
3822 We could get an overflow if this conversion is done anywhere else. */
3823 if (TREE_UNSIGNED (type))
3824 temp = fold_convert ((*lang_hooks.types.signed_type) (type), temp);
3826 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3827 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3829 temp = const_binop (BIT_AND_EXPR, temp,
3830 fold_convert (TREE_TYPE (c), mask), 0);
3831 /* If necessary, convert the type back to match the type of C. */
3832 if (TREE_UNSIGNED (type))
3833 temp = fold_convert (type, temp);
3835 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
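/* A standalone sketch of P-bit sign extension in plain C, the
   operation whose effect on the high-order bits unextend compensates
   for.  Assumes 0 < p < 32; the XOR-and-subtract form copies the P-bit
   sign bit into all higher positions.  */

static int
sign_extend_example (unsigned c, int p)
{
  unsigned m = 1u << (p - 1);	/* the sign bit of a P-bit value */
  return (int) ((c ^ m) - m);
}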
3838 /* Find ways of folding logical expressions of LHS and RHS:
3839 Try to merge two comparisons to the same innermost item.
3840 Look for range tests like "ch >= '0' && ch <= '9'".
3841 Look for combinations of simple terms on machines with expensive branches
3842 and evaluate the RHS unconditionally.
3844 For example, if we have p->a == 2 && p->b == 4 and we can make an
3845 object large enough to span both A and B, we can do this with a comparison
3846 against the object ANDed with a mask.
3848 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3849 operations to do this with one comparison.
3851 We check for both normal comparisons and the BIT_AND_EXPRs made by
3852 this function and the one above.
3854 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3855 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3857 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
3860 We return the simplified tree or 0 if no optimization is possible. */
3863 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3865 /* If this is the "or" of two comparisons, we can do something if
3866 the comparisons are NE_EXPR. If this is the "and", we can do something
3867 if the comparisons are EQ_EXPR. I.e.,
3868 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3870 WANTED_CODE is this operation code. For single bit fields, we can
3871 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3872 comparison for one-bit fields. */
3874 enum tree_code wanted_code;
3875 enum tree_code lcode, rcode;
3876 tree ll_arg, lr_arg, rl_arg, rr_arg;
3877 tree ll_inner, lr_inner, rl_inner, rr_inner;
3878 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3879 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3880 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3881 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3882 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3883 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3884 enum machine_mode lnmode, rnmode;
3885 tree ll_mask, lr_mask, rl_mask, rr_mask;
3886 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3887 tree l_const, r_const;
3888 tree lntype, rntype, result;
3889 int first_bit, end_bit;
3892 /* Start by getting the comparison codes. Fail if anything is volatile.
3893 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3894 it were surrounded with a NE_EXPR. */
3896 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3899 lcode = TREE_CODE (lhs);
3900 rcode = TREE_CODE (rhs);
3902 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3903 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3905 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3906 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3908 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3911 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3912 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3914 ll_arg = TREE_OPERAND (lhs, 0);
3915 lr_arg = TREE_OPERAND (lhs, 1);
3916 rl_arg = TREE_OPERAND (rhs, 0);
3917 rr_arg = TREE_OPERAND (rhs, 1);
3919 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3920 if (simple_operand_p (ll_arg)
3921 && simple_operand_p (lr_arg)
3922 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3926 if (operand_equal_p (ll_arg, rl_arg, 0)
3927 && operand_equal_p (lr_arg, rr_arg, 0))
3929 int lcompcode, rcompcode;
3931 lcompcode = comparison_to_compcode (lcode);
3932 rcompcode = comparison_to_compcode (rcode);
3933 compcode = (code == TRUTH_AND_EXPR)
3934 ? lcompcode & rcompcode
3935 : lcompcode | rcompcode;
3937 else if (operand_equal_p (ll_arg, rr_arg, 0)
3938 && operand_equal_p (lr_arg, rl_arg, 0))
3940 int lcompcode, rcompcode;
3942 rcode = swap_tree_comparison (rcode);
3943 lcompcode = comparison_to_compcode (lcode);
3944 rcompcode = comparison_to_compcode (rcode);
3945 compcode = (code == TRUTH_AND_EXPR)
3946 ? lcompcode & rcompcode
3947 : lcompcode | rcompcode;
3952 if (compcode == COMPCODE_TRUE)
3953 return fold_convert (truth_type, integer_one_node);
3954 else if (compcode == COMPCODE_FALSE)
3955 return fold_convert (truth_type, integer_zero_node);
3956 else if (compcode != -1)
3957 return build (compcode_to_comparison (compcode),
3958 truth_type, ll_arg, lr_arg);
3961 /* If the RHS can be evaluated unconditionally and its operands are
3962 simple, it wins to evaluate the RHS unconditionally on machines
3963 with expensive branches. In this case, this isn't a comparison
3964 that can be merged. Avoid doing this if the RHS is a floating-point
3965 comparison since those can trap. */
3967 if (BRANCH_COST >= 2
3968 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3969 && simple_operand_p (rl_arg)
3970 && simple_operand_p (rr_arg))
3972 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3973 if (code == TRUTH_OR_EXPR
3974 && lcode == NE_EXPR && integer_zerop (lr_arg)
3975 && rcode == NE_EXPR && integer_zerop (rr_arg)
3976 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3977 return build (NE_EXPR, truth_type,
3978 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3982 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3983 if (code == TRUTH_AND_EXPR
3984 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3985 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3986 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3987 return build (EQ_EXPR, truth_type,
3988 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3992 return build (code, truth_type, lhs, rhs);
3995 /* See if the comparisons can be merged. Then get all the parameters for each side. */
3998 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3999 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4003 ll_inner = decode_field_reference (ll_arg,
4004 &ll_bitsize, &ll_bitpos, &ll_mode,
4005 &ll_unsignedp, &volatilep, &ll_mask,
4007 lr_inner = decode_field_reference (lr_arg,
4008 &lr_bitsize, &lr_bitpos, &lr_mode,
4009 &lr_unsignedp, &volatilep, &lr_mask,
4011 rl_inner = decode_field_reference (rl_arg,
4012 &rl_bitsize, &rl_bitpos, &rl_mode,
4013 &rl_unsignedp, &volatilep, &rl_mask,
4015 rr_inner = decode_field_reference (rr_arg,
4016 &rr_bitsize, &rr_bitpos, &rr_mode,
4017 &rr_unsignedp, &volatilep, &rr_mask,
4020 /* The inner operation on the lhs of each comparison must be the same
4021 if we are to be able to do anything. Then see if we have constants.
4022 If not, the same must be true for the rhs. */
4024 if (volatilep || ll_inner == 0 || rl_inner == 0
4025 || ! operand_equal_p (ll_inner, rl_inner, 0))
4028 if (TREE_CODE (lr_arg) == INTEGER_CST
4029 && TREE_CODE (rr_arg) == INTEGER_CST)
4030 l_const = lr_arg, r_const = rr_arg;
4031 else if (lr_inner == 0 || rr_inner == 0
4032 || ! operand_equal_p (lr_inner, rr_inner, 0))
4035 l_const = r_const = 0;
4037 /* If either comparison code is not correct for our logical operation,
4038 fail. However, we can convert a one-bit comparison against zero into
4039 the opposite comparison against that bit being set in the field. */
4041 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4042 if (lcode != wanted_code)
4044 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4046 /* Make the left operand unsigned, since we are only interested
4047 in the value of one bit. Otherwise we are doing the wrong thing below. */
4056 /* This is analogous to the code for l_const above. */
4057 if (rcode != wanted_code)
4059 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4068 /* After this point all optimizations will generate bit-field
4069 references, which we might not want. */
4070 if (! (*lang_hooks.can_use_bit_fields_p) ())
4073 /* See if we can find a mode that contains both fields being compared on
4074 the left. If we can't, fail. Otherwise, update all constants and masks
4075 to be relative to a field of that size. */
4076 first_bit = MIN (ll_bitpos, rl_bitpos);
4077 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4078 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4079 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4081 if (lnmode == VOIDmode)
4084 lnbitsize = GET_MODE_BITSIZE (lnmode);
4085 lnbitpos = first_bit & ~ (lnbitsize - 1);
4086 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
4087 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4089 if (BYTES_BIG_ENDIAN)
4091 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4092 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4095 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4096 size_int (xll_bitpos), 0);
4097 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4098 size_int (xrl_bitpos), 0);
4102 l_const = fold_convert (lntype, l_const);
4103 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4104 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4105 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4106 fold (build1 (BIT_NOT_EXPR,
4110 warning ("comparison is always %d", wanted_code == NE_EXPR);
4112 return fold_convert (truth_type,
4113 wanted_code == NE_EXPR
4114 ? integer_one_node : integer_zero_node);
4119 r_const = fold_convert (lntype, r_const);
4120 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4121 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4122 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4123 fold (build1 (BIT_NOT_EXPR,
4127 warning ("comparison is always %d", wanted_code == NE_EXPR);
4129 return fold_convert (truth_type,
4130 wanted_code == NE_EXPR
4131 ? integer_one_node : integer_zero_node);
4135 /* If the right sides are not constant, do the same for it. Also,
4136 disallow this optimization if a size or signedness mismatch occurs
4137 between the left and right sides. */
4140 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4141 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4142 /* Make sure the two fields on the right
4143 correspond to the left without being swapped. */
4144 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4147 first_bit = MIN (lr_bitpos, rr_bitpos);
4148 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4149 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4150 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4152 if (rnmode == VOIDmode)
4155 rnbitsize = GET_MODE_BITSIZE (rnmode);
4156 rnbitpos = first_bit & ~ (rnbitsize - 1);
4157 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
4158 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4160 if (BYTES_BIG_ENDIAN)
4162 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4163 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4166 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4167 size_int (xlr_bitpos), 0);
4168 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4169 size_int (xrr_bitpos), 0);
4171 /* Make a mask that corresponds to both fields being compared.
4172 Do this for both items being compared. If the operands are the
4173 same size and the bits being compared are in the same position
4174 then we can do this by masking both and comparing the masked results. */
4176 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4177 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4178 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4180 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4181 ll_unsignedp || rl_unsignedp);
4182 if (! all_ones_mask_p (ll_mask, lnbitsize))
4183 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4185 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4186 lr_unsignedp || rr_unsignedp);
4187 if (! all_ones_mask_p (lr_mask, rnbitsize))
4188 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4190 return build (wanted_code, truth_type, lhs, rhs);
4193 /* There is still another way we can do something: If both pairs of
4194 fields being compared are adjacent, we may be able to make a wider
4195 field containing them both.
4197 Note that we still must mask the lhs/rhs expressions. Furthermore,
4198 the mask must be shifted to account for the shift done by
4199 make_bit_field_ref. */
4200 if ((ll_bitsize + ll_bitpos == rl_bitpos
4201 && lr_bitsize + lr_bitpos == rr_bitpos)
4202 || (ll_bitpos == rl_bitpos + rl_bitsize
4203 && lr_bitpos == rr_bitpos + rr_bitsize))
4207 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4208 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4209 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4210 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4212 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4213 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4214 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4215 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4217 /* Convert to the smaller type before masking out unwanted bits. */
4219 if (lntype != rntype)
4221 if (lnbitsize > rnbitsize)
4223 lhs = fold_convert (rntype, lhs);
4224 ll_mask = fold_convert (rntype, ll_mask);
4227 else if (lnbitsize < rnbitsize)
4229 rhs = fold_convert (lntype, rhs);
4230 lr_mask = fold_convert (lntype, lr_mask);
4235 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4236 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4238 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4239 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4241 return build (wanted_code, truth_type, lhs, rhs);
4247 /* Handle the case of comparisons with constants. If there is something in
4248 common between the masks, those bits of the constants must be the same.
4249 If not, an `and' is always false and an `or' is always true. Test for this to avoid generating
4250 incorrect code below. */
4251 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4252 if (! integer_zerop (result)
4253 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4254 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4256 if (wanted_code == NE_EXPR)
4258 warning ("`or' of unmatched not-equal tests is always 1");
4259 return fold_convert (truth_type, integer_one_node);
4263 warning ("`and' of mutually exclusive equal-tests is always 0");
4264 return fold_convert (truth_type, integer_zero_node);
4268 /* Construct the expression we will return. First get the component
4269 reference we will make. Unless the mask is all ones the width of
4270 that field, perform the mask operation. Then compare with the merged constant. */
4272 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4273 ll_unsignedp || rl_unsignedp);
4275 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4276 if (! all_ones_mask_p (ll_mask, lnbitsize))
4277 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4279 return build (wanted_code, truth_type, result,
4280 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
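/* An illustrative, standalone sketch (not part of this file): the net
   effect of the code above is to turn two bit-field tests joined by
   "&&" or "||" into one masked compare of the containing word.  The
   field layout below (A in bits 0..2, B in bits 3..7) is an assumption
   for the demo; the real code derives it via the masks and positions
   computed above.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned a = 5, b = 17;                 /* the two field values  */
  unsigned word = a | (b << 3);           /* the containing "word"  */
  unsigned mask = 0x7u | (0x1fu << 3);    /* ll_mask | rl_mask  */
  unsigned cst = 5u | (17u << 3);         /* l_const | r_const  */

  /* One load, one AND and one compare replace two of each.  */
  assert (((word & mask) == cst) == (a == 5 && b == 17));
  return 0;
}
#endif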
4283 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. */
4287 optimize_minmax_comparison (tree t)
4289 tree type = TREE_TYPE (t);
4290 tree arg0 = TREE_OPERAND (t, 0);
4291 enum tree_code op_code;
4292 tree comp_const = TREE_OPERAND (t, 1);
4294 int consts_equal, consts_lt;
4297 STRIP_SIGN_NOPS (arg0);
4299 op_code = TREE_CODE (arg0);
4300 minmax_const = TREE_OPERAND (arg0, 1);
4301 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4302 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4303 inner = TREE_OPERAND (arg0, 0);
4305 /* If something does not permit us to optimize, return the original tree. */
4306 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4307 || TREE_CODE (comp_const) != INTEGER_CST
4308 || TREE_CONSTANT_OVERFLOW (comp_const)
4309 || TREE_CODE (minmax_const) != INTEGER_CST
4310 || TREE_CONSTANT_OVERFLOW (minmax_const))
4313 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4314 and GT_EXPR, doing the rest with recursive calls using logical simplifications. */
4316 switch (TREE_CODE (t))
4318 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4320 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4324 fold (build (TRUTH_ORIF_EXPR, type,
4325 optimize_minmax_comparison
4326 (build (EQ_EXPR, type, arg0, comp_const)),
4327 optimize_minmax_comparison
4328 (build (GT_EXPR, type, arg0, comp_const))));
4331 if (op_code == MAX_EXPR && consts_equal)
4332 /* MAX (X, 0) == 0 -> X <= 0 */
4333 return fold (build (LE_EXPR, type, inner, comp_const));
4335 else if (op_code == MAX_EXPR && consts_lt)
4336 /* MAX (X, 0) == 5 -> X == 5 */
4337 return fold (build (EQ_EXPR, type, inner, comp_const));
4339 else if (op_code == MAX_EXPR)
4340 /* MAX (X, 0) == -1 -> false */
4341 return omit_one_operand (type, integer_zero_node, inner);
4343 else if (consts_equal)
4344 /* MIN (X, 0) == 0 -> X >= 0 */
4345 return fold (build (GE_EXPR, type, inner, comp_const));
4348 /* MIN (X, 0) == 5 -> false */
4349 return omit_one_operand (type, integer_zero_node, inner);
4352 /* MIN (X, 0) == -1 -> X == -1 */
4353 return fold (build (EQ_EXPR, type, inner, comp_const));
4356 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4357 /* MAX (X, 0) > 0 -> X > 0
4358 MAX (X, 0) > 5 -> X > 5 */
4359 return fold (build (GT_EXPR, type, inner, comp_const));
4361 else if (op_code == MAX_EXPR)
4362 /* MAX (X, 0) > -1 -> true */
4363 return omit_one_operand (type, integer_one_node, inner);
4365 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4366 /* MIN (X, 0) > 0 -> false
4367 MIN (X, 0) > 5 -> false */
4368 return omit_one_operand (type, integer_zero_node, inner);
4371 /* MIN (X, 0) > -1 -> X > -1 */
4372 return fold (build (GT_EXPR, type, inner, comp_const));
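/* An illustrative, standalone sketch (not part of this file) of a few of
   the MIN/MAX identities handled above, checked over a small range.  */
#if 0
#include <assert.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))
#define MIN(a, b) ((a) < (b) ? (a) : (b))

int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      assert ((MAX (x, 0) == 0) == (x <= 0));   /* consts_equal case  */
      assert ((MAX (x, 0) == 5) == (x == 5));   /* consts_lt case  */
      assert ((MAX (x, 0) > 0) == (x > 0));     /* GT_EXPR, consts_equal  */
      assert ((MIN (x, 0) > -1) == (x > -1));   /* GT_EXPR, comp_const < 0  */
    }
  return 0;
}
#endif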
4379 /* T is an integer expression that is being multiplied or divided by, or
4380 reduced modulo, a constant C (CODE says which operation and what kind of
4381 divide or modulus). See if we can eliminate that operation by folding it with
4382 other operations already in T. WIDE_TYPE, if non-null, is a type that
4383 should be used for the computation if wider than our type.
4385 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4386 (X * 2) + (Y * 4). We must, however, be assured that either the original
4387 expression would not overflow or that overflow is undefined for the type
4388 in the language in question.
4390 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4391 the machine has a multiply-accumulate insn or that this is part of an
4392 addressing calculation.
4394 If we return a non-null expression, it is an equivalent form of the
4395 original computation, but need not be in the original type. */
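/* An illustrative, standalone sketch (not part of this file) of the two
   identities described above, checked for a few small values.  Both
   rely on the arithmetic not overflowing, which is exactly the
   precondition stated in the comment.  */
#if 0
#include <assert.h>

int
main (void)
{
  long x, y;
  for (x = -3; x <= 3; x++)
    for (y = -3; y <= 3; y++)
      {
        /* Dividing (X * 8) + (Y * 16) by 4 gives (X * 2) + (Y * 4).  */
        assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
        /* (X + 7) * 4 canonicalizes to X * 4 + 28.  */
        assert ((x + 7) * 4 == x * 4 + 28);
      }
  return 0;
}
#endif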
4398 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4400 /* To avoid exponential search depth, refuse to allow recursion past
4401 three levels. Beyond that (1) it's highly unlikely that we'll find
4402 something interesting and (2) we've probably processed it before
4403 when we built the inner expression. */
4412 ret = extract_muldiv_1 (t, c, code, wide_type);
4419 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4421 tree type = TREE_TYPE (t);
4422 enum tree_code tcode = TREE_CODE (t);
4423 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4424 > GET_MODE_SIZE (TYPE_MODE (type)))
4425 ? wide_type : type);
4427 int same_p = tcode == code;
4428 tree op0 = NULL_TREE, op1 = NULL_TREE;
4430 /* Don't deal with constants of zero here; they confuse the code below. */
4431 if (integer_zerop (c))
4434 if (TREE_CODE_CLASS (tcode) == '1')
4435 op0 = TREE_OPERAND (t, 0);
4437 if (TREE_CODE_CLASS (tcode) == '2')
4438 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4440 /* Note that we need not handle conditional operations here since fold
4441 already handles those cases. So just do arithmetic here. */
4445 /* For a constant, we can always simplify if we are a multiply
4446 or (for divide and modulus) if it is a multiple of our constant. */
4447 if (code == MULT_EXPR
4448 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4449 return const_binop (code, fold_convert (ctype, t),
4450 fold_convert (ctype, c), 0);
4453 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4454 /* If op0 is an expression ... */
4455 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4456 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4457 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4458 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4459 /* ... and is unsigned, and its type is smaller than ctype,
4460 then we cannot pass through as widening. */
4461 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4462 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4463 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4464 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4465 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4466 /* ... or this is a truncation (t is narrower than op0),
4467 then we cannot pass through this narrowing. */
4468 || (GET_MODE_SIZE (TYPE_MODE (type))
4469 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4470 /* ... or signedness changes for division or modulus,
4471 then we cannot pass through this conversion. */
4472 || (code != MULT_EXPR
4473 && (TREE_UNSIGNED (ctype)
4474 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4477 /* Pass the constant down and see if we can make a simplification. If
4478 we can, replace this expression with the inner simplification for
4479 possible later conversion to our or some other type. */
4480 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4481 && TREE_CODE (t2) == INTEGER_CST
4482 && ! TREE_CONSTANT_OVERFLOW (t2)
4483 && (0 != (t1 = extract_muldiv (op0, t2, code,
4485 ? ctype : NULL_TREE))))
4489 case NEGATE_EXPR: case ABS_EXPR:
4490 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4491 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4494 case MIN_EXPR: case MAX_EXPR:
4495 /* If widening the type changes the signedness, then we can't perform
4496 this optimization as that changes the result. */
4497 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4500 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4501 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4502 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4504 if (tree_int_cst_sgn (c) < 0)
4505 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4507 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4508 fold_convert (ctype, t2)));
4512 case WITH_RECORD_EXPR:
4513 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4514 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4515 TREE_OPERAND (t, 1));
4518 case LSHIFT_EXPR: case RSHIFT_EXPR:
4519 /* If the second operand is constant, this is a multiplication
4520 or floor division by a power of two, so we can treat it that
4521 way unless the multiplier or divisor overflows. */
4522 if (TREE_CODE (op1) == INTEGER_CST
4523 /* const_binop may not detect overflow correctly,
4524 so check for it explicitly here. */
4525 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4526 && TREE_INT_CST_HIGH (op1) == 0
4527 && 0 != (t1 = fold_convert (ctype,
4528 const_binop (LSHIFT_EXPR,
4531 && ! TREE_OVERFLOW (t1))
4532 return extract_muldiv (build (tcode == LSHIFT_EXPR
4533 ? MULT_EXPR : FLOOR_DIV_EXPR,
4534 ctype, fold_convert (ctype, op0), t1),
4535 c, code, wide_type);
4538 case PLUS_EXPR: case MINUS_EXPR:
4539 /* See if we can eliminate the operation on both sides. If we can, we
4540 can return a new PLUS or MINUS. If we can't, the only remaining
4541 cases where we can do anything are if the second operand is a constant. */
4543 t1 = extract_muldiv (op0, c, code, wide_type);
4544 t2 = extract_muldiv (op1, c, code, wide_type);
4545 if (t1 != 0 && t2 != 0
4546 && (code == MULT_EXPR
4547 /* If not multiplication, we can only do this if both operands
4548 are divisible by c. */
4549 || (multiple_of_p (ctype, op0, c)
4550 && multiple_of_p (ctype, op1, c))))
4551 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4552 fold_convert (ctype, t2)));
4554 /* If this was a subtraction, negate OP1 and set it to be an addition.
4555 This simplifies the logic below. */
4556 if (tcode == MINUS_EXPR)
4557 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4559 if (TREE_CODE (op1) != INTEGER_CST)
4562 /* If either OP1 or C is negative, this optimization is not safe for
4563 some of the division and remainder types while for others we need
4564 to change the code. */
4565 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4567 if (code == CEIL_DIV_EXPR)
4568 code = FLOOR_DIV_EXPR;
4569 else if (code == FLOOR_DIV_EXPR)
4570 code = CEIL_DIV_EXPR;
4571 else if (code != MULT_EXPR
4572 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4576 /* If it's a multiply or a division/modulus operation of a multiple
4577 of our constant, do the operation and verify it doesn't overflow. */
4578 if (code == MULT_EXPR
4579 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4581 op1 = const_binop (code, fold_convert (ctype, op1),
4582 fold_convert (ctype, c), 0);
4583 /* We allow the constant to overflow with wrapping semantics. */
4585 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4591 /* If we have an unsigned type that is not a sizetype, we cannot widen
4592 the operation since it will change the result if the original
4593 computation overflowed. */
4594 if (TREE_UNSIGNED (ctype)
4595 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4599 /* If we were able to eliminate our operation from the first side,
4600 apply our operation to the second side and reform the PLUS. */
4601 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4602 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4604 /* The last case is if we are a multiply. In that case, we can
4605 apply the distributive law to commute the multiply and addition
4606 if the multiplication of the constants doesn't overflow. */
4607 if (code == MULT_EXPR)
4608 return fold (build (tcode, ctype,
4609 fold (build (code, ctype,
4610 fold_convert (ctype, op0),
4611 fold_convert (ctype, c))),
4617 /* We have a special case here if we are doing something like
4618 (C * 8) % 4 since we know that's zero. */
4619 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4620 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4621 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4622 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4623 return omit_one_operand (type, integer_zero_node, op0);
4625 /* ... fall through ... */
4627 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4628 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4629 /* If we can extract our operation from the LHS, do so and return a
4630 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4631 do something only if the second operand is a constant. */
4633 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4634 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4635 fold_convert (ctype, op1)));
4636 else if (tcode == MULT_EXPR && code == MULT_EXPR
4637 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4638 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4639 fold_convert (ctype, t1)));
4640 else if (TREE_CODE (op1) != INTEGER_CST)
4643 /* If these are the same operation types, we can associate them
4644 assuming no overflow. */
4646 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4647 fold_convert (ctype, c), 0))
4648 && ! TREE_OVERFLOW (t1))
4649 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4651 /* If these operations "cancel" each other, we have the main
4652 optimizations of this pass, which occur when either constant is a
4653 multiple of the other, in which case we replace this with an
4654 operation in either CODE or TCODE.
4656 If we have an unsigned type that is not a sizetype, we cannot do
4657 this since it will change the result if the original computation overflowed. */
4659 if ((! TREE_UNSIGNED (ctype)
4660 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4662 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4663 || (tcode == MULT_EXPR
4664 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4665 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4667 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4668 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4669 fold_convert (ctype,
4670 const_binop (TRUNC_DIV_EXPR,
4672 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4673 return fold (build (code, ctype, fold_convert (ctype, op0),
4674 fold_convert (ctype,
4675 const_binop (TRUNC_DIV_EXPR,
4687 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4688 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4689 that we may sometimes modify the tree. */
4692 strip_compound_expr (tree t, tree s)
4694 enum tree_code code = TREE_CODE (t);
4696 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4697 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4698 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4699 return TREE_OPERAND (t, 1);
4701 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4702 don't bother handling any other types. */
4703 else if (code == COND_EXPR)
4705 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4706 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4707 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4709 else if (TREE_CODE_CLASS (code) == '1')
4710 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4711 else if (TREE_CODE_CLASS (code) == '<'
4712 || TREE_CODE_CLASS (code) == '2')
4714 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4715 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4721 /* Return a node which has the indicated constant VALUE (either 0 or
4722 1), and is of the indicated TYPE. */
4725 constant_boolean_node (int value, tree type)
4727 if (type == integer_type_node)
4728 return value ? integer_one_node : integer_zero_node;
4729 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4730 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4734 tree t = build_int_2 (value, 0);
4736 TREE_TYPE (t) = type;
4741 /* Utility function for the following routine, to see how complex a nesting of
4742 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4743 we don't care (to avoid spending too much time on complex expressions). */
4746 count_cond (tree expr, int lim)
4750 if (TREE_CODE (expr) != COND_EXPR)
4755 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4756 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4757 return MIN (lim, 1 + ctrue + cfalse);
4760 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4761 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4762 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4763 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4764 COND is the first argument to CODE; otherwise (as in the example
4765 given here), it is the second argument. TYPE is the type of the
4766 original expression. */
4769 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4770 tree cond, tree arg, int cond_first_p)
4772 tree test, true_value, false_value;
4773 tree lhs = NULL_TREE;
4774 tree rhs = NULL_TREE;
4775 /* In the end, we'll produce a COND_EXPR. Both arms of the
4776 conditional expression will be binary operations. The left-hand
4777 side of the expression to be executed if the condition is true
4778 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4779 of the expression to be executed if the condition is true will be
4780 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4781 but apply to the expression to be executed if the conditional is
4787 /* These are the codes to use for the left-hand side and right-hand
4788 side of the COND_EXPR. Normally, they are the same as CODE. */
4789 enum tree_code lhs_code = code;
4790 enum tree_code rhs_code = code;
4791 /* And these are the types of the expressions. */
4792 tree lhs_type = type;
4793 tree rhs_type = type;
4798 true_rhs = false_rhs = &arg;
4799 true_lhs = &true_value;
4800 false_lhs = &false_value;
4804 true_lhs = false_lhs = &arg;
4805 true_rhs = &true_value;
4806 false_rhs = &false_value;
4809 if (TREE_CODE (cond) == COND_EXPR)
4811 test = TREE_OPERAND (cond, 0);
4812 true_value = TREE_OPERAND (cond, 1);
4813 false_value = TREE_OPERAND (cond, 2);
4814 /* If this operand is a throw expression, then it does not make
4815 sense to try to perform a logical or arithmetic operation
4816 involving it. Instead of building `a + throw 3' for example,
4817 we simply build `a, throw 3'. */
4818 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4822 lhs_code = COMPOUND_EXPR;
4823 lhs_type = void_type_node;
4828 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4832 rhs_code = COMPOUND_EXPR;
4833 rhs_type = void_type_node;
4841 tree testtype = TREE_TYPE (cond);
4843 true_value = fold_convert (testtype, integer_one_node);
4844 false_value = fold_convert (testtype, integer_zero_node);
4847 /* If ARG is complex we want to make sure we only evaluate it once. Though
4848 this is only required if it is volatile, it might be more efficient even
4849 if it is not. However, if we succeed in folding one part to a constant,
4850 we do not need to make this SAVE_EXPR. Since we do this optimization
4851 primarily to see if we do end up with a constant and this SAVE_EXPR
4852 interferes with later optimizations, suppressing it when we can is important.
4855 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4856 do so. Don't try to see if the result is a constant if an arm is a
4857 COND_EXPR since we get exponential behavior in that case. */
4859 if (saved_expr_p (arg))
4861 else if (lhs == 0 && rhs == 0
4862 && !TREE_CONSTANT (arg)
4863 && (*lang_hooks.decls.global_bindings_p) () == 0
4864 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4865 || TREE_SIDE_EFFECTS (arg)))
4867 if (TREE_CODE (true_value) != COND_EXPR)
4868 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4870 if (TREE_CODE (false_value) != COND_EXPR)
4871 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4873 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4874 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4876 arg = save_expr (arg);
4878 save = saved_expr_p (arg);
4883 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4885 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4887 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4889 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4890 ahead of the COND_EXPR we made. Otherwise we would have it only
4891 evaluated in one branch, with the other branch using the result
4892 but missing the evaluation code. Beware that the save_expr call
4893 above might not return a SAVE_EXPR, so testing the TREE_CODE
4894 of ARG is not enough to decide here. */
4896 return build (COMPOUND_EXPR, type,
4897 fold_convert (void_type_node, arg),
4898 strip_compound_expr (test, arg));
4900 return fold_convert (type, test);
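/* An illustrative, standalone sketch (not part of this file) of the
   distribution performed above, written out at the source level.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a = 40, x = 2, y = 7, b;
  for (b = 0; b <= 1; b++)
    assert (a + (b ? x : y) == (b ? a + x : a + y));
  /* A comparison is handled like a two-way conditional.  */
  assert (a + (x < y) == ((x < y) ? a + 1 : a + 0));
  return 0;
}
#endif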
4904 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4906 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4907 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4908 ADDEND is the same as X.
4910 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4911 and finite. The problematic cases are when X is zero, and its mode
4912 has signed zeros. In the case of rounding towards -infinity,
4913 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4914 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4917 fold_real_zero_addition_p (tree type, tree addend, int negate)
4919 if (!real_zerop (addend))
4922 /* Don't allow the fold with -fsignaling-nans. */
4923 if (HONOR_SNANS (TYPE_MODE (type)))
4926 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4927 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4930 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4931 if (TREE_CODE (addend) == REAL_CST
4932 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4935 /* The mode has signed zeros, and we have to honor their sign.
4936 In this situation, there is only one case we can return true for.
4937 X - 0 is the same as X unless rounding towards -infinity is in effect. */
4939 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
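/* An illustrative, standalone sketch (not part of this file) of the two
   problematic cases named above, observable through C99 <fenv.h>.  It
   assumes IEEE signed zeros and a target where fesetround takes effect
   without further pragmas.  */
#if 0
#include <assert.h>
#include <fenv.h>
#include <math.h>

int
main (void)
{
  volatile double pz = 0.0, nz = -0.0;

  /* Round-to-nearest: -0 + 0 is +0, so X + 0 is not X when X is -0.  */
  assert (!signbit (nz + pz));

  /* Rounding towards -infinity: 0 - 0 is -0, so X - 0 is not X.  */
  fesetround (FE_DOWNWARD);
  assert (signbit (pz - pz));
  return 0;
}
#endif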
4942 /* Subroutine of fold() that checks comparisons of built-in math
4943 functions against real constants.
4945 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4946 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4947 is the type of the result and ARG0 and ARG1 are the operands of the
4948 comparison. ARG1 must be a TREE_REAL_CST.
4950 The function returns the constant folded tree if a simplification
4951 can be made, and NULL_TREE otherwise. */
4954 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4955 tree type, tree arg0, tree arg1)
4959 if (fcode == BUILT_IN_SQRT
4960 || fcode == BUILT_IN_SQRTF
4961 || fcode == BUILT_IN_SQRTL)
4963 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4964 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4966 c = TREE_REAL_CST (arg1);
4967 if (REAL_VALUE_NEGATIVE (c))
4969 /* sqrt(x) < y (and == y, <= y) is always false, if y is negative. */
4970 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4971 return omit_one_operand (type,
4972 fold_convert (type, integer_zero_node),
4975 /* sqrt(x) > y is always true, if y is negative and we
4976 don't care about NaNs, i.e. negative values of x. */
4977 if (code == NE_EXPR || !HONOR_NANS (mode))
4978 return omit_one_operand (type,
4979 fold_convert (type, integer_one_node),
4982 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4983 return fold (build (GE_EXPR, type, arg,
4984 build_real (TREE_TYPE (arg), dconst0)));
4986 else if (code == GT_EXPR || code == GE_EXPR)
4990 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4991 real_convert (&c2, mode, &c2);
4993 if (REAL_VALUE_ISINF (c2))
4995 /* sqrt(x) > y is x == +Inf, when y is very large. */
4996 if (HONOR_INFINITIES (mode))
4997 return fold (build (EQ_EXPR, type, arg,
4998 build_real (TREE_TYPE (arg), c2)));
5000 /* sqrt(x) > y is always false, when y is very large
5001 and we don't care about infinities. */
5002 return omit_one_operand (type,
5003 fold_convert (type, integer_zero_node),
5007 /* sqrt(x) > c is the same as x > c*c. */
5008 return fold (build (code, type, arg,
5009 build_real (TREE_TYPE (arg), c2)));
5011 else if (code == LT_EXPR || code == LE_EXPR)
5015 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5016 real_convert (&c2, mode, &c2);
5018 if (REAL_VALUE_ISINF (c2))
5020 /* sqrt(x) < y is always true, when y is a very large
5021 value and we don't care about NaNs or Infinities. */
5022 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5023 return omit_one_operand (type,
5024 fold_convert (type, integer_one_node),
5027 /* sqrt(x) < y is x != +Inf when y is very large and we
5028 don't care about NaNs. */
5029 if (! HONOR_NANS (mode))
5030 return fold (build (NE_EXPR, type, arg,
5031 build_real (TREE_TYPE (arg), c2)));
5033 /* sqrt(x) < y is x >= 0 when y is very large and we
5034 don't care about Infinities. */
5035 if (! HONOR_INFINITIES (mode))
5036 return fold (build (GE_EXPR, type, arg,
5037 build_real (TREE_TYPE (arg), dconst0)));
5039 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5040 if ((*lang_hooks.decls.global_bindings_p) () != 0
5041 || CONTAINS_PLACEHOLDER_P (arg))
5044 arg = save_expr (arg);
5045 return fold (build (TRUTH_ANDIF_EXPR, type,
5046 fold (build (GE_EXPR, type, arg,
5047 build_real (TREE_TYPE (arg),
5049 fold (build (NE_EXPR, type, arg,
5050 build_real (TREE_TYPE (arg),
5054 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5055 if (! HONOR_NANS (mode))
5056 return fold (build (code, type, arg,
5057 build_real (TREE_TYPE (arg), c2)));
5059 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5060 if ((*lang_hooks.decls.global_bindings_p) () == 0
5061 && ! CONTAINS_PLACEHOLDER_P (arg))
5063 arg = save_expr (arg);
5064 return fold (build (TRUTH_ANDIF_EXPR, type,
5065 fold (build (GE_EXPR, type, arg,
5066 build_real (TREE_TYPE (arg),
5068 fold (build (code, type, arg,
5069 build_real (TREE_TYPE (arg),
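/* An illustrative, standalone sketch (not part of this file) of the
   central rewrite above: for a nonnegative constant c whose square does
   not overflow, sqrt(x) > c agrees with x > c*c as long as x is not a
   NaN.  The test values are assumptions chosen for the demo.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double c = 3.0, c2 = c * c;
  double xs[] = { 0.0, 8.9, 9.0, 9.1, 1e300 };
  int i;
  for (i = 0; i < 5; i++)
    assert ((sqrt (xs[i]) > c) == (xs[i] > c2));
  return 0;
}
#endif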
5078 /* Subroutine of fold() that optimizes comparisons against Infinities,
5079 either +Inf or -Inf.
5081 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5082 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5083 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5085 The function returns the constant folded tree if a simplification
5086 can be made, and NULL_TREE otherwise. */
5089 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5091 enum machine_mode mode;
5092 REAL_VALUE_TYPE max;
5096 mode = TYPE_MODE (TREE_TYPE (arg0));
5098 /* For negative infinity swap the sense of the comparison. */
5099 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5101 code = swap_tree_comparison (code);
5106 /* x > +Inf is always false, if we ignore sNaNs. */
5107 if (HONOR_SNANS (mode))
5109 return omit_one_operand (type,
5110 fold_convert (type, integer_zero_node),
5114 /* x <= +Inf is always true, if we don't care about NaNs. */
5115 if (! HONOR_NANS (mode))
5116 return omit_one_operand (type,
5117 fold_convert (type, integer_one_node),
5120 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5121 if ((*lang_hooks.decls.global_bindings_p) () == 0
5122 && ! CONTAINS_PLACEHOLDER_P (arg0))
5124 arg0 = save_expr (arg0);
5125 return fold (build (EQ_EXPR, type, arg0, arg0));
5131 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5132 real_maxval (&max, neg, mode);
5133 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5134 arg0, build_real (TREE_TYPE (arg0), max)));
5137 /* x < +Inf is always equal to x <= DBL_MAX. */
5138 real_maxval (&max, neg, mode);
5139 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5140 arg0, build_real (TREE_TYPE (arg0), max)));
5143 /* x != +Inf is always equal to !(x > DBL_MAX). */
5144 real_maxval (&max, neg, mode);
5145 if (! HONOR_NANS (mode))
5146 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5147 arg0, build_real (TREE_TYPE (arg0), max)));
5148 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5149 arg0, build_real (TREE_TYPE (arg0), max)));
5150 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
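/* An illustrative, standalone sketch (not part of this file): because no
   finite double lies strictly between DBL_MAX and +Inf, comparisons
   against +Inf can be recast as comparisons against DBL_MAX.  NaNs,
   which the code above treats separately, are left out of the demo.  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double inf = HUGE_VAL;        /* +Inf on IEEE targets  */
  double xs[] = { -HUGE_VAL, 0.0, DBL_MAX, HUGE_VAL };
  int i;
  for (i = 0; i < 4; i++)
    {
      assert ((xs[i] == inf) == (xs[i] > DBL_MAX));    /* EQ_EXPR case  */
      assert ((xs[i] < inf) == (xs[i] <= DBL_MAX));    /* LT_EXPR case  */
    }
  return 0;
}
#endif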
5159 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5160 equality/inequality test, then return a simplified form of
5161 the test using shifts and logical operations. Otherwise return
5162 NULL. TYPE is the desired result type. */
5165 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5168 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside operand 0. */
5170 if (code == TRUTH_NOT_EXPR)
5172 code = TREE_CODE (arg0);
5173 if (code != NE_EXPR && code != EQ_EXPR)
5176 /* Extract the arguments of the EQ/NE. */
5177 arg1 = TREE_OPERAND (arg0, 1);
5178 arg0 = TREE_OPERAND (arg0, 0);
5180 /* This requires us to invert the code. */
5181 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5184 /* If this is testing a single bit, we can optimize the test. */
5185 if ((code == NE_EXPR || code == EQ_EXPR)
5186 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5187 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5189 tree inner = TREE_OPERAND (arg0, 0);
5190 tree type = TREE_TYPE (arg0);
5191 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5192 enum machine_mode operand_mode = TYPE_MODE (type);
5194 tree signed_type, unsigned_type, intermediate_type;
5197 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5198 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5199 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5200 if (arg00 != NULL_TREE
5201 /* This is only a win if casting to a signed type is cheap,
5202 i.e. when arg00's type is not a partial mode. */
5203 && TYPE_PRECISION (TREE_TYPE (arg00))
5204 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5206 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
5207 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5208 fold_convert (stype, arg00),
5209 fold_convert (stype, integer_zero_node)));
5212 /* Otherwise we have (A & C) != 0 where C is a single bit,
5213 convert that into ((A >> C2) & 1), where C2 = log2(C).
5214 Similarly for (A & C) == 0. */
5216 /* If INNER is a right shift of a constant and it plus BITNUM does
5217 not overflow, adjust BITNUM and INNER. */
5218 if (TREE_CODE (inner) == RSHIFT_EXPR
5219 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5220 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5221 && bitnum < TYPE_PRECISION (type)
5222 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5223 bitnum - TYPE_PRECISION (type)))
5225 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5226 inner = TREE_OPERAND (inner, 0);
5229 /* If we are going to be able to omit the AND below, we must do our
5230 operations as unsigned. If we must use the AND, we have a choice.
5231 Normally unsigned is faster, but for some machines signed is. */
5232 #ifdef LOAD_EXTEND_OP
5233 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5238 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
5239 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
5240 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5241 inner = fold_convert (intermediate_type, inner);
5244 inner = build (RSHIFT_EXPR, intermediate_type,
5245 inner, size_int (bitnum));
5247 if (code == EQ_EXPR)
5248 inner = build (BIT_XOR_EXPR, intermediate_type,
5249 inner, integer_one_node);
5251 /* Put the AND last so it can combine with more things. */
5252 inner = build (BIT_AND_EXPR, intermediate_type,
5253 inner, integer_one_node);
5255 /* Make sure to return the proper type. */
5256 inner = fold_convert (result_type, inner);
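/* An illustrative, standalone sketch (not part of this file): for a
   single-bit mask C == 1 << N, (A & C) != 0 is ((A >> N) & 1), and the
   EQ form just XORs the extracted bit with 1, as built above.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned a, n = 5;
  for (a = 0; a < 256; a++)
    {
      assert (((a & (1u << n)) != 0) == ((a >> n) & 1));
      assert (((a & (1u << n)) == 0) == (((a >> n) & 1) ^ 1));
    }
  return 0;
}
#endif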
5263 /* Check whether we are allowed to reorder operands arg0 and arg1,
5264 such that the evaluation of arg1 occurs before arg0. */
5267 reorder_operands_p (tree arg0, tree arg1)
5269 if (! flag_evaluation_order)
5271 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5273 return ! TREE_SIDE_EFFECTS (arg0)
5274 && ! TREE_SIDE_EFFECTS (arg1);
5277 /* Test whether it is preferable to swap two operands, ARG0 and
5278 ARG1, for example because ARG0 is an integer constant and ARG1
5279 isn't. If REORDER is true, only recommend swapping if we can
5280 evaluate the operands in reverse order. */
5283 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5285 STRIP_SIGN_NOPS (arg0);
5286 STRIP_SIGN_NOPS (arg1);
5288 if (TREE_CODE (arg1) == INTEGER_CST)
5290 if (TREE_CODE (arg0) == INTEGER_CST)
5293 if (TREE_CODE (arg1) == REAL_CST)
5295 if (TREE_CODE (arg0) == REAL_CST)
5298 if (TREE_CODE (arg1) == COMPLEX_CST)
5300 if (TREE_CODE (arg0) == COMPLEX_CST)
5303 if (TREE_CONSTANT (arg1))
5305 if (TREE_CONSTANT (arg0))
5311 if (reorder && flag_evaluation_order
5312 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5323 /* Perform constant folding and related simplification of EXPR.
5324 The related simplifications include x*1 => x, x*0 => 0, etc.,
5325 and application of the associative law.
5326 NOP_EXPR conversions may be removed freely (as long as we
5327 are careful not to change the C type of the overall expression).
5328 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5329 but we can constant-fold them if they have constant operands. */
5331 #ifdef ENABLE_FOLD_CHECKING
5332 # define fold(x) fold_1 (x)
5333 static tree fold_1 (tree);
5339 tree t = expr, orig_t;
5340 tree t1 = NULL_TREE;
5342 tree type = TREE_TYPE (expr);
5343 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5344 enum tree_code code = TREE_CODE (t);
5345 int kind = TREE_CODE_CLASS (code);
5347 /* WINS will be nonzero when the switch is done
5348 if all operands are constant. */
5351 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5352 Likewise for a SAVE_EXPR that's already been evaluated. */
5353 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5356 /* Return right away if a constant. */
5362 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5366 /* Special case for conversion ops that can have fixed point args. */
5367 arg0 = TREE_OPERAND (t, 0);
5369 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5371 STRIP_SIGN_NOPS (arg0);
5373 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5374 subop = TREE_REALPART (arg0);
5378 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5379 && TREE_CODE (subop) != REAL_CST)
5380 /* Note that TREE_CONSTANT isn't enough:
5381 static var addresses are constant but we can't
5382 do arithmetic on them. */
5385 else if (IS_EXPR_CODE_CLASS (kind))
5387 int len = first_rtl_op (code);
5389 for (i = 0; i < len; i++)
5391 tree op = TREE_OPERAND (t, i);
5395 continue; /* Valid for CALL_EXPR, at least. */
5397 if (kind == '<' || code == RSHIFT_EXPR)
5399 /* Signedness matters here. Perhaps we can refine this later. */
5401 STRIP_SIGN_NOPS (op);
5404 /* Strip any conversions that don't change the mode. */
5407 if (TREE_CODE (op) == COMPLEX_CST)
5408 subop = TREE_REALPART (op);
5412 if (TREE_CODE (subop) != INTEGER_CST
5413 && TREE_CODE (subop) != REAL_CST)
5414 /* Note that TREE_CONSTANT isn't enough:
5415 static var addresses are constant but we can't
5416 do arithmetic on them. */
5426 /* If this is a commutative operation, and ARG0 is a constant, move it
5427 to ARG1 to reduce the number of tests below. */
5428 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5429 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5430 || code == BIT_AND_EXPR)
5431 && tree_swap_operands_p (arg0, arg1, true))
5432 return fold (build (code, type, TREE_OPERAND (t, 1),
5433 TREE_OPERAND (t, 0)));
5435 /* Now WINS is set as described above,
5436 ARG0 is the first operand of EXPR,
5437 and ARG1 is the second operand (if it has more than one operand).
5439 First check for cases where an arithmetic operation is applied to a
5440 compound, conditional, or comparison operation. Push the arithmetic
5441 operation inside the compound or conditional to see if any folding
5442 can then be done. Convert comparison to conditional for this purpose.
5443 This also optimizes non-constant cases that used to be done in expand_expr.
5446 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
5447 one of the operands is a comparison and the other is a comparison, a
5448 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5449 code below would make the expression more complex. Change it to a
5450 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5451 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5453 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5454 || code == EQ_EXPR || code == NE_EXPR)
5455 && ((truth_value_p (TREE_CODE (arg0))
5456 && (truth_value_p (TREE_CODE (arg1))
5457 || (TREE_CODE (arg1) == BIT_AND_EXPR
5458 && integer_onep (TREE_OPERAND (arg1, 1)))))
5459 || (truth_value_p (TREE_CODE (arg1))
5460 && (truth_value_p (TREE_CODE (arg0))
5461 || (TREE_CODE (arg0) == BIT_AND_EXPR
5462 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5464 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5465 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5469 if (code == EQ_EXPR)
5470 t = invert_truthvalue (t);
5475 if (TREE_CODE_CLASS (code) == '1')
5477 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5478 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5479 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5480 else if (TREE_CODE (arg0) == COND_EXPR)
5482 tree arg01 = TREE_OPERAND (arg0, 1);
5483 tree arg02 = TREE_OPERAND (arg0, 2);
5484 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5485 arg01 = fold (build1 (code, type, arg01));
5486 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5487 arg02 = fold (build1 (code, type, arg02));
5488 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5491 /* If this was a conversion, and all we did was to move into
5492 inside the COND_EXPR, bring it back out. But leave it if
5493 it is a conversion from integer to integer and the
5494 result precision is no wider than a word since such a
5495 conversion is cheap and may be optimized away by combine,
5496 while it couldn't if it were outside the COND_EXPR. Then return
5497 so we don't get into an infinite recursion loop taking the
5498 conversion out and then back in. */
5500 if ((code == NOP_EXPR || code == CONVERT_EXPR
5501 || code == NON_LVALUE_EXPR)
5502 && TREE_CODE (t) == COND_EXPR
5503 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5504 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5505 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5506 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5507 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5508 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5509 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5511 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5512 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5513 t = build1 (code, type,
5515 TREE_TYPE (TREE_OPERAND
5516 (TREE_OPERAND (t, 1), 0)),
5517 TREE_OPERAND (t, 0),
5518 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5519 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5522 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5523 return fold (build (COND_EXPR, type, arg0,
5524 fold (build1 (code, type, integer_one_node)),
5525 fold (build1 (code, type, integer_zero_node))));
5527 else if (TREE_CODE_CLASS (code) == '<'
5528 && TREE_CODE (arg0) == COMPOUND_EXPR)
5529 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5530 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5531 else if (TREE_CODE_CLASS (code) == '<'
5532 && TREE_CODE (arg1) == COMPOUND_EXPR)
5533 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5534 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5535 else if (TREE_CODE_CLASS (code) == '2'
5536 || TREE_CODE_CLASS (code) == '<')
5538 if (TREE_CODE (arg1) == COMPOUND_EXPR
5539 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5540 && ! TREE_SIDE_EFFECTS (arg0))
5541 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5542 fold (build (code, type,
5543 arg0, TREE_OPERAND (arg1, 1))));
5544 else if ((TREE_CODE (arg1) == COND_EXPR
5545 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5546 && TREE_CODE_CLASS (code) != '<'))
5547 && (TREE_CODE (arg0) != COND_EXPR
5548 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5549 && (! TREE_SIDE_EFFECTS (arg0)
5550 || ((*lang_hooks.decls.global_bindings_p) () == 0
5551 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5553 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5554 /*cond_first_p=*/0);
5555 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5556 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5557 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5558 else if ((TREE_CODE (arg0) == COND_EXPR
5559 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5560 && TREE_CODE_CLASS (code) != '<'))
5561 && (TREE_CODE (arg1) != COND_EXPR
5562 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5563 && (! TREE_SIDE_EFFECTS (arg1)
5564 || ((*lang_hooks.decls.global_bindings_p) () == 0
5565 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5567 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5568 /*cond_first_p=*/1);
5582 return fold (DECL_INITIAL (t));
5587 case FIX_TRUNC_EXPR:
5588 /* Other kinds of FIX are not handled properly by fold_convert. */
5590 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5591 return TREE_OPERAND (t, 0);
5593 /* Handle cases of two conversions in a row. */
5594 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5595 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5597 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5598 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5599 tree final_type = TREE_TYPE (t);
5600 int inside_int = INTEGRAL_TYPE_P (inside_type);
5601 int inside_ptr = POINTER_TYPE_P (inside_type);
5602 int inside_float = FLOAT_TYPE_P (inside_type);
5603 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5604 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5605 int inter_int = INTEGRAL_TYPE_P (inter_type);
5606 int inter_ptr = POINTER_TYPE_P (inter_type);
5607 int inter_float = FLOAT_TYPE_P (inter_type);
5608 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5609 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5610 int final_int = INTEGRAL_TYPE_P (final_type);
5611 int final_ptr = POINTER_TYPE_P (final_type);
5612 int final_float = FLOAT_TYPE_P (final_type);
5613 unsigned int final_prec = TYPE_PRECISION (final_type);
5614 int final_unsignedp = TREE_UNSIGNED (final_type);
5616 /* In addition to the cases of two conversions in a row
5617 handled below, if we are converting something to its own
5618 type via an object of identical or wider precision, neither
5619 conversion is needed. */
5620 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5621 && ((inter_int && final_int) || (inter_float && final_float))
5622 && inter_prec >= final_prec)
5623 return fold (build1 (code, final_type,
5624 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5626 /* Likewise, if the intermediate and final types are either both
5627 float or both integer, we don't need the middle conversion if
5628 it is wider than the final type and doesn't change the signedness
5629 (for integers). Avoid this if the final type is a pointer
5630 since then we sometimes need the inner conversion. Likewise if
5631 the outer has a precision not equal to the size of its mode. */
5632 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5633 || (inter_float && inside_float))
5634 && inter_prec >= inside_prec
5635 && (inter_float || inter_unsignedp == inside_unsignedp)
5636 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5637 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5639 return fold (build1 (code, final_type,
5640 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5642 /* If we have a sign-extension of a zero-extended value, we can
5643 replace that by a single zero-extension. */
5644 if (inside_int && inter_int && final_int
5645 && inside_prec < inter_prec && inter_prec < final_prec
5646 && inside_unsignedp && !inter_unsignedp)
5647 return fold (build1 (code, final_type,
5648 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5650 /* Two conversions in a row are not needed unless:
5651 - some conversion is floating-point (overstrict for now), or
5652 - the intermediate type is narrower than both the initial and final types, or
5654 - the intermediate type and innermost type differ in signedness,
5655 and the outermost type is wider than the intermediate, or
5656 - the initial type is a pointer type and the precisions of the
5657 intermediate and final types differ, or
5658 - the final type is a pointer type and the precisions of the
5659 initial and intermediate types differ. */
5660 if (! inside_float && ! inter_float && ! final_float
5661 && (inter_prec > inside_prec || inter_prec > final_prec)
5662 && ! (inside_int && inter_int
5663 && inter_unsignedp != inside_unsignedp
5664 && inter_prec < final_prec)
5665 && ((inter_unsignedp && inter_prec > inside_prec)
5666 == (final_unsignedp && final_prec > inter_prec))
5667 && ! (inside_ptr && inter_prec != final_prec)
5668 && ! (final_ptr && inside_prec != inter_prec)
5669 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5670 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5672 return fold (build1 (code, final_type,
5673 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
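/* An illustrative, standalone sketch (not part of this file) of the rule
   above at the source level: a widening intermediate cast back to the
   original type is removable, a narrowing one is not.  It assumes the
   common 8-bit char / 32-bit int layout and the usual wrapping
   conversion behavior.  */
#if 0
#include <assert.h>

int
main (void)
{
  int i = 1000;
  assert ((int) (long) i == i);   /* wider intermediate: a no-op  */
  assert ((int) (char) i != i);   /* narrower intermediate: not  */
  return 0;
}
#endif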
5676 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5677 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5678 /* Detect assigning a bitfield. */
5679 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5680 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5682 /* Don't leave an assignment inside a conversion
5683 unless assigning a bitfield. */
5684 tree prev = TREE_OPERAND (t, 0);
5687 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5688 /* First do the assignment, then return converted constant. */
5689 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5690 TREE_NO_UNUSED_WARNING (t) = 1;
5695 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5696 constant (if x has signed type, the sign bit cannot be set
5697 in c). This folds extension into the BIT_AND_EXPR. */
5698 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5699 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5700 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5701 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5703 tree and = TREE_OPERAND (t, 0);
5704 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5707 if (TREE_UNSIGNED (TREE_TYPE (and))
5708 || (TYPE_PRECISION (TREE_TYPE (t))
5709 <= TYPE_PRECISION (TREE_TYPE (and))))
5711 else if (TYPE_PRECISION (TREE_TYPE (and1))
5712 <= HOST_BITS_PER_WIDE_INT
5713 && host_integerp (and1, 1))
5715 unsigned HOST_WIDE_INT cst;
5717 cst = tree_low_cst (and1, 1);
5718 cst &= (HOST_WIDE_INT) -1
5719 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5720 change = (cst == 0);
5721 #ifdef LOAD_EXTEND_OP
5723 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5726 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5727 and0 = fold_convert (uns, and0);
5728 and1 = fold_convert (uns, and1);
5733 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5734 fold_convert (TREE_TYPE (t), and0),
5735 fold_convert (TREE_TYPE (t), and1)));
5738 tem = fold_convert_const (code, TREE_TYPE (t), arg0);
5739 return tem ? tem : t;
5741 case VIEW_CONVERT_EXPR:
5742 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5743 return build1 (VIEW_CONVERT_EXPR, type,
5744 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5748 if (TREE_CODE (arg0) == CONSTRUCTOR
5749 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5751 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5758 if (TREE_CONSTANT (t) != wins)
5762 TREE_CONSTANT (t) = wins;
5767 if (negate_expr_p (arg0))
5768 return fold_convert (type, negate_expr (arg0));
5774 if (TREE_CODE (arg0) == INTEGER_CST)
5776 /* If the value is unsigned, then the absolute value is
5777 the same as the ordinary value. */
5778 if (TREE_UNSIGNED (type))
5780 /* Similarly, if the value is non-negative. */
5781 else if (INT_CST_LT (integer_minus_one_node, arg0))
5783 /* If the value is negative, then the absolute value is its negation. */
5787 unsigned HOST_WIDE_INT low;
5789 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5790 TREE_INT_CST_HIGH (arg0),
5792 t = build_int_2 (low, high);
5793 TREE_TYPE (t) = type;
5795 = (TREE_OVERFLOW (arg0)
5796 | force_fit_type (t, overflow));
5797 TREE_CONSTANT_OVERFLOW (t)
5798 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5801 else if (TREE_CODE (arg0) == REAL_CST)
5803 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5804 t = build_real (type,
5805 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5808 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5809 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5810 /* Convert fabs((double)float) into (double)fabsf(float). */
5811 else if (TREE_CODE (arg0) == NOP_EXPR
5812 && TREE_CODE (type) == REAL_TYPE)
5814 tree targ0 = strip_float_extensions (arg0);
5816 return fold_convert (type, fold (build1 (ABS_EXPR,
5820 else if (tree_expr_nonnegative_p (arg0))
5825 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5826 return fold_convert (type, arg0);
5827 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5828 return build (COMPLEX_EXPR, type,
5829 TREE_OPERAND (arg0, 0),
5830 negate_expr (TREE_OPERAND (arg0, 1)));
5831 else if (TREE_CODE (arg0) == COMPLEX_CST)
5832 return build_complex (type, TREE_REALPART (arg0),
5833 negate_expr (TREE_IMAGPART (arg0)));
5834 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5835 return fold (build (TREE_CODE (arg0), type,
5836 fold (build1 (CONJ_EXPR, type,
5837 TREE_OPERAND (arg0, 0))),
5838 fold (build1 (CONJ_EXPR,
5839 type, TREE_OPERAND (arg0, 1)))));
5840 else if (TREE_CODE (arg0) == CONJ_EXPR)
5841 return TREE_OPERAND (arg0, 0);
5847 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5848 ~ TREE_INT_CST_HIGH (arg0));
5849 TREE_TYPE (t) = type;
5850 force_fit_type (t, 0);
5851 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5852 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5854 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5855 return TREE_OPERAND (arg0, 0);
5859 /* A + (-B) -> A - B */
5860 if (TREE_CODE (arg1) == NEGATE_EXPR)
5861 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5862 /* (-A) + B -> B - A */
5863 if (TREE_CODE (arg0) == NEGATE_EXPR)
5864 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5865 else if (! FLOAT_TYPE_P (type))
5867 if (integer_zerop (arg1))
5868 return non_lvalue (fold_convert (type, arg0));
5870 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5871 with a constant, and the two constants have no bits in common,
5872 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
5874 if (TREE_CODE (arg0) == BIT_AND_EXPR
5875 && TREE_CODE (arg1) == BIT_AND_EXPR
5876 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5877 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5878 && integer_zerop (const_binop (BIT_AND_EXPR,
5879 TREE_OPERAND (arg0, 1),
5880 TREE_OPERAND (arg1, 1), 0)))
5882 code = BIT_IOR_EXPR;
5886 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5887 (plus (plus (mult) (mult)) (foo)) so that we can
5888 take advantage of the factoring cases below. */
5889 if ((TREE_CODE (arg0) == PLUS_EXPR
5890 && TREE_CODE (arg1) == MULT_EXPR)
5891 || (TREE_CODE (arg1) == PLUS_EXPR
5892 && TREE_CODE (arg0) == MULT_EXPR))
5894 tree parg0, parg1, parg, marg;
5896 if (TREE_CODE (arg0) == PLUS_EXPR)
5897 parg = arg0, marg = arg1;
5899 parg = arg1, marg = arg0;
5900 parg0 = TREE_OPERAND (parg, 0);
5901 parg1 = TREE_OPERAND (parg, 1);
5905 if (TREE_CODE (parg0) == MULT_EXPR
5906 && TREE_CODE (parg1) != MULT_EXPR)
5907 return fold (build (PLUS_EXPR, type,
5908 fold (build (PLUS_EXPR, type,
5909 fold_convert (type, parg0),
5910 fold_convert (type, marg))),
5911 fold_convert (type, parg1)));
5912 if (TREE_CODE (parg0) != MULT_EXPR
5913 && TREE_CODE (parg1) == MULT_EXPR)
5914 return fold (build (PLUS_EXPR, type,
5915 fold (build (PLUS_EXPR, type,
5916 fold_convert (type, parg1),
5917 fold_convert (type, marg))),
5918 fold_convert (type, parg0)));
5921 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5923 tree arg00, arg01, arg10, arg11;
5924 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5926 /* (A * C) + (B * C) -> (A+B) * C.
5927 We are most concerned about the case where C is a constant,
5928 but other combinations show up during loop reduction. Since
5929 it is not difficult, try all four possibilities. */
5931 arg00 = TREE_OPERAND (arg0, 0);
5932 arg01 = TREE_OPERAND (arg0, 1);
5933 arg10 = TREE_OPERAND (arg1, 0);
5934 arg11 = TREE_OPERAND (arg1, 1);
5937 if (operand_equal_p (arg01, arg11, 0))
5938 same = arg01, alt0 = arg00, alt1 = arg10;
5939 else if (operand_equal_p (arg00, arg10, 0))
5940 same = arg00, alt0 = arg01, alt1 = arg11;
5941 else if (operand_equal_p (arg00, arg11, 0))
5942 same = arg00, alt0 = arg01, alt1 = arg10;
5943 else if (operand_equal_p (arg01, arg10, 0))
5944 same = arg01, alt0 = arg00, alt1 = arg11;
5946 /* No identical multiplicands; see if we can find a common
5947 power-of-two factor in non-power-of-two multiplies. This
5948 can help in multi-dimensional array access. */
5949 else if (TREE_CODE (arg01) == INTEGER_CST
5950 && TREE_CODE (arg11) == INTEGER_CST
5951 && TREE_INT_CST_HIGH (arg01) == 0
5952 && TREE_INT_CST_HIGH (arg11) == 0)
5954 HOST_WIDE_INT int01, int11, tmp;
5955 int01 = TREE_INT_CST_LOW (arg01);
5956 int11 = TREE_INT_CST_LOW (arg11);
5958 /* Move min of absolute values to int11. */
5959 if ((int01 >= 0 ? int01 : -int01)
5960 < (int11 >= 0 ? int11 : -int11))
5962 tmp = int01, int01 = int11, int11 = tmp;
5963 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5964 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5967 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
		{
5969 alt0 = fold (build (MULT_EXPR, type, arg00,
5970 build_int_2 (int01 / int11, 0)));
		  alt1 = arg10, same = arg11;
		}
	  if (same)
5977 return fold (build (MULT_EXPR, type,
5978 fold (build (PLUS_EXPR, type, alt0, alt1)),
				same));
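	  /* Worked example (added): in i * 12 + j * 4 the multiplicands differ,
	     but 4 is a power of two dividing 12, so the sum is rewritten as
	     (i * 3 + j) * 4.  This is the typical shape of an index expression
	     for a two-dimensional array of 4-byte elements.  */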
5984 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5985 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5986 return non_lvalue (fold_convert (type, arg0));
5988 /* Likewise if the operands are reversed. */
5989 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5990 return non_lvalue (fold_convert (type, arg1));
5992 /* Convert x+x into x*2.0. */
5993 if (operand_equal_p (arg0, arg1, 0)
5994 && SCALAR_FLOAT_TYPE_P (type))
5995 return fold (build (MULT_EXPR, type, arg0,
5996 build_real (type, dconst2)));
5998 /* Convert x*c+x into x*(c+1). */
5999 if (flag_unsafe_math_optimizations
6000 && TREE_CODE (arg0) == MULT_EXPR
6001 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6002 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6003 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6007 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6008 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6009 return fold (build (MULT_EXPR, type, arg1,
6010 build_real (type, c)));
6013 /* Convert x+x*c into x*(c+1). */
6014 if (flag_unsafe_math_optimizations
6015 && TREE_CODE (arg1) == MULT_EXPR
6016 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6017 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6018 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6022 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6023 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6024 return fold (build (MULT_EXPR, type, arg0,
6025 build_real (type, c)));
6028 /* Convert x*c1+x*c2 into x*(c1+c2). */
6029 if (flag_unsafe_math_optimizations
6030 && TREE_CODE (arg0) == MULT_EXPR
6031 && TREE_CODE (arg1) == MULT_EXPR
6032 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6033 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6034 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6035 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6036 && operand_equal_p (TREE_OPERAND (arg0, 0),
6037 TREE_OPERAND (arg1, 0), 0))
6039 REAL_VALUE_TYPE c1, c2;
6041 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6042 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6043 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6044 return fold (build (MULT_EXPR, type,
6045 TREE_OPERAND (arg0, 0),
6046 build_real (type, c1)));
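	  /* Examples of the three -funsafe-math-optimizations folds above (added
	     note): x*2.0 + x becomes x*3.0, x + x*2.0 becomes x*3.0, and
	     x*2.0 + x*3.0 becomes x*5.0.  They are "unsafe" because the single
	     multiply may round differently from the original multiply-and-add
	     sequence.  */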
6051 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6052 is a rotate of A by C1 bits. */
6053 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6054 is a rotate of A by B bits. */
6056 enum tree_code code0, code1;
6057 code0 = TREE_CODE (arg0);
6058 code1 = TREE_CODE (arg1);
6059 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6060 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6061 && operand_equal_p (TREE_OPERAND (arg0, 0),
6062 TREE_OPERAND (arg1, 0), 0)
6063 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6065 tree tree01, tree11;
6066 enum tree_code code01, code11;
6068 tree01 = TREE_OPERAND (arg0, 1);
6069 tree11 = TREE_OPERAND (arg1, 1);
6070 STRIP_NOPS (tree01);
6071 STRIP_NOPS (tree11);
6072 code01 = TREE_CODE (tree01);
6073 code11 = TREE_CODE (tree11);
6074 if (code01 == INTEGER_CST
6075 && code11 == INTEGER_CST
6076 && TREE_INT_CST_HIGH (tree01) == 0
6077 && TREE_INT_CST_HIGH (tree11) == 0
6078 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6079 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6080 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6081 code0 == LSHIFT_EXPR ? tree01 : tree11);
6082 else if (code11 == MINUS_EXPR)
6084 tree tree110, tree111;
6085 tree110 = TREE_OPERAND (tree11, 0);
6086 tree111 = TREE_OPERAND (tree11, 1);
6087 STRIP_NOPS (tree110);
6088 STRIP_NOPS (tree111);
6089 if (TREE_CODE (tree110) == INTEGER_CST
6090 && 0 == compare_tree_int (tree110,
6092 (TREE_TYPE (TREE_OPERAND
6094 && operand_equal_p (tree01, tree111, 0))
6095 return build ((code0 == LSHIFT_EXPR
6098 type, TREE_OPERAND (arg0, 0), tree01);
6100 else if (code01 == MINUS_EXPR)
6102 tree tree010, tree011;
6103 tree010 = TREE_OPERAND (tree01, 0);
6104 tree011 = TREE_OPERAND (tree01, 1);
6105 STRIP_NOPS (tree010);
6106 STRIP_NOPS (tree011);
6107 if (TREE_CODE (tree010) == INTEGER_CST
6108 && 0 == compare_tree_int (tree010,
6110 (TREE_TYPE (TREE_OPERAND
6112 && operand_equal_p (tree11, tree011, 0))
6113 return build ((code0 != LSHIFT_EXPR
6116 type, TREE_OPERAND (arg0, 0), tree11);
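	  /* Illustration (added): for a 32-bit unsigned A, both
	         (A << 3) + (A >> 29)   and   (A << n) + (A >> (32 - n))
	     are recognized here as a left rotate of A, letting the backend emit
	     a single rotate instruction where one exists.  */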
6122 /* In most languages, can't associate operations on floats through
6123 parentheses. Rather than remember where the parentheses were, we
6124 don't associate floats at all, unless the user has specified
6125 -funsafe-math-optimizations. */
      if (! wins
6128 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6130 tree var0, con0, lit0, minus_lit0;
6131 tree var1, con1, lit1, minus_lit1;
6133 /* Split both trees into variables, constants, and literals. Then
6134 associate each group together, the constants with literals,
6135 then the result with variables. This increases the chances of
6136 literals being recombined later and of generating relocatable
6137 expressions for the sum of a constant and literal. */
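	  /* Sketch of the effect (added note): for (x + 1) + (y + 2) the splitter
	     yields variables x and y and literals 1 and 2; the literals are
	     combined first, giving 3, and the result is reattached as (x + y) + 3.
	     Keeping literals together this way also helps form relocatable
	     "symbol + constant" expressions.  */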
6138 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6139 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6140 code == MINUS_EXPR);
6142 /* Only do something if we found more than two objects. Otherwise,
6143 nothing has changed and we risk infinite recursion. */
6144 if (2 < ((var0 != 0) + (var1 != 0)
6145 + (con0 != 0) + (con1 != 0)
6146 + (lit0 != 0) + (lit1 != 0)
6147 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6149 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6150 if (code == MINUS_EXPR)
	    code = PLUS_EXPR;
6153 var0 = associate_trees (var0, var1, code, type);
6154 con0 = associate_trees (con0, con1, code, type);
6155 lit0 = associate_trees (lit0, lit1, code, type);
6156 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6158 /* Preserve the MINUS_EXPR if the negative part of the literal is
6159 greater than the positive part. Otherwise, the multiplicative
6160 folding code (i.e. extract_muldiv) may be fooled when
6161 unsigned constants are subtracted, as in the following
6162 example: ((X*2 + 4) - 8U)/2. */
6163 if (minus_lit0 && lit0)
6165 if (TREE_CODE (lit0) == INTEGER_CST
6166 && TREE_CODE (minus_lit0) == INTEGER_CST
6167 && tree_int_cst_lt (lit0, minus_lit0))
6169 minus_lit0 = associate_trees (minus_lit0, lit0,
6175 lit0 = associate_trees (lit0, minus_lit0,
6183 return fold_convert (type,
6184 associate_trees (var0, minus_lit0,
6188 con0 = associate_trees (con0, minus_lit0,
6190 return fold_convert (type,
6191 associate_trees (var0, con0,
6196 con0 = associate_trees (con0, lit0, code, type);
6197 return fold_convert (type, associate_trees (var0, con0,
6204 t1 = const_binop (code, arg0, arg1, 0);
6205 if (t1 != NULL_TREE)
6207 /* The return value should always have
6208 the same type as the original expression. */
6209 if (TREE_TYPE (t1) != TREE_TYPE (t))
6210 t1 = fold_convert (TREE_TYPE (t), t1);
	  return t1;
6217 /* A - (-B) -> A + B */
6218 if (TREE_CODE (arg1) == NEGATE_EXPR)
6219 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6220 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6221 if (TREE_CODE (arg0) == NEGATE_EXPR
6222 && (FLOAT_TYPE_P (type)
6223 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6224 && negate_expr_p (arg1)
6225 && reorder_operands_p (arg0, arg1))
6226 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6227 TREE_OPERAND (arg0, 0)));
6229 if (! FLOAT_TYPE_P (type))
6231 if (! wins && integer_zerop (arg0))
6232 return negate_expr (fold_convert (type, arg1));
6233 if (integer_zerop (arg1))
6234 return non_lvalue (fold_convert (type, arg0));
6236 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
6237 about the case where C is a constant, just try one of the
6238 four possibilities. */
6240 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
6241 && operand_equal_p (TREE_OPERAND (arg0, 1),
6242 TREE_OPERAND (arg1, 1), 0))
6243 return fold (build (MULT_EXPR, type,
6244 fold (build (MINUS_EXPR, type,
6245 TREE_OPERAND (arg0, 0),
6246 TREE_OPERAND (arg1, 0))),
6247 TREE_OPERAND (arg0, 1)));
6249 /* Fold A - (A & B) into ~B & A. */
6250 if (!TREE_SIDE_EFFECTS (arg0)
6251 && TREE_CODE (arg1) == BIT_AND_EXPR)
6253 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6254 return fold (build (BIT_AND_EXPR, type,
6255 fold (build1 (BIT_NOT_EXPR, type,
6256 TREE_OPERAND (arg1, 0))),
				arg0));
6258 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6259 return fold (build (BIT_AND_EXPR, type,
6260 fold (build1 (BIT_NOT_EXPR, type,
6261 TREE_OPERAND (arg1, 1))),
				arg0));
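	  /* Illustration (added): with a constant mask, x - (x & 7) is x & ~7,
	     i.e. x rounded down to a multiple of 8; the fold above is the general
	     ~B & A form of that identity.  */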
6265 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6266 any power of 2 minus 1. */
6267 if (TREE_CODE (arg0) == BIT_AND_EXPR
6268 && TREE_CODE (arg1) == BIT_AND_EXPR
6269 && operand_equal_p (TREE_OPERAND (arg0, 0),
6270 TREE_OPERAND (arg1, 0), 0))
6272 tree mask0 = TREE_OPERAND (arg0, 1);
6273 tree mask1 = TREE_OPERAND (arg1, 1);
6274 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6276 if (operand_equal_p (tem, mask1, 0))
6278 tem = fold (build (BIT_XOR_EXPR, type,
6279 TREE_OPERAND (arg0, 0), mask1));
6280 return fold (build (MINUS_EXPR, type, tem, mask1));
6285 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6286 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6287 return non_lvalue (fold_convert (type, arg0));
6289 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6290 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6291 (-ARG1 + ARG0) reduces to -ARG1. */
6292 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6293 return negate_expr (fold_convert (type, arg1));
6295 /* Fold &x - &x. This can happen from &x.foo - &x.
6296 This is unsafe for certain floats even in non-IEEE formats.
6297 In IEEE, it is unsafe because it does wrong for NaNs.
6298 Also note that operand_equal_p is always false if an operand is volatile.  */
6301 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6302 && operand_equal_p (arg0, arg1, 0))
6303 return fold_convert (type, integer_zero_node);
6308 /* (-A) * (-B) -> A * B */
6309 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6310 return fold (build (MULT_EXPR, type,
6311 TREE_OPERAND (arg0, 0),
6312 negate_expr (arg1)));
6313 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6314 return fold (build (MULT_EXPR, type,
			    negate_expr (arg0),
6316 TREE_OPERAND (arg1, 0)));
6318 if (! FLOAT_TYPE_P (type))
6320 if (integer_zerop (arg1))
6321 return omit_one_operand (type, arg1, arg0);
6322 if (integer_onep (arg1))
6323 return non_lvalue (fold_convert (type, arg0));
6325 /* (a * (1 << b)) is (a << b) */
6326 if (TREE_CODE (arg1) == LSHIFT_EXPR
6327 && integer_onep (TREE_OPERAND (arg1, 0)))
6328 return fold (build (LSHIFT_EXPR, type, arg0,
6329 TREE_OPERAND (arg1, 1)));
6330 if (TREE_CODE (arg0) == LSHIFT_EXPR
6331 && integer_onep (TREE_OPERAND (arg0, 0)))
6332 return fold (build (LSHIFT_EXPR, type, arg1,
6333 TREE_OPERAND (arg0, 1)));
6335 if (TREE_CODE (arg1) == INTEGER_CST
6336 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6337 fold_convert (type, arg1),
6339 return fold_convert (type, tem);
6344 /* Maybe fold x * 0 to 0. The expressions aren't the same
6345 when x is NaN, since x * 0 is also NaN. Nor are they the
6346 same in modes with signed zeros, since multiplying a
6347 negative value by 0 gives -0, not +0. */
6348 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6349 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6350 && real_zerop (arg1))
6351 return omit_one_operand (type, arg1, arg0);
6352 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6353 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6354 && real_onep (arg1))
6355 return non_lvalue (fold_convert (type, arg0));
6357 /* Transform x * -1.0 into -x. */
6358 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6359 && real_minus_onep (arg1))
6360 return fold (build1 (NEGATE_EXPR, type, arg0));
6362 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6363 if (flag_unsafe_math_optimizations
6364 && TREE_CODE (arg0) == RDIV_EXPR
6365 && TREE_CODE (arg1) == REAL_CST
6366 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6368 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
6371 return fold (build (RDIV_EXPR, type, tem,
6372 TREE_OPERAND (arg0, 1)));
6375 if (flag_unsafe_math_optimizations)
6377 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6378 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6380 /* Optimizations of sqrt(...)*sqrt(...). */
6381 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6382 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6383 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6385 tree sqrtfn, arg, arglist;
6386 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6387 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6389 /* Optimize sqrt(x)*sqrt(x) as x. */
6390 if (operand_equal_p (arg00, arg10, 0)
6391 && ! HONOR_SNANS (TYPE_MODE (type)))
		return arg00;
6394 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6395 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6396 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6397 arglist = build_tree_list (NULL_TREE, arg);
6398 return build_function_call_expr (sqrtfn, arglist);
6401 /* Optimize expN(x)*expN(y) as expN(x+y). */
6402 if (fcode0 == fcode1
6403 && (fcode0 == BUILT_IN_EXP
6404 || fcode0 == BUILT_IN_EXPF
6405 || fcode0 == BUILT_IN_EXPL
6406 || fcode0 == BUILT_IN_EXP2
6407 || fcode0 == BUILT_IN_EXP2F
6408 || fcode0 == BUILT_IN_EXP2L
6409 || fcode0 == BUILT_IN_EXP10
6410 || fcode0 == BUILT_IN_EXP10F
6411 || fcode0 == BUILT_IN_EXP10L
6412 || fcode0 == BUILT_IN_POW10
6413 || fcode0 == BUILT_IN_POW10F
6414 || fcode0 == BUILT_IN_POW10L))
6416 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6417 tree arg = build (PLUS_EXPR, type,
6418 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6419 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6420 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6421 return build_function_call_expr (expfn, arglist);
6424 /* Optimizations of pow(...)*pow(...). */
6425 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6426 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6427 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6429 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6430 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6432 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6433 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6436 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6437 if (operand_equal_p (arg01, arg11, 0))
6439 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6440 tree arg = build (MULT_EXPR, type, arg00, arg10);
6441 tree arglist = tree_cons (NULL_TREE, fold (arg),
6442 build_tree_list (NULL_TREE,
							     arg01));
6444 return build_function_call_expr (powfn, arglist);
6447 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6448 if (operand_equal_p (arg00, arg10, 0))
6450 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6451 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6452 tree arglist = tree_cons (NULL_TREE, arg00,
6453 build_tree_list (NULL_TREE,
							     arg));
6455 return build_function_call_expr (powfn, arglist);
6459 /* Optimize tan(x)*cos(x) as sin(x). */
6460 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6461 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6462 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6463 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6464 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6465 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6466 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6467 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6475 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6479 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6483 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6489 if (sinfn != NULL_TREE)
6490 return build_function_call_expr (sinfn,
6491 TREE_OPERAND (arg0, 1));
6494 /* Optimize x*pow(x,c) as pow(x,c+1). */
6495 if (fcode1 == BUILT_IN_POW
6496 || fcode1 == BUILT_IN_POWF
6497 || fcode1 == BUILT_IN_POWL)
6499 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6500 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6502 if (TREE_CODE (arg11) == REAL_CST
6503 && ! TREE_CONSTANT_OVERFLOW (arg11)
6504 && operand_equal_p (arg0, arg10, 0))
6506 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6510 c = TREE_REAL_CST (arg11);
6511 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6512 arg = build_real (type, c);
6513 arglist = build_tree_list (NULL_TREE, arg);
6514 arglist = tree_cons (NULL_TREE, arg0, arglist);
6515 return build_function_call_expr (powfn, arglist);
6519 /* Optimize pow(x,c)*x as pow(x,c+1). */
6520 if (fcode0 == BUILT_IN_POW
6521 || fcode0 == BUILT_IN_POWF
6522 || fcode0 == BUILT_IN_POWL)
6524 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6525 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6527 if (TREE_CODE (arg01) == REAL_CST
6528 && ! TREE_CONSTANT_OVERFLOW (arg01)
6529 && operand_equal_p (arg1, arg00, 0))
6531 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6535 c = TREE_REAL_CST (arg01);
6536 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6537 arg = build_real (type, c);
6538 arglist = build_tree_list (NULL_TREE, arg);
6539 arglist = tree_cons (NULL_TREE, arg1, arglist);
6540 return build_function_call_expr (powfn, arglist);
6544 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6546 && operand_equal_p (arg0, arg1, 0))
6550 if (type == double_type_node)
6551 powfn = implicit_built_in_decls[BUILT_IN_POW];
6552 else if (type == float_type_node)
6553 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6554 else if (type == long_double_type_node)
6555 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6561 tree arg = build_real (type, dconst2);
6562 tree arglist = build_tree_list (NULL_TREE, arg);
6563 arglist = tree_cons (NULL_TREE, arg0, arglist);
6564 return build_function_call_expr (powfn, arglist);
6573 if (integer_all_onesp (arg1))
6574 return omit_one_operand (type, arg1, arg0);
6575 if (integer_zerop (arg1))
6576 return non_lvalue (fold_convert (type, arg0));
6577 t1 = distribute_bit_expr (code, type, arg0, arg1);
6578 if (t1 != NULL_TREE)
	return t1;
6581 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6583 This results in more efficient code for machines without a NAND
6584 instruction. Combine will canonicalize to the first form
6585 which will allow use of NAND instructions provided by the
6586 backend if they exist. */
6587 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6588 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6590 return fold (build1 (BIT_NOT_EXPR, type,
6591 build (BIT_AND_EXPR, type,
6592 TREE_OPERAND (arg0, 0),
6593 TREE_OPERAND (arg1, 0))));
6596 /* See if this can be simplified into a rotate first. If that
6597 is unsuccessful continue in the association code. */
6601 if (integer_zerop (arg1))
6602 return non_lvalue (fold_convert (type, arg0));
6603 if (integer_all_onesp (arg1))
6604 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6606 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6607 with a constant, and the two constants have no bits in common,
6608 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications.  */
6610 if (TREE_CODE (arg0) == BIT_AND_EXPR
6611 && TREE_CODE (arg1) == BIT_AND_EXPR
6612 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6613 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6614 && integer_zerop (const_binop (BIT_AND_EXPR,
6615 TREE_OPERAND (arg0, 1),
6616 TREE_OPERAND (arg1, 1), 0)))
6618 code = BIT_IOR_EXPR;
6622 /* See if this can be simplified into a rotate first. If that
6623 is unsuccessful continue in the association code. */
6627 if (integer_all_onesp (arg1))
6628 return non_lvalue (fold_convert (type, arg0));
6629 if (integer_zerop (arg1))
6630 return omit_one_operand (type, arg1, arg0);
6631 t1 = distribute_bit_expr (code, type, arg0, arg1);
6632 if (t1 != NULL_TREE)
	return t1;
6634 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6635 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6636 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	    unsigned int prec
6639 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6641 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6642 && (~TREE_INT_CST_LOW (arg1)
6643 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6644 return fold_convert (type, TREE_OPERAND (arg0, 0));
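	  /* Example (added): if c has type unsigned char, ((int) c & 0377) masks
	     with every bit the zero-extended value could possibly have set, so
	     the AND is redundant and the expression is simply (int) c.  */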
6647 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6649 This results in more efficient code for machines without a NOR
6650 instruction. Combine will canonicalize to the first form
6651 which will allow use of NOR instructions provided by the
6652 backend if they exist. */
6653 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6654 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6656 return fold (build1 (BIT_NOT_EXPR, type,
6657 build (BIT_IOR_EXPR, type,
6658 TREE_OPERAND (arg0, 0),
6659 TREE_OPERAND (arg1, 0))));
6665 /* Don't touch a floating-point divide by zero unless the mode
6666 of the constant can represent infinity. */
6667 if (TREE_CODE (arg1) == REAL_CST
6668 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6669 && real_zerop (arg1))
	    return t;
6672 /* (-A) / (-B) -> A / B */
6673 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6674 return fold (build (RDIV_EXPR, type,
6675 TREE_OPERAND (arg0, 0),
6676 negate_expr (arg1)));
6677 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6678 return fold (build (RDIV_EXPR, type,
			    negate_expr (arg0),
6680 TREE_OPERAND (arg1, 0)));
6682 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6683 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6684 && real_onep (arg1))
6685 return non_lvalue (fold_convert (type, arg0));
6687 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6688 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6689 && real_minus_onep (arg1))
6690 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6692 /* If ARG1 is a constant, we can convert this to a multiply by the
6693 reciprocal. This does not have the same rounding properties,
6694 so only do this if -funsafe-math-optimizations. We can actually
6695 always safely do it if ARG1 is a power of two, but it's hard to
6696 tell if it is or not in a portable manner. */
6697 if (TREE_CODE (arg1) == REAL_CST)
6699 if (flag_unsafe_math_optimizations
6700 && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
6702 return fold (build (MULT_EXPR, type, arg0, tem));
6703 /* Find the reciprocal if optimizing and the result is exact. */
6707 r = TREE_REAL_CST (arg1);
6708 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6710 tem = build_real (type, r);
6711 return fold (build (MULT_EXPR, type, arg0, tem));
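	  /* Examples (added note): x / 4.0 becomes x * 0.25 whenever the
	     reciprocal is exact, as it is for any power of two, while x / 3.0
	     becomes x * (1.0/3.0) only under -funsafe-math-optimizations because
	     that reciprocal rounds.  */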
6715 /* Convert A/B/C to A/(B*C). */
6716 if (flag_unsafe_math_optimizations
6717 && TREE_CODE (arg0) == RDIV_EXPR)
6718 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6719 fold (build (MULT_EXPR, type,
6720 TREE_OPERAND (arg0, 1), arg1))));
6722 /* Convert A/(B/C) to (A/B)*C. */
6723 if (flag_unsafe_math_optimizations
6724 && TREE_CODE (arg1) == RDIV_EXPR)
6725 return fold (build (MULT_EXPR, type,
6726 fold (build (RDIV_EXPR, type, arg0,
6727 TREE_OPERAND (arg1, 0))),
6728 TREE_OPERAND (arg1, 1)));
6730 /* Convert C1/(X*C2) into (C1/C2)/X. */
6731 if (flag_unsafe_math_optimizations
6732 && TREE_CODE (arg1) == MULT_EXPR
6733 && TREE_CODE (arg0) == REAL_CST
6734 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6736 tree tem = const_binop (RDIV_EXPR, arg0,
6737 TREE_OPERAND (arg1, 1), 0);
6739 return fold (build (RDIV_EXPR, type, tem,
6740 TREE_OPERAND (arg1, 0)));
6743 if (flag_unsafe_math_optimizations)
6745 enum built_in_function fcode = builtin_mathfn_code (arg1);
6746 /* Optimize x/expN(y) into x*expN(-y). */
6747 if (fcode == BUILT_IN_EXP
6748 || fcode == BUILT_IN_EXPF
6749 || fcode == BUILT_IN_EXPL
6750 || fcode == BUILT_IN_EXP2
6751 || fcode == BUILT_IN_EXP2F
6752 || fcode == BUILT_IN_EXP2L
6753 || fcode == BUILT_IN_EXP10
6754 || fcode == BUILT_IN_EXP10F
6755 || fcode == BUILT_IN_EXP10L
6756 || fcode == BUILT_IN_POW10
6757 || fcode == BUILT_IN_POW10F
6758 || fcode == BUILT_IN_POW10L)
6760 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6761 tree arg = build1 (NEGATE_EXPR, type,
6762 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6763 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6764 arg1 = build_function_call_expr (expfn, arglist);
6765 return fold (build (MULT_EXPR, type, arg0, arg1));
6768 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6769 if (fcode == BUILT_IN_POW
6770 || fcode == BUILT_IN_POWF
6771 || fcode == BUILT_IN_POWL)
6773 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6774 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6775 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6776 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6777 tree arglist = tree_cons(NULL_TREE, arg10,
6778 build_tree_list (NULL_TREE, neg11));
6779 arg1 = build_function_call_expr (powfn, arglist);
6780 return fold (build (MULT_EXPR, type, arg0, arg1));
6784 if (flag_unsafe_math_optimizations)
6786 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6787 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6789 /* Optimize sin(x)/cos(x) as tan(x). */
6790 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6791 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6792 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6793 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6794 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6798 if (fcode0 == BUILT_IN_SIN)
6799 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6800 else if (fcode0 == BUILT_IN_SINF)
6801 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6802 else if (fcode0 == BUILT_IN_SINL)
6803 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6807 if (tanfn != NULL_TREE)
6808 return build_function_call_expr (tanfn,
6809 TREE_OPERAND (arg0, 1));
6812 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6813 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6814 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6815 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6816 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6817 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6821 if (fcode0 == BUILT_IN_COS)
6822 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6823 else if (fcode0 == BUILT_IN_COSF)
6824 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6825 else if (fcode0 == BUILT_IN_COSL)
6826 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6830 if (tanfn != NULL_TREE)
6832 tree tmp = TREE_OPERAND (arg0, 1);
6833 tmp = build_function_call_expr (tanfn, tmp);
6834 return fold (build (RDIV_EXPR, type,
6835 build_real (type, dconst1),
				      tmp));
6840 /* Optimize pow(x,c)/x as pow(x,c-1). */
6841 if (fcode0 == BUILT_IN_POW
6842 || fcode0 == BUILT_IN_POWF
6843 || fcode0 == BUILT_IN_POWL)
6845 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6846 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6847 if (TREE_CODE (arg01) == REAL_CST
6848 && ! TREE_CONSTANT_OVERFLOW (arg01)
6849 && operand_equal_p (arg1, arg00, 0))
6851 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6855 c = TREE_REAL_CST (arg01);
6856 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6857 arg = build_real (type, c);
6858 arglist = build_tree_list (NULL_TREE, arg);
6859 arglist = tree_cons (NULL_TREE, arg1, arglist);
6860 return build_function_call_expr (powfn, arglist);
6866 case TRUNC_DIV_EXPR:
6867 case ROUND_DIV_EXPR:
6868 case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
6870 case EXACT_DIV_EXPR:
6871 if (integer_onep (arg1))
6872 return non_lvalue (fold_convert (type, arg0));
6873 if (integer_zerop (arg1))
	return t;
6876 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6877 operation, EXACT_DIV_EXPR.
6879 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6880 At one time others generated faster code, it's not clear if they do
6881 after the last round of changes to the DIV code in expmed.c.  */
6882 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6883 && multiple_of_p (type, arg0, arg1))
6884 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
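      /* Example (added): if arg0 is provably a multiple of arg1, say
	 (n * 4) / 4, then ceiling, floor and truncating division all give the
	 same result, so the cheapest form, EXACT_DIV_EXPR, is used.  */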
6886 if (TREE_CODE (arg1) == INTEGER_CST
6887 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6889 return fold_convert (type, tem);
6894 case FLOOR_MOD_EXPR:
6895 case ROUND_MOD_EXPR:
6896 case TRUNC_MOD_EXPR:
6897 if (integer_onep (arg1))
6898 return omit_one_operand (type, integer_zero_node, arg0);
6899 if (integer_zerop (arg1))
	return t;
6902 if (TREE_CODE (arg1) == INTEGER_CST
6903 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6905 return fold_convert (type, tem);
6911 if (integer_all_onesp (arg0))
6912 return omit_one_operand (type, arg0, arg1);
6916 /* Optimize -1 >> x for arithmetic right shifts. */
6917 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6918 return omit_one_operand (type, arg0, arg1);
6919 /* ... fall through ... */
6923 if (integer_zerop (arg1))
6924 return non_lvalue (fold_convert (type, arg0));
6925 if (integer_zerop (arg0))
6926 return omit_one_operand (type, arg0, arg1);
6928 /* Since negative shift count is not well-defined,
6929 don't try to compute it in the compiler. */
6930 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return t;
6932 /* Rewrite an LROTATE_EXPR by a constant into an
6933 RROTATE_EXPR by a new constant. */
6934 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6936 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6937 tem = fold_convert (TREE_TYPE (arg1), tem);
6938 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6939 return fold (build (RROTATE_EXPR, type, arg0, tem));
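      /* Illustration (added): on a 32-bit type a rotate left by 3 becomes a
	 rotate right by 29, so later passes only ever see the RROTATE_EXPR
	 canonical form for constant rotate counts.  */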
6942 /* If we have a rotate of a bit operation with the rotate count and
6943 the second operand of the bit operation both constant,
6944 permute the two operations. */
6945 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6946 && (TREE_CODE (arg0) == BIT_AND_EXPR
6947 || TREE_CODE (arg0) == BIT_IOR_EXPR
6948 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6949 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6950 return fold (build (TREE_CODE (arg0), type,
6951 fold (build (code, type,
6952 TREE_OPERAND (arg0, 0), arg1)),
6953 fold (build (code, type,
6954 TREE_OPERAND (arg0, 1), arg1))));
6956 /* Two consecutive rotates adding up to the width of the mode can be ignored.  */
6958 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6959 && TREE_CODE (arg0) == RROTATE_EXPR
6960 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6961 && TREE_INT_CST_HIGH (arg1) == 0
6962 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6963 && ((TREE_INT_CST_LOW (arg1)
6964 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6965 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6966 return TREE_OPERAND (arg0, 0);
6971 if (operand_equal_p (arg0, arg1, 0))
6972 return omit_one_operand (type, arg0, arg1);
6973 if (INTEGRAL_TYPE_P (type)
6974 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6975 return omit_one_operand (type, arg1, arg0);
6979 if (operand_equal_p (arg0, arg1, 0))
6980 return omit_one_operand (type, arg0, arg1);
6981 if (INTEGRAL_TYPE_P (type)
6982 && TYPE_MAX_VALUE (type)
6983 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6984 return omit_one_operand (type, arg1, arg0);
6987 case TRUTH_NOT_EXPR:
6988 /* Note that the operand of this must be an int
6989 and its values must be 0 or 1.
6990 ("true" is a fixed value perhaps depending on the language,
6991 but we don't handle values other than 1 correctly yet.) */
6992 tem = invert_truthvalue (arg0);
6993 /* Avoid infinite recursion. */
6994 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6996 tem = fold_single_bit_test (code, arg0, arg1, type);
7001 return fold_convert (type, tem);
7003 case TRUTH_ANDIF_EXPR:
7004 /* Note that the operands of this must be ints
7005 and their values must be 0 or 1.
7006 ("true" is a fixed value perhaps depending on the language.) */
7007 /* If first arg is constant zero, return it. */
7008 if (integer_zerop (arg0))
7009 return fold_convert (type, arg0);
7010 case TRUTH_AND_EXPR:
7011 /* If either arg is constant true, drop it. */
7012 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7013 return non_lvalue (fold_convert (type, arg1));
7014 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7015 /* Preserve sequence points. */
7016 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7017 return non_lvalue (fold_convert (type, arg0));
7018 /* If second arg is constant zero, result is zero, but first arg
7019 must be evaluated. */
7020 if (integer_zerop (arg1))
7021 return omit_one_operand (type, arg1, arg0);
7022 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7023 case will be handled here. */
7024 if (integer_zerop (arg0))
7025 return omit_one_operand (type, arg0, arg1);
7028 /* We only do these simplifications if we are optimizing. */
      if (! optimize) break;
7032 /* Check for things like (A || B) && (A || C). We can convert this
7033 to A || (B && C). Note that either operator can be any of the four
7034 truth and/or operations and the transformation will still be
7035 valid. Also note that we only care about order for the
7036 ANDIF and ORIF operators. If B contains side effects, this
7037 might change the truth-value of A. */
7038 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7039 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7040 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7041 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7042 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7043 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7045 tree a00 = TREE_OPERAND (arg0, 0);
7046 tree a01 = TREE_OPERAND (arg0, 1);
7047 tree a10 = TREE_OPERAND (arg1, 0);
7048 tree a11 = TREE_OPERAND (arg1, 1);
7049 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7050 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7051 && (code == TRUTH_AND_EXPR
7052 || code == TRUTH_OR_EXPR));
7054 if (operand_equal_p (a00, a10, 0))
7055 return fold (build (TREE_CODE (arg0), type, a00,
7056 fold (build (code, type, a01, a11))));
7057 else if (commutative && operand_equal_p (a00, a11, 0))
7058 return fold (build (TREE_CODE (arg0), type, a00,
7059 fold (build (code, type, a01, a10))));
7060 else if (commutative && operand_equal_p (a01, a10, 0))
7061 return fold (build (TREE_CODE (arg0), type, a01,
7062 fold (build (code, type, a00, a11))));
7064 /* This case is tricky because we must either have commutative
7065 operators or else A10 must not have side-effects. */
7067 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7068 && operand_equal_p (a01, a11, 0))
7069 return fold (build (TREE_CODE (arg0), type,
7070 fold (build (code, type, a00, a10)),
				a01));
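	  /* Example of the distribution above (added note): (a || b) && (a || c)
	     folds to a || (b && c).  With the non-short-circuiting TRUTH_AND_EXPR
	     and TRUTH_OR_EXPR forms the shared operand may also be matched
	     crosswise, since operand order is then irrelevant.  */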
7074 /* See if we can build a range comparison. */
7075 if (0 != (tem = fold_range_test (t)))
	    return tem;
7078 /* Check for the possibility of merging component references. If our
7079 lhs is another similar operation, try to merge its rhs with our
7080 rhs. Then try to merge our lhs and rhs. */
7081 if (TREE_CODE (arg0) == code
7082 && 0 != (tem = fold_truthop (code, type,
7083 TREE_OPERAND (arg0, 1), arg1)))
7084 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7086 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;
7091 case TRUTH_ORIF_EXPR:
7092 /* Note that the operands of this must be ints
7093 and their values must be 0 or true.
7094 ("true" is a fixed value perhaps depending on the language.) */
7095 /* If first arg is constant true, return it. */
7096 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7097 return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
7099 /* If either arg is constant zero, drop it. */
7100 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7101 return non_lvalue (fold_convert (type, arg1));
7102 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7103 /* Preserve sequence points. */
7104 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7105 return non_lvalue (fold_convert (type, arg0));
7106 /* If second arg is constant true, result is true, but we must
7107 evaluate first arg. */
7108 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7109 return omit_one_operand (type, arg1, arg0);
7110 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR.  */
7112 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7113 return omit_one_operand (type, arg0, arg1);
7116 case TRUTH_XOR_EXPR:
7117 /* If either arg is constant zero, drop it. */
7118 if (integer_zerop (arg0))
7119 return non_lvalue (fold_convert (type, arg1));
7120 if (integer_zerop (arg1))
7121 return non_lvalue (fold_convert (type, arg0));
7122 /* If either arg is constant true, this is a logical inversion. */
7123 if (integer_onep (arg0))
7124 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7125 if (integer_onep (arg1))
7126 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7135 /* If one arg is a real or integer constant, put it last. */
7136 if (tree_swap_operands_p (arg0, arg1, true))
7137 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7139 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7141 tree targ0 = strip_float_extensions (arg0);
7142 tree targ1 = strip_float_extensions (arg1);
7143 tree newtype = TREE_TYPE (targ0);
7145 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7146 newtype = TREE_TYPE (targ1);
7148 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7149 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7150 return fold (build (code, type, fold_convert (newtype, targ0),
7151 fold_convert (newtype, targ1)));
7153 /* (-a) CMP (-b) -> b CMP a */
7154 if (TREE_CODE (arg0) == NEGATE_EXPR
7155 && TREE_CODE (arg1) == NEGATE_EXPR)
7156 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7157 TREE_OPERAND (arg0, 0)));
7159 if (TREE_CODE (arg1) == REAL_CST)
7161 REAL_VALUE_TYPE cst;
7162 cst = TREE_REAL_CST (arg1);
7164 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7165 if (TREE_CODE (arg0) == NEGATE_EXPR)
		return
7167 fold (build (swap_tree_comparison (code), type,
7168 TREE_OPERAND (arg0, 0),
7169 build_real (TREE_TYPE (arg1),
7170 REAL_VALUE_NEGATE (cst))));
7172 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7173 /* a CMP (-0) -> a CMP 0 */
7174 if (REAL_VALUE_MINUS_ZERO (cst))
7175 return fold (build (code, type, arg0,
7176 build_real (TREE_TYPE (arg1), dconst0)));
7178 /* x != NaN is always true, other ops are always false. */
7179 if (REAL_VALUE_ISNAN (cst)
7180 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7182 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7183 return omit_one_operand (type, fold_convert (type, t), arg0);
7186 /* Fold comparisons against infinity. */
7187 if (REAL_VALUE_ISINF (cst))
7189 tem = fold_inf_compare (code, type, arg0, arg1);
7190 if (tem != NULL_TREE)
		return tem;
7195 /* If this is a comparison of a real constant with a PLUS_EXPR
7196 or a MINUS_EXPR of a real constant, we can convert it into a
7197 comparison with a revised real constant as long as no overflow
7198 occurs when unsafe_math_optimizations are enabled. */
7199 if (flag_unsafe_math_optimizations
7200 && TREE_CODE (arg1) == REAL_CST
7201 && (TREE_CODE (arg0) == PLUS_EXPR
7202 || TREE_CODE (arg0) == MINUS_EXPR)
7203 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7204 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7205 ? MINUS_EXPR : PLUS_EXPR,
7206 arg1, TREE_OPERAND (arg0, 1), 0))
7207 && ! TREE_CONSTANT_OVERFLOW (tem))
7208 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7210 /* Likewise, we can simplify a comparison of a real constant with
7211 a MINUS_EXPR whose first operand is also a real constant, i.e.
7212 (c1 - x) < c2 becomes x > c1-c2. */
7213 if (flag_unsafe_math_optimizations
7214 && TREE_CODE (arg1) == REAL_CST
7215 && TREE_CODE (arg0) == MINUS_EXPR
7216 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7217 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7219 && ! TREE_CONSTANT_OVERFLOW (tem))
7220 return fold (build (swap_tree_comparison (code), type,
7221 TREE_OPERAND (arg0, 1), tem));
7223 /* Fold comparisons against built-in math functions. */
7224 if (TREE_CODE (arg1) == REAL_CST
7225 && flag_unsafe_math_optimizations
7226 && ! flag_errno_math)
7228 enum built_in_function fcode = builtin_mathfn_code (arg0);
7230 if (fcode != END_BUILTINS)
7232 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7233 if (tem != NULL_TREE)
7239 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7240 if (TREE_CONSTANT (arg1)
7241 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7242 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7243 /* This optimization is invalid for ordered comparisons
7244 if CONST+INCR overflows or if foo+incr might overflow.
7245 This optimization is invalid for floating point due to rounding.
7246 For pointer types we assume overflow doesn't happen. */
7247 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7248 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7249 && (code == EQ_EXPR || code == NE_EXPR))))
7251 tree varop, newconst;
7253 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7255 newconst = fold (build (PLUS_EXPR, TREE_TYPE (arg0),
7256 arg1, TREE_OPERAND (arg0, 1)));
7257 varop = build (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7258 TREE_OPERAND (arg0, 0),
7259 TREE_OPERAND (arg0, 1));
7263 newconst = fold (build (MINUS_EXPR, TREE_TYPE (arg0),
7264 arg1, TREE_OPERAND (arg0, 1)));
7265 varop = build (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7266 TREE_OPERAND (arg0, 0),
7267 TREE_OPERAND (arg0, 1));
7271 /* If VAROP is a reference to a bitfield, we must mask
7272 the constant by the width of the field. */
7273 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7274 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7276 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7277 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7278 tree folded_compare, shift;
7280 /* First check whether the comparison would come out
7281 always the same. If we don't do that we would
7282 change the meaning with the masking. */
7283 folded_compare = fold (build (code, type,
7284 TREE_OPERAND (varop, 0),
					    arg1));
7286 if (integer_zerop (folded_compare)
7287 || integer_onep (folded_compare))
7288 return omit_one_operand (type, folded_compare, varop);
7290 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
				   0);
7292 newconst = fold (build (LSHIFT_EXPR, TREE_TYPE (varop),
				      newconst, shift));
7294 newconst = fold (build (RSHIFT_EXPR, TREE_TYPE (varop),
				      newconst, shift));
7298 return fold (build (code, type, varop, newconst));
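	      /* Worked example (added): comparing a 3-bit bit-field on a 32-bit
		 type, the constant is shifted left and then right by 29 bits, so
		 only its low 3 bits survive (exactly the bits the field can
		 hold) before the rewritten comparison is folded.  */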
7301 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7302 This transformation affects the cases which are handled in later
7303 optimizations involving comparisons with non-negative constants. */
7304 if (TREE_CODE (arg1) == INTEGER_CST
7305 && TREE_CODE (arg0) != INTEGER_CST
7306 && tree_int_cst_sgn (arg1) > 0)
7311 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7312 return fold (build (GT_EXPR, type, arg0, arg1));
7315 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7316 return fold (build (LE_EXPR, type, arg0, arg1));
7323 /* Comparisons with the highest or lowest possible integer of
7324 the specified size will have known values. */
7326 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7328 if (TREE_CODE (arg1) == INTEGER_CST
7329 && ! TREE_CONSTANT_OVERFLOW (arg1)
7330 && width <= HOST_BITS_PER_WIDE_INT
7331 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7332 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7334 unsigned HOST_WIDE_INT signed_max;
7335 unsigned HOST_WIDE_INT max, min;
7337 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7339 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7341 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1, min = 0;
	      else
		max = signed_max,
7347 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7350 if (TREE_INT_CST_HIGH (arg1) == 0
7351 && TREE_INT_CST_LOW (arg1) == max)
7355 return omit_one_operand (type,
7360 return fold (build (EQ_EXPR, type, arg0, arg1));
7363 return omit_one_operand (type,
7368 return fold (build (NE_EXPR, type, arg0, arg1));
7370 /* The GE_EXPR and LT_EXPR cases above are not normally
7371 reached because of previous transformations. */
7376 else if (TREE_INT_CST_HIGH (arg1) == 0
7377 && TREE_INT_CST_LOW (arg1) == max - 1)
7381 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7382 return fold (build (EQ_EXPR, type, arg0, arg1));
7384 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7385 return fold (build (NE_EXPR, type, arg0, arg1));
7389 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7390 && TREE_INT_CST_LOW (arg1) == min)
7394 return omit_one_operand (type,
7399 return fold (build (EQ_EXPR, type, arg0, arg1));
7402 return omit_one_operand (type,
7407 return fold (build (NE_EXPR, type, arg0, arg1));
7412 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7413 && TREE_INT_CST_LOW (arg1) == min + 1)
7417 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7418 return fold (build (NE_EXPR, type, arg0, arg1));
7420 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7421 return fold (build (EQ_EXPR, type, arg0, arg1));
7426 else if (TREE_INT_CST_HIGH (arg1) == 0
7427 && TREE_INT_CST_LOW (arg1) == signed_max
7428 && TREE_UNSIGNED (TREE_TYPE (arg1))
7429 /* signed_type does not work on pointer types. */
7430 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7432 /* The following case also applies to X < signed_max+1
7433 and X >= signed_max+1 because of previous transformations.  */
7434 if (code == LE_EXPR || code == GT_EXPR)
7437 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7438 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
		    return fold
7440 (build (code == LE_EXPR ? GE_EXPR : LT_EXPR,
7441 type, fold_convert (st0, arg0),
7442 fold_convert (st1, integer_zero_node)));
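		  /* Example (added): for a 32-bit unsigned x, x <= 0x7fffffff
		     tests the same condition as (int) x >= 0, and x > 0x7fffffff
		     the same as (int) x < 0, which is the signed rewrite
		     performed above.  */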
7448 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7449 a MINUS_EXPR of a constant, we can convert it into a comparison with
7450 a revised constant as long as no overflow occurs. */
7451 if ((code == EQ_EXPR || code == NE_EXPR)
7452 && TREE_CODE (arg1) == INTEGER_CST
7453 && (TREE_CODE (arg0) == PLUS_EXPR
7454 || TREE_CODE (arg0) == MINUS_EXPR)
7455 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7456 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7457 ? MINUS_EXPR : PLUS_EXPR,
7458 arg1, TREE_OPERAND (arg0, 1), 0))
7459 && ! TREE_CONSTANT_OVERFLOW (tem))
7460 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7462 /* Similarly for a NEGATE_EXPR. */
7463 else if ((code == EQ_EXPR || code == NE_EXPR)
7464 && TREE_CODE (arg0) == NEGATE_EXPR
7465 && TREE_CODE (arg1) == INTEGER_CST
7466 && 0 != (tem = negate_expr (arg1))
7467 && TREE_CODE (tem) == INTEGER_CST
7468 && ! TREE_CONSTANT_OVERFLOW (tem))
7469 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7471 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7472 for !=. Don't do this for ordered comparisons due to overflow. */
7473 else if ((code == NE_EXPR || code == EQ_EXPR)
7474 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7475 return fold (build (code, type,
7476 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7478 /* If we are widening one operand of an integer comparison,
7479 see if the other operand is similarly being widened. Perhaps we
7480 can do the comparison in the narrower type. */
7481 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7482 && TREE_CODE (arg0) == NOP_EXPR
7483 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7484 && (code == EQ_EXPR || code == NE_EXPR
7485 || TREE_UNSIGNED (TREE_TYPE (arg0))
7486 == TREE_UNSIGNED (TREE_TYPE (tem)))
7487 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7488 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7489 || (TREE_CODE (t1) == INTEGER_CST
7490 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7491 return fold (build (code, type, tem,
7492 fold_convert (TREE_TYPE (tem), t1)));
7494 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7495 constant, we can simplify it. */
7496 else if (TREE_CODE (arg1) == INTEGER_CST
7497 && (TREE_CODE (arg0) == MIN_EXPR
7498 || TREE_CODE (arg0) == MAX_EXPR)
7499 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7500 return optimize_minmax_comparison (t);
7502 /* If we are comparing an ABS_EXPR with a constant, we can
7503 convert all the cases into explicit comparisons, but they may
7504 well not be faster than doing the ABS and one comparison.
7505 But ABS (X) <= C is a range comparison, which becomes a subtraction
7506 and a comparison, and is probably faster. */
7507 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7508 && TREE_CODE (arg0) == ABS_EXPR
7509 && ! TREE_SIDE_EFFECTS (arg0)
7510 && (0 != (tem = negate_expr (arg1)))
7511 && TREE_CODE (tem) == INTEGER_CST
7512 && ! TREE_CONSTANT_OVERFLOW (tem))
7513 return fold (build (TRUTH_ANDIF_EXPR, type,
7514 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7515 build (LE_EXPR, type,
7516 TREE_OPERAND (arg0, 0), arg1)));
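	  /* Illustration (added): ABS (x) <= 5 is folded into the range test
	     x >= -5 && x <= 5, avoiding the explicit absolute value.  */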
7518 /* If this is an EQ or NE comparison with zero and ARG0 is
7519 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7520 two operations, but the latter can be done in one less insn
7521 on machines that have only two-operand insns or on which a
7522 constant cannot be the first operand. */
7523 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7524 && TREE_CODE (arg0) == BIT_AND_EXPR)
7526 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7527 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7529 fold (build (code, type,
7530 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7532 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7533 TREE_OPERAND (arg0, 1),
7534 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7535 fold_convert (TREE_TYPE (arg0),
7538 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7539 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7541 fold (build (code, type,
7542 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7544 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7545 TREE_OPERAND (arg0, 0),
7546 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7547 fold_convert (TREE_TYPE (arg0),
7552 /* If this is an NE or EQ comparison of zero against the result of a
7553 signed MOD operation whose second operand is a power of 2, make
7554 the MOD operation unsigned since it is simpler and equivalent. */
7555 if ((code == NE_EXPR || code == EQ_EXPR)
7556 && integer_zerop (arg1)
7557 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7558 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7559 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7560 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7561 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7562 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7564 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7565 tree newmod = build (TREE_CODE (arg0), newtype,
7566 fold_convert (newtype,
7567 TREE_OPERAND (arg0, 0)),
7568 fold_convert (newtype,
7569 TREE_OPERAND (arg0, 1)));
7571 return build (code, type, newmod, fold_convert (newtype, arg1));
7574 /* If this is an NE comparison of zero with an AND of one, remove the
7575 comparison since the AND will give the correct value. */
7576 if (code == NE_EXPR && integer_zerop (arg1)
7577 && TREE_CODE (arg0) == BIT_AND_EXPR
7578 && integer_onep (TREE_OPERAND (arg0, 1)))
7579 return fold_convert (type, arg0);
7581 /* If we have (A & C) == C where C is a power of 2, convert this into
7582 (A & C) != 0. Similarly for NE_EXPR. */
7583 if ((code == EQ_EXPR || code == NE_EXPR)
7584 && TREE_CODE (arg0) == BIT_AND_EXPR
7585 && integer_pow2p (TREE_OPERAND (arg0, 1))
7586 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7587 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7588 arg0, integer_zero_node));
7590 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7591 2, then fold the expression into shifts and logical operations. */
7592 tem = fold_single_bit_test (code, arg0, arg1, type);
7596 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7597 Similarly for NE_EXPR. */
7598 if ((code == EQ_EXPR || code == NE_EXPR)
7599 && TREE_CODE (arg0) == BIT_AND_EXPR
7600 && TREE_CODE (arg1) == INTEGER_CST
7601 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	      tree dandnotc
7604 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7605 arg1, build1 (BIT_NOT_EXPR,
7606 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7607 TREE_OPERAND (arg0, 1))));
7608 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7609 if (integer_nonzerop (dandnotc))
7610 return omit_one_operand (type, rslt, arg0);
7613 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7614 Similarly for NE_EXPR. */
7615 if ((code == EQ_EXPR || code == NE_EXPR)
7616 && TREE_CODE (arg0) == BIT_IOR_EXPR
7617 && TREE_CODE (arg1) == INTEGER_CST
7618 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	      tree candnotd
7621 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7622 TREE_OPERAND (arg0, 1),
7623 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7624 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7625 if (integer_nonzerop (candnotd))
7626 return omit_one_operand (type, rslt, arg0);
7629 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7630 and similarly for >= into !=. */
7631 if ((code == LT_EXPR || code == GE_EXPR)
7632 && TREE_UNSIGNED (TREE_TYPE (arg0))
7633 && TREE_CODE (arg1) == LSHIFT_EXPR
7634 && integer_onep (TREE_OPERAND (arg1, 0)))
7635 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7636 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7637 TREE_OPERAND (arg1, 1)),
7638 fold_convert (TREE_TYPE (arg0), integer_zero_node));
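	  /* Example (added): for unsigned x, x < (1 << y) holds exactly when
	     x >> y == 0, and x >= (1 << y) exactly when x >> y != 0, so the
	     shift form avoids materializing the power of two.  */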
7640 else if ((code == LT_EXPR || code == GE_EXPR)
7641 && TREE_UNSIGNED (TREE_TYPE (arg0))
7642 && (TREE_CODE (arg1) == NOP_EXPR
7643 || TREE_CODE (arg1) == CONVERT_EXPR)
7644 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7645 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7647 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7648 fold_convert (TREE_TYPE (arg0),
7649 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7650 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7652 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7654 /* Simplify comparison of something with itself. (For IEEE
7655 floating-point, we can only do some of these simplifications.) */
7656 if (operand_equal_p (arg0, arg1, 0))
7661 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7662 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7663 return constant_boolean_node (1, type);
7668 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7669 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7670 return constant_boolean_node (1, type);
7671 return fold (build (EQ_EXPR, type, arg0, arg1));
7674 /* For NE, we can only do this simplification if integer
7675 or we don't honor IEEE floating point NaNs. */
7676 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7677 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7679 /* ... fall through ... */
7682 return constant_boolean_node (0, type);
7688 /* If we are comparing an expression that just has comparisons
7689 of two integer values, arithmetic expressions of those comparisons,
7690 and constants, we can simplify it. There are only three cases
7691 to check: the two values can either be equal, the first can be
7692 greater, or the second can be greater. Fold the expression for
7693 those three values. Since each value must be 0 or 1, we have
7694 eight possibilities, each of which corresponds to the constant 0
7695 or 1 or one of the six possible comparisons.
7697 This handles common cases like (a > b) == 0 but also handles
7698 expressions like ((x > y) - (y > x)) > 0, which supposedly
7699 occur in macroized code. */
7701 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7703 tree cval1 = 0, cval2 = 0;
7706 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7707 /* Don't handle degenerate cases here; they should already
7708 have been handled anyway. */
7709 && cval1 != 0 && cval2 != 0
7710 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7711 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7712 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7713 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7714 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7715 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7716 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7718 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7719 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7721 /* We can't just pass T to eval_subst in case cval1 or cval2
7722 was the same as ARG1. */
	      high_result
7725 = fold (build (code, type,
7726 eval_subst (arg0, cval1, maxval, cval2, minval), arg1));
	      equal_result
7729 = fold (build (code, type,
7730 eval_subst (arg0, cval1, maxval, cval2, maxval), arg1));
	      low_result
7733 = fold (build (code, type,
7734 eval_subst (arg0, cval1, minval, cval2, maxval), arg1));
7737 /* All three of these results should be 0 or 1. Confirm they
7738 are. Then use those values to select the proper code
7741 if ((integer_zerop (high_result)
7742 || integer_onep (high_result))
7743 && (integer_zerop (equal_result)
7744 || integer_onep (equal_result))
7745 && (integer_zerop (low_result)
7746 || integer_onep (low_result)))
7748 /* Make a 3-bit mask with the high-order bit being the
7749 value for `>', the next for `=', and the low for `<'. */
7750 switch ((integer_onep (high_result) * 4)
7751 + (integer_onep (equal_result) * 2)
7752 + integer_onep (low_result))
7756 return omit_one_operand (type, integer_zero_node, arg0);
7777 return omit_one_operand (type, integer_one_node, arg0);
7780 t = build (code, type, cval1, cval2);
7782 return save_expr (t);
7789 /* If this is a comparison of a field, we may be able to simplify it. */
7790 if (((TREE_CODE (arg0) == COMPONENT_REF
7791 && (*lang_hooks.can_use_bit_fields_p) ())
7792 || TREE_CODE (arg0) == BIT_FIELD_REF)
7793 && (code == EQ_EXPR || code == NE_EXPR)
7794 /* Handle the constant case even without -O
7795 to make sure the warnings are given. */
7796 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7798 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7803 /* If this is a comparison of complex values and either or both sides
7804 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7805 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7806 This may prevent needless evaluations. */
7807 if ((code == EQ_EXPR || code == NE_EXPR)
7808 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7809 && (TREE_CODE (arg0) == COMPLEX_EXPR
7810 || TREE_CODE (arg1) == COMPLEX_EXPR
7811 || TREE_CODE (arg0) == COMPLEX_CST
7812 || TREE_CODE (arg1) == COMPLEX_CST))
7814 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7815 tree real0, imag0, real1, imag1;
7817 arg0 = save_expr (arg0);
7818 arg1 = save_expr (arg1);
7819 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7820 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7821 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7822 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7824 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7827 fold (build (code, type, real0, real1)),
7828 fold (build (code, type, imag0, imag1))));
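/* E.g. an equality on complex operands becomes
   "real0 == real1 && imag0 == imag1", while an inequality becomes
   "real0 != real1 || imag0 != imag1".  */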
7831 /* Optimize comparisons of strlen vs zero to a compare of the
7832 first character of the string vs zero. To wit,
7833 strlen(ptr) == 0 => *ptr == 0
7834 strlen(ptr) != 0 => *ptr != 0
7835 Other cases should reduce to one of these two (or a constant)
7836 due to the return value of strlen being unsigned. */
7837 if ((code == EQ_EXPR || code == NE_EXPR)
7838 && integer_zerop (arg1)
7839 && TREE_CODE (arg0) == CALL_EXPR)
7841 tree fndecl = get_callee_fndecl (arg0);
7845 && DECL_BUILT_IN (fndecl)
7846 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7847 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7848 && (arglist = TREE_OPERAND (arg0, 1))
7849 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7850 && ! TREE_CHAIN (arglist))
7851 return fold (build (code, type,
7852 build1 (INDIRECT_REF, char_type_node,
7853 TREE_VALUE (arglist)),
7854 integer_zero_node));
7857 /* From here on, the only cases we handle are when the result is
7858 known to be a constant.
7860 To compute GT, swap the arguments and do LT.
7861 To compute GE, do LT and invert the result.
7862 To compute LE, swap the arguments, do LT and invert the result.
7863 To compute NE, do EQ and invert the result.
7865 Therefore, the code below must handle only EQ and LT. */
7867 if (code == LE_EXPR || code == GT_EXPR)
7869 tem = arg0, arg0 = arg1, arg1 = tem;
7870 code = swap_tree_comparison (code);
7873 /* Note that it is safe to invert for real values here because we
7874 will check below in the one case that it matters. */
7878 if (code == NE_EXPR || code == GE_EXPR)
7881 code = invert_tree_comparison (code);
7884 /* Compute a result for LT or EQ if args permit;
7885 otherwise return T. */
7886 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7888 if (code == EQ_EXPR)
7889 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7891 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7892 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7893 : INT_CST_LT (arg0, arg1)),
7897 #if 0 /* This is no longer useful, but breaks some real code. */
7898 /* Assume a nonexplicit constant cannot equal an explicit one,
7899 since such code would be undefined anyway.
7900 Exception: on sysvr4, using #pragma weak,
7901 a label can come out as 0. */
7902 else if (TREE_CODE (arg1) == INTEGER_CST
7903 && !integer_zerop (arg1)
7904 && TREE_CONSTANT (arg0)
7905 && TREE_CODE (arg0) == ADDR_EXPR
7907 t1 = build_int_2 (0, 0);
7909 /* Two real constants can be compared explicitly. */
7910 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7912 /* If either operand is a NaN, the result is false with two
7913 exceptions: First, an NE_EXPR is true on NaNs, but that case
7914 is already handled correctly since we will be inverting the
7915 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7916 or a GE_EXPR into a LT_EXPR, we must return true so that it
7917 will be inverted into false. */
7919 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7920 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7921 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7923 else if (code == EQ_EXPR)
7924 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7925 TREE_REAL_CST (arg1)),
7928 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7929 TREE_REAL_CST (arg1)),
7933 if (t1 == NULL_TREE)
7937 TREE_INT_CST_LOW (t1) ^= 1;
7939 TREE_TYPE (t1) = type;
7940 if (TREE_CODE (type) == BOOLEAN_TYPE)
7941 return (*lang_hooks.truthvalue_conversion) (t1);
7945 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7946 so all simple results must be passed through pedantic_non_lvalue. */
7947 if (TREE_CODE (arg0) == INTEGER_CST)
7949 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
7950 /* Only optimize constant conditions when the selected branch
7951 has the same type as the COND_EXPR. This avoids optimizing
7952 away "c ? x : throw", where the throw has a void type. */
7953 if (! VOID_TYPE_P (TREE_TYPE (tem))
7954 || VOID_TYPE_P (TREE_TYPE (t)))
7955 return pedantic_non_lvalue (tem);
7958 if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7959 return pedantic_omit_one_operand (type, arg1, arg0);
7961 /* If we have A op B ? A : C, we may be able to convert this to a
7962 simpler expression, depending on the operation and the values
7963 of B and C. Signed zeros prevent all of these transformations,
7964 for reasons given above each one. */
7966 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7967 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7968 arg1, TREE_OPERAND (arg0, 1))
7969 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7971 tree arg2 = TREE_OPERAND (t, 2);
7972 enum tree_code comp_code = TREE_CODE (arg0);
7976 /* If we have A op 0 ? A : -A, consider applying the following
7979 A == 0? A : -A same as -A
7980 A != 0? A : -A same as A
7981 A >= 0? A : -A same as abs (A)
7982 A > 0? A : -A same as abs (A)
7983 A <= 0? A : -A same as -abs (A)
7984 A < 0? A : -A same as -abs (A)
7986 None of these transformations work for modes with signed
7987 zeros. If A is +/-0, the first two transformations will
7988 change the sign of the result (from +0 to -0, or vice
7989 versa). The last four will fix the sign of the result,
7990 even though the original expressions could be positive or
7991 negative, depending on the sign of A.
7993 Note that all these transformations are correct if A is
7994 NaN, since the two alternatives (A and -A) are also NaNs. */
7995 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7996 ? real_zerop (TREE_OPERAND (arg0, 1))
7997 : integer_zerop (TREE_OPERAND (arg0, 1)))
7998 && TREE_CODE (arg2) == NEGATE_EXPR
7999 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8003 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8004 tem = fold_convert (type, negate_expr (tem));
8005 return pedantic_non_lvalue (tem);
8007 return pedantic_non_lvalue (fold_convert (type, arg1));
8010 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8011 arg1 = fold_convert ((*lang_hooks.types.signed_type)
8012 (TREE_TYPE (arg1)), arg1);
8013 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8014 return pedantic_non_lvalue (fold_convert (type, arg1));
8017 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8018 arg1 = fold_convert ((*lang_hooks.types.signed_type)
8019 (TREE_TYPE (arg1)), arg1);
8020 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8021 arg1 = negate_expr (fold_convert (type, arg1));
8022 return pedantic_non_lvalue (arg1);
8027 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8028 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8029 both transformations are correct when A is NaN: A != 0
8030 is then true, and A == 0 is false. */
8032 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8034 if (comp_code == NE_EXPR)
8035 return pedantic_non_lvalue (fold_convert (type, arg1));
8036 else if (comp_code == EQ_EXPR)
8037 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8040 /* Try some transformations of A op B ? A : B.
8042 A == B? A : B same as B
8043 A != B? A : B same as A
8044 A >= B? A : B same as max (A, B)
8045 A > B? A : B same as max (B, A)
8046 A <= B? A : B same as min (A, B)
8047 A < B? A : B same as min (B, A)
8049 As above, these transformations don't work in the presence
8050 of signed zeros. For example, if A and B are zeros of
8051 opposite sign, the first two transformations will change
8052 the sign of the result. In the last four, the original
8053 expressions give different results for (A=+0, B=-0) and
8054 (A=-0, B=+0), but the transformed expressions do not.
8056 The first two transformations are correct if either A or B
8057 is a NaN. In the first transformation, the condition will
8058 be false, and B will indeed be chosen. In the case of the
8059 second transformation, the condition A != B will be true,
8060 and A will be chosen.
8062 The conversions to max() and min() are not correct if B is
8063 a number and A is not. The conditions in the original
8064 expressions will be false, so all four give B. The min()
8065 and max() versions would give a NaN instead. */
8066 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8067 arg2, TREE_OPERAND (arg0, 0)))
8069 tree comp_op0 = TREE_OPERAND (arg0, 0);
8070 tree comp_op1 = TREE_OPERAND (arg0, 1);
8071 tree comp_type = TREE_TYPE (comp_op0);
8073 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8074 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8084 return pedantic_non_lvalue (fold_convert (type, arg2));
8086 return pedantic_non_lvalue (fold_convert (type, arg1));
8089 /* In C++ a ?: expression can be an lvalue, so put the
8090 operand which will be used if they are equal first
8091 so that we can convert this back to the
8092 corresponding COND_EXPR. */
8093 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8094 return pedantic_non_lvalue (fold_convert
8095 (type, fold (build (MIN_EXPR, comp_type,
8096 (comp_code == LE_EXPR
8097 ? comp_op0 : comp_op1),
8098 (comp_code == LE_EXPR
8099 ? comp_op1 : comp_op0)))));
8103 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8104 return pedantic_non_lvalue (fold_convert
8105 (type, fold (build (MAX_EXPR, comp_type,
8106 (comp_code == GE_EXPR
8107 ? comp_op0 : comp_op1),
8108 (comp_code == GE_EXPR
8109 ? comp_op1 : comp_op0)))));
8116 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8117 we might still be able to simplify this. For example,
8118 if C1 is one less or one more than C2, this might have started
8119 out as a MIN or MAX and been transformed by this function.
8120 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8122 if (INTEGRAL_TYPE_P (type)
8123 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8124 && TREE_CODE (arg2) == INTEGER_CST)
8128 /* We can replace A with C1 in this case. */
8129 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8130 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8131 TREE_OPERAND (t, 2)));
8134 /* If C1 is C2 + 1, this is min(A, C2). */
8135 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8136 && operand_equal_p (TREE_OPERAND (arg0, 1),
8137 const_binop (PLUS_EXPR, arg2,
8138 integer_one_node, 0), 1))
8139 return pedantic_non_lvalue
8140 (fold (build (MIN_EXPR, type, arg1, arg2)));
8144 /* If C1 is C2 - 1, this is min(A, C2). */
8145 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8146 && operand_equal_p (TREE_OPERAND (arg0, 1),
8147 const_binop (MINUS_EXPR, arg2,
8148 integer_one_node, 0), 1))
8149 return pedantic_non_lvalue
8150 (fold (build (MIN_EXPR, type, arg1, arg2)));
8154 /* If C1 is C2 - 1, this is max(A, C2). */
8155 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8156 && operand_equal_p (TREE_OPERAND (arg0, 1),
8157 const_binop (MINUS_EXPR, arg2,
8158 integer_one_node, 0), 1))
8159 return pedantic_non_lvalue
8160 (fold (build (MAX_EXPR, type, arg1, arg2)));
8164 /* If C1 is C2 + 1, this is max(A, C2). */
8165 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8166 && operand_equal_p (TREE_OPERAND (arg0, 1),
8167 const_binop (PLUS_EXPR, arg2,
8168 integer_one_node, 0), 1))
8169 return pedantic_non_lvalue
8170 (fold (build (MAX_EXPR, type, arg1, arg2)));
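/* Concretely, "a < 4 ? a : 3" can be folded to MIN_EXPR (a, 3) and
   "a >= 4 ? a : 3" to MAX_EXPR (a, 3), since in both cases C1 is
   C2 + 1.  */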
8179 /* If the second operand is simpler than the third, swap them
8180 since that produces better jump optimization results. */
8181 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8182 TREE_OPERAND (t, 2), false))
8184 /* See if this can be inverted. If it can't, possibly because
8185 it was a floating-point inequality comparison, don't do
8187 tem = invert_truthvalue (arg0);
8189 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8190 return fold (build (code, type, tem,
8191 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8194 /* Convert A ? 1 : 0 to simply A. */
8195 if (integer_onep (TREE_OPERAND (t, 1))
8196 && integer_zerop (TREE_OPERAND (t, 2))
8197 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8198 call to fold will try to move the conversion inside
8199 a COND, which will recurse. In that case, the COND_EXPR
8200 is probably the best choice, so leave it alone. */
8201 && type == TREE_TYPE (arg0))
8202 return pedantic_non_lvalue (arg0);
8204 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8205 over COND_EXPR in cases such as floating point comparisons. */
8206 if (integer_zerop (TREE_OPERAND (t, 1))
8207 && integer_onep (TREE_OPERAND (t, 2))
8208 && truth_value_p (TREE_CODE (arg0)))
8209 return pedantic_non_lvalue (fold_convert (type,
8210 invert_truthvalue (arg0)));
8212 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8213 operation is simply A & 2. */
8215 if (integer_zerop (TREE_OPERAND (t, 2))
8216 && TREE_CODE (arg0) == NE_EXPR
8217 && integer_zerop (TREE_OPERAND (arg0, 1))
8218 && integer_pow2p (arg1)
8219 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8220 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8222 return pedantic_non_lvalue (fold_convert (type,
8223 TREE_OPERAND (arg0, 0)));
8225 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8226 if (integer_zerop (TREE_OPERAND (t, 2))
8227 && truth_value_p (TREE_CODE (arg0))
8228 && truth_value_p (TREE_CODE (arg1)))
8229 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8232 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8233 if (integer_onep (TREE_OPERAND (t, 2))
8234 && truth_value_p (TREE_CODE (arg0))
8235 && truth_value_p (TREE_CODE (arg1)))
8237 /* Only perform transformation if ARG0 is easily inverted. */
8238 tem = invert_truthvalue (arg0);
8239 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8240 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8247 /* When pedantic, a compound expression can be neither an lvalue
8248 nor an integer constant expression. */
8249 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
8251 /* Don't let (0, 0) be a null pointer constant. */
8252 if (integer_zerop (arg1))
8253 return build1 (NOP_EXPR, type, arg1);
8254 return fold_convert (type, arg1);
8258 return build_complex (type, arg0, arg1);
8262 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8264 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8265 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8266 TREE_OPERAND (arg0, 1));
8267 else if (TREE_CODE (arg0) == COMPLEX_CST)
8268 return TREE_REALPART (arg0);
8269 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8270 return fold (build (TREE_CODE (arg0), type,
8271 fold (build1 (REALPART_EXPR, type,
8272 TREE_OPERAND (arg0, 0))),
8273 fold (build1 (REALPART_EXPR,
8274 type, TREE_OPERAND (arg0, 1)))));
8278 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8279 return fold_convert (type, integer_zero_node);
8280 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8281 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8282 TREE_OPERAND (arg0, 0));
8283 else if (TREE_CODE (arg0) == COMPLEX_CST)
8284 return TREE_IMAGPART (arg0);
8285 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8286 return fold (build (TREE_CODE (arg0), type,
8287 fold (build1 (IMAGPART_EXPR, type,
8288 TREE_OPERAND (arg0, 0))),
8289 fold (build1 (IMAGPART_EXPR, type,
8290 TREE_OPERAND (arg0, 1)))));
8293 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8295 case CLEANUP_POINT_EXPR:
8296 if (! has_cleanups (arg0))
8297 return TREE_OPERAND (t, 0);
8300 enum tree_code code0 = TREE_CODE (arg0);
8301 int kind0 = TREE_CODE_CLASS (code0);
8302 tree arg00 = TREE_OPERAND (arg0, 0);
8305 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8306 return fold (build1 (code0, type,
8307 fold (build1 (CLEANUP_POINT_EXPR,
8308 TREE_TYPE (arg00), arg00))));
8310 if (kind0 == '<' || kind0 == '2'
8311 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8312 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8313 || code0 == TRUTH_XOR_EXPR)
8315 arg01 = TREE_OPERAND (arg0, 1);
8317 if (TREE_CONSTANT (arg00)
8318 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8319 && ! has_cleanups (arg00)))
8320 return fold (build (code0, type, arg00,
8321 fold (build1 (CLEANUP_POINT_EXPR,
8322 TREE_TYPE (arg01), arg01))));
8324 if (TREE_CONSTANT (arg01))
8325 return fold (build (code0, type,
8326 fold (build1 (CLEANUP_POINT_EXPR,
8327 TREE_TYPE (arg00), arg00)),
8335 /* Check for a built-in function. */
8336 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8337 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8339 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8341 tree tmp = fold_builtin (expr);
8349 } /* switch (code) */
8352 #ifdef ENABLE_FOLD_CHECKING
8355 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8356 static void fold_check_failed (tree, tree);
8357 void print_fold_checksum (tree);
8359 /* When --enable-checking=fold, compute a digest of expr before
8360 and after the actual fold call to verify that fold did not
8361 accidentally change the original expr. */
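/* The checksum thus catches any code path that mutates the input tree
   instead of expressing its simplification by building new nodes.  */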
8368 unsigned char checksum_before[16], checksum_after[16];
8371 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8372 md5_init_ctx (&ctx);
8373 fold_checksum_tree (expr, &ctx, ht);
8374 md5_finish_ctx (&ctx, checksum_before);
8377 ret = fold_1 (expr);
8379 md5_init_ctx (&ctx);
8380 fold_checksum_tree (expr, &ctx, ht);
8381 md5_finish_ctx (&ctx, checksum_after);
8384 if (memcmp (checksum_before, checksum_after, 16))
8385 fold_check_failed (expr, ret);
8391 print_fold_checksum (tree expr)
8394 unsigned char checksum[16], cnt;
8397 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8398 md5_init_ctx (&ctx);
8399 fold_checksum_tree (expr, &ctx, ht);
8400 md5_finish_ctx (&ctx, checksum);
8402 for (cnt = 0; cnt < 16; ++cnt)
8403 fprintf (stderr, "%02x", checksum[cnt]);
8404 putc ('\n', stderr);
8408 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8410 internal_error ("fold check: original tree changed by fold");
8414 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8417 enum tree_code code;
8418 char buf[sizeof (struct tree_decl)];
8421 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8422 > sizeof (struct tree_decl)
8423 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8427 slot = htab_find_slot (ht, expr, INSERT);
8431 code = TREE_CODE (expr);
8432 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8434 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8435 memcpy (buf, expr, tree_size (expr));
8437 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8439 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8441 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8442 memcpy (buf, expr, tree_size (expr));
8444 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8446 else if (TREE_CODE_CLASS (code) == 't'
8447 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8449 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8450 memcpy (buf, expr, tree_size (expr));
8452 TYPE_POINTER_TO (expr) = NULL;
8453 TYPE_REFERENCE_TO (expr) = NULL;
8455 md5_process_bytes (expr, tree_size (expr), ctx);
8456 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8457 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8458 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8459 len = TREE_CODE_LENGTH (code);
8460 switch (TREE_CODE_CLASS (code))
8466 md5_process_bytes (TREE_STRING_POINTER (expr),
8467 TREE_STRING_LENGTH (expr), ctx);
8470 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8471 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8474 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8484 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8485 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8488 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8489 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8498 case SAVE_EXPR: len = 2; break;
8499 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8500 case RTL_EXPR: len = 0; break;
8501 case WITH_CLEANUP_EXPR: len = 2; break;
8510 for (i = 0; i < len; ++i)
8511 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8514 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8515 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8516 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8517 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8518 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8519 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8520 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8521 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8522 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8523 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8524 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8527 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8528 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8529 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8530 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8531 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8532 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8533 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8534 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8535 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8536 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8545 /* Perform constant folding and related simplification of initializer
8546 expression EXPR. This behaves identically to "fold" but ignores
8547 potential run-time traps and exceptions that fold must preserve. */
8550 fold_initializer (tree expr)
8552 int saved_signaling_nans = flag_signaling_nans;
8553 int saved_trapping_math = flag_trapping_math;
8554 int saved_trapv = flag_trapv;
8557 flag_signaling_nans = 0;
8558 flag_trapping_math = 0;
8561 result = fold (expr);
8563 flag_signaling_nans = saved_signaling_nans;
8564 flag_trapping_math = saved_trapping_math;
8565 flag_trapv = saved_trapv;
8570 /* Determine if the first argument is a multiple of the second. Return 0 if
8571 it is not, or if we cannot easily determine it to be.
8573 An example of the sort of thing we care about (at this point; this routine
8574 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8575 fold cases do now) is discovering that
8577 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8583 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8585 This code also handles discovering that
8587 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8589 is a multiple of 8 so we don't have to worry about dealing with a
8592 Note that we *look* inside a SAVE_EXPR only to determine how it was
8593 calculated; it is not safe for fold to do much of anything else with the
8594 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8595 at run time. For example, the latter example above *cannot* be implemented
8596 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8597 evaluation time of the original SAVE_EXPR is not necessarily the same at
8598 the time the new expression is evaluated. The only optimization of this
8599 sort that would be valid is changing
8601 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8605 SAVE_EXPR (I) * SAVE_EXPR (J)
8607 (where the same SAVE_EXPR (J) is used in the original and the
8608 transformed version). */
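/* For example, with TOP being the tree for J * 8 and BOTTOM the
   constant 4, the MULT_EXPR case below succeeds because the constant
   operand 8 is itself a multiple of 4.  */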
8611 multiple_of_p (tree type, tree top, tree bottom)
8613 if (operand_equal_p (top, bottom, 0))
8616 if (TREE_CODE (type) != INTEGER_TYPE)
8619 switch (TREE_CODE (top))
8622 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8623 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8627 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8628 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8631 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8635 op1 = TREE_OPERAND (top, 1);
8636 /* const_binop may not detect overflow correctly,
8637 so check for it explicitly here. */
8638 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8639 > TREE_INT_CST_LOW (op1)
8640 && TREE_INT_CST_HIGH (op1) == 0
8641 && 0 != (t1 = fold_convert (type,
8642 const_binop (LSHIFT_EXPR,
8645 && ! TREE_OVERFLOW (t1))
8646 return multiple_of_p (type, t1, bottom);
8651 /* Can't handle conversions from non-integral or wider integral type. */
8652 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8653 || (TYPE_PRECISION (type)
8654 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8657 /* ... fall through ... */
8660 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8663 if (TREE_CODE (bottom) != INTEGER_CST
8664 || (TREE_UNSIGNED (type)
8665 && (tree_int_cst_sgn (top) < 0
8666 || tree_int_cst_sgn (bottom) < 0)))
8668 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8676 /* Return true if `t' is known to be non-negative. */
8679 tree_expr_nonnegative_p (tree t)
8681 switch (TREE_CODE (t))
8687 return tree_int_cst_sgn (t) >= 0;
8690 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8693 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8694 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8695 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8697 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8698 both unsigned and at least 2 bits shorter than the result. */
8699 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8700 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8701 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8703 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8704 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8705 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8706 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8708 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8709 TYPE_PRECISION (inner2)) + 1;
8710 return prec < TYPE_PRECISION (TREE_TYPE (t));
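/* E.g. on a target with 32-bit int, (int) (unsigned short) a
   + (int) (unsigned short) b gives prec 16 + 1 = 17 < 32, so the sum
   cannot wrap into the sign bit.  */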
8716 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8718 /* x * x for floating point x is always non-negative. */
8719 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8721 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8722 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8725 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8726 both unsigned and the sum of their precisions is less than that of the result. */
8727 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8728 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8729 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8731 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8732 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8733 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8734 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8735 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8736 < TYPE_PRECISION (TREE_TYPE (t));
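/* Likewise, (int) (unsigned char) a * (int) (unsigned char) b needs at
   most 8 + 8 = 16 bits, so with 32-bit int the product is always
   non-negative.  */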
8740 case TRUNC_DIV_EXPR:
8742 case FLOOR_DIV_EXPR:
8743 case ROUND_DIV_EXPR:
8744 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8745 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8747 case TRUNC_MOD_EXPR:
8749 case FLOOR_MOD_EXPR:
8750 case ROUND_MOD_EXPR:
8751 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8754 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8755 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8759 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8760 tree outer_type = TREE_TYPE (t);
8762 if (TREE_CODE (outer_type) == REAL_TYPE)
8764 if (TREE_CODE (inner_type) == REAL_TYPE)
8765 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8766 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8768 if (TREE_UNSIGNED (inner_type))
8770 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8773 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8775 if (TREE_CODE (inner_type) == REAL_TYPE)
8776 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8777 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8778 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8779 && TREE_UNSIGNED (inner_type);
8785 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8786 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8788 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8790 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8791 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8793 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8794 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8796 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8798 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8800 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8801 case NON_LVALUE_EXPR:
8802 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8804 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8806 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8810 tree fndecl = get_callee_fndecl (t);
8811 tree arglist = TREE_OPERAND (t, 1);
8813 && DECL_BUILT_IN (fndecl)
8814 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8815 switch (DECL_FUNCTION_CODE (fndecl))
8818 case BUILT_IN_CABSL:
8819 case BUILT_IN_CABSF:
8824 case BUILT_IN_EXP2F:
8825 case BUILT_IN_EXP2L:
8826 case BUILT_IN_EXP10:
8827 case BUILT_IN_EXP10F:
8828 case BUILT_IN_EXP10L:
8830 case BUILT_IN_FABSF:
8831 case BUILT_IN_FABSL:
8834 case BUILT_IN_FFSLL:
8835 case BUILT_IN_PARITY:
8836 case BUILT_IN_PARITYL:
8837 case BUILT_IN_PARITYLL:
8838 case BUILT_IN_POPCOUNT:
8839 case BUILT_IN_POPCOUNTL:
8840 case BUILT_IN_POPCOUNTLL:
8841 case BUILT_IN_POW10:
8842 case BUILT_IN_POW10F:
8843 case BUILT_IN_POW10L:
8845 case BUILT_IN_SQRTF:
8846 case BUILT_IN_SQRTL:
8850 case BUILT_IN_ATANF:
8851 case BUILT_IN_ATANL:
8853 case BUILT_IN_CEILF:
8854 case BUILT_IN_CEILL:
8855 case BUILT_IN_FLOOR:
8856 case BUILT_IN_FLOORF:
8857 case BUILT_IN_FLOORL:
8858 case BUILT_IN_NEARBYINT:
8859 case BUILT_IN_NEARBYINTF:
8860 case BUILT_IN_NEARBYINTL:
8861 case BUILT_IN_ROUND:
8862 case BUILT_IN_ROUNDF:
8863 case BUILT_IN_ROUNDL:
8864 case BUILT_IN_TRUNC:
8865 case BUILT_IN_TRUNCF:
8866 case BUILT_IN_TRUNCL:
8867 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8872 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8879 /* ... fall through ... */
8882 if (truth_value_p (TREE_CODE (t)))
8883 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8887 /* We don't know the sign of `t', so be conservative and return false. */
8891 /* Return true if `r' is known to be non-negative.
8892 Only handles constants at the moment. */
8895 rtl_expr_nonnegative_p (rtx r)
8897 switch (GET_CODE (r))
8900 return INTVAL (r) >= 0;
8903 if (GET_MODE (r) == VOIDmode)
8904 return CONST_DOUBLE_HIGH (r) >= 0;
8912 units = CONST_VECTOR_NUNITS (r);
8914 for (i = 0; i < units; ++i)
8916 elt = CONST_VECTOR_ELT (r, i);
8917 if (!rtl_expr_nonnegative_p (elt))
8926 /* These are always nonnegative. */
8934 #include "gt-fold-const.h"