/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);
/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as combining them under
   AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
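
/* Illustrative sketch (not part of the original source): the encoding
   uses one bit each for "less", "equal" and "greater", so compound
   operators are bitwise ORs of the primitive ones, and combining two
   comparisons with logical AND or OR becomes a bitwise AND or OR of
   their codes.  */
#if 0
static void
compcode_example (void)
{
  int le = COMPCODE_LT | COMPCODE_EQ;	/* == COMPCODE_LE (3) */
  int ge = COMPCODE_GT | COMPCODE_EQ;	/* == COMPCODE_GE (6) */
  /* (a <= b) && (a >= b) simplifies to a == b.  */
  int eq = le & ge;			/* == COMPCODE_EQ (2) */
}
#endif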
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
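
/* Illustrative sketch (not part of the original source): adding 1 to the
   most positive HOST_WIDE_INT wraps to the most negative value; the
   operands agree in sign and the sum does not, so the macro is nonzero.  */
#if 0
static void
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = ~((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  HOST_WIDE_INT b = 1;
  HOST_WIDE_INT sum = a + b;			/* wraps negative */
  int overflowed = OVERFLOW_SUM_SIGN (a, b, sum);	/* nonzero */
}
#endif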
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
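
/* Illustrative sketch (not part of the original source): encode followed
   by decode is the identity, and each words[] entry is one half-word
   "digit" of the doubleword value, least significant first.  */
#if 0
static void
encode_decode_example (void)
{
  HOST_WIDE_INT words[4], rhi;
  unsigned HOST_WIDE_INT rlow;

  encode (words, 123, 45);
  /* words[0] == LOWPART (123), words[1] == HIGHPART (123),
     words[2] == LOWPART (45), words[3] == HIGHPART (45).  */
  decode (words, &rlow, &rhi);
  /* rlow == 123 and rhi == 45.  */
}
#endif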
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */
int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
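
/* Illustrative sketch (not part of the original source): fitting 255
   into an 8-bit signed type masks the constant to its low 8 bits and
   sign extends, leaving -1; the nonzero return value reports the
   signed overflow.  */
#if 0
static void
force_fit_type_example (void)
{
  tree t = build_int_2 (255, 0);
  int overflowed;

  TREE_TYPE (t) = signed_char_type_node;
  overflowed = force_fit_type (t, 0);
  /* TREE_INT_CST_LOW (t) is now the bit pattern of -1 and
     overflowed is nonzero.  */
}
#endif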
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
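
/* Illustrative sketch (not part of the original source): adding two
   doubleword values and checking the signed-overflow indicator.  */
#if 0
static void
add_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ov;

  ov = add_double (5, 0, 7, 0, &lv, &hv);
  /* lv == 12, hv == 0, ov == 0.  */

  /* The most positive doubleword plus 1 flips the sign of the high
     word, so the indicator becomes nonzero.  */
  ov = add_double (~(unsigned HOST_WIDE_INT) 0,
                   ~((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)),
                   1, 0, &lv, &hv);
  /* ov != 0.  */
}
#endif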
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
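
/* Illustrative sketch (not part of the original source): shifting the
   doubleword value 1 left by HOST_BITS_PER_WIDE_INT moves the bit from
   the low word into bit 0 of the high word.  */
#if 0
static void
lshift_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  lshift_double (1, 0, HOST_BITS_PER_WIDE_INT,
                 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv, 0);
  /* Now lv == 0 and hv == 1.  */
}
#endif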
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */
int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;	/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
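
/* Illustrative sketch (not part of the original source): -7 / 2 under
   the different rounding codes.  TRUNC_DIV_EXPR gives quotient -3 and
   remainder -1; FLOOR_DIV_EXPR gives -4 and +1; CEIL_DIV_EXPR matches
   TRUNC here because the ratio is negative; ROUND_DIV_EXPR gives -4
   and +1 since twice the remainder reaches the divisor.  */
#if 0
static void
div_and_round_double_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (TRUNC_DIV_EXPR, 0, -7, -1, 2, 0,
                        &lquo, &hquo, &lrem, &hrem);
  /* quotient == -3, remainder == -1.  */

  div_and_round_double (FLOOR_DIV_EXPR, 0, -7, -1, 2, 0,
                        &lquo, &hquo, &lrem, &hrem);
  /* quotient == -4, remainder == +1.  */
}
#endif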
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (t),
                                   TREE_INT_CST_HIGH (t),
                                   &low, &high);
        tem = build_int_2 (low, high);
        TREE_TYPE (tem) = type;
        TREE_OVERFLOW (tem)
          = (TREE_OVERFLOW (t)
             | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
        TREE_CONSTANT_OVERFLOW (tem)
          = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);
      }
      if (! TREE_OVERFLOW (tem)
          || TREE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
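
/* Illustrative note (not part of the original source): for IN == A + 4
   with CODE == PLUS_EXPR, the INTEGER_CST 4 is stored in *LITP, *CONP
   stays null, and A is returned as the variable part; for IN == A - 4
   the 4 goes to *MINUS_LITP instead.  */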
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t2),
                          fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t1),
                          fold_convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, fold_convert (type, t1),
                    fold_convert (type, t2));
    }

  return fold (build (code, type, fold_convert (type, t1),
                      fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = -int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow && ! is_sizetype))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
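
/* Illustrative note (not part of the original source): combining the
   INTEGER_CSTs 7 and 3 with TRUNC_DIV_EXPR takes the single-precision
   shortcut above and produces the constant 2, while LSHIFT_EXPR of 1
   by HOST_BITS_PER_WIDE_INT lands the bit in the high word via
   lshift_double.  */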
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
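
/* Illustrative note (not part of the original source): the RDIV_EXPR
   case above implements (r1 + i1*i) / (r2 + i2*i)
   = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2),
   i.e. multiplication by the conjugate of the divisor over the squared
   magnitude.  */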
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}
/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, which is also an INTEGER_CST node.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}
/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code ATTRIBUTE_UNUSED, tree type,
                    tree arg1)
{
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;
          int overflow = 0;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          /* If x is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (x))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
              if (! REAL_VALUES_LESS (l, x))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
                  if (! REAL_VALUES_LESS (x, u))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, x);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
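
/* Illustrative note (not part of the original source): under the rules
   above, converting the REAL_CST 1e30 to a 32-bit signed type yields
   INT_MAX (2147483647) with TREE_OVERFLOW set, -1e30 yields INT_MIN,
   and a NaN yields zero.  */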
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

static tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold (build1 (NOP_EXPR, type, arg));

  if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (FLOAT_EXPR, type, arg));
      if (TREE_CODE (orig) == REAL_TYPE)
        return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                             type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
    }
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      if (INTEGRAL_TYPE_P (orig)
          || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == REAL_TYPE)
        return build (COMPLEX_EXPR, type,
                      fold_convert (TREE_TYPE (type), arg),
                      fold_convert (TREE_TYPE (type), integer_zero_node));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tree rpart, ipart;

          if (TREE_CODE (arg) == COMPLEX_EXPR)
            {
              rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
              ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
              return fold (build (COMPLEX_EXPR, type, rpart, ipart));
            }

          arg = save_expr (arg);
          rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
          rpart = fold_convert (TREE_TYPE (type), rpart);
          ipart = fold_convert (TREE_TYPE (type), ipart);
          return fold (build (COMPLEX_EXPR, type, rpart, ipart));
        }
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (VOID_TYPE_P (type))
    return fold (build1 (CONVERT_EXPR, type, arg));
  abort ();
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    default:
      abort ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (int code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    default:
      abort ();
    }
}
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.

   If ONLY_CONST is nonzero, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence a zero value for
   ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
   If comparing arbitrary expression trees, such as from different
   statements, ONLY_CONST must usually be nonzero.  */
2096 operand_equal_p (tree arg0, tree arg1, int only_const)
2100 /* If both types don't have the same signedness, then we can't consider
2101 them equal. We must check this before the STRIP_NOPS calls
2102 because they may change the signedness of the arguments. */
2103 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2109 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2110 /* This is needed for conversions and for COMPONENT_REF.
2111 Might as well play it safe and always test this. */
2112 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2113 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2114 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2117 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2118 We don't care about side effects in that case because the SAVE_EXPR
2119 takes care of that for us. In all other cases, two expressions are
2120 equal if they have no side effects. If we have two identical
   expressions with side effects that should be treated the same due
   to the only side effects being identical SAVE_EXPR's, that will
   be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    only_const))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 only_const)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    only_const));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case FIX_CEIL_EXPR:
	case FIX_TRUNC_EXPR:
	case FIX_FLOOR_EXPR:
	case FIX_ROUND_EXPR:
	  if (TREE_UNSIGNED (TREE_TYPE (arg0))
	      != TREE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
	  && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
			      0))
	return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
	       || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
	       || TREE_CODE (arg0) == BIT_IOR_EXPR
	       || TREE_CODE (arg0) == BIT_XOR_EXPR
	       || TREE_CODE (arg0) == BIT_AND_EXPR
	       || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case COMPONENT_REF:
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0));

	case BIT_FIELD_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), 0));
	default:
	  return 0;
	}

    case 'e':
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case RTL_EXPR:
	  return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));

	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg1, 0), 0))
	    return 0;

	  /* Only consider const functions equivalent.  */
	  fndecl = get_callee_fndecl (arg0);
	  if (fndecl == NULL_TREE
	      || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
	    return 0;

	  /* Now see if all the arguments are the same.  operand_equal_p
	     does not handle TREE_LIST, so we walk the operands here
	     feeding them to operand_equal_p.  */
	  arg0 = TREE_OPERAND (arg0, 1);
	  arg1 = TREE_OPERAND (arg1, 1);
	  while (arg0 && arg1)
	    {
	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
		return 0;

	      arg0 = TREE_CHAIN (arg0);
	      arg1 = TREE_CHAIN (arg1);
	    }

	  /* If we get here and both argument lists are exhausted
	     then the CALL_EXPRs are equal.  */
	  return ! (arg0 || arg1);

	default:
	  return 0;
	}

    case 'd':
      /* Consider __builtin_sqrt equal to sqrt.  */
      return TREE_CODE (arg0) == FUNCTION_DECL
	     && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	     && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	     && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);

    default:
      return 0;
    }
}
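/* Example (illustrative only): for the C expression `a * b == b * a',
   the two MULT_EXPR operands match under the commutative case above,
   so operand_equal_p reports the operands of the EQ_EXPR equal even
   though the trees are not pointer-identical.  */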

/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert ((*lang_hooks.types.signed_or_unsigned_type)
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
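/* Example (illustrative only; hypothetical declarations): if SC is a
   signed char compared against a small constant, shorten_compare may
   turn `(int) sc == 3' into `sc == 3'; re-narrowing ARG1 and OTHER
   above lets us still recognize ARG0 as the same comparison operand.  */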

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = '2';

  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case 'c':
      return 1;

    case 'e':
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
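/* Example (illustrative only): for ARG = `(a < b) | (a == b)', both
   comparisons mention only the two values A and B, so this returns 1
   with *CVAL1 = A and *CVAL2 = B.  */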

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      return fold (build1 (code, type,
			   eval_subst (TREE_OPERAND (arg, 0),
				       old0, new0, old1, new1)));

    case '2':
      return fold (build (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1)));

    case 'e':
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold (build (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1)));
	default:
	  break;
	}
      /* Fall through - ???  */

    case '<':
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold (build (code, type, arg0, arg1));
      }

    default:
      return arg;
    }
}
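/* Example (illustrative only): continuing the case above, eval_subst
   on `(a < b) | (a == b)' with OLD0 = A, NEW0 = 0, OLD1 = B, NEW1 = 0
   folds `(0 < 0) | (0 == 0)' to the constant 1, showing what the
   expression yields when both comparison operands are equal.  */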
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return non_lvalue (t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return pedantic_non_lvalue (t);
}
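/* Example (illustrative only): when fold reduces `f () * 0', OMITTED
   is the call `f ()'; if it has side effects the result is the
   equivalent of `(f (), 0)', otherwise simply `0'.  */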

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  */

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	  && !flag_unsafe_math_optimizations
	  && code != NE_EXPR && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	return build (invert_tree_comparison (code), type,
		      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return fold_convert (type, build_int_2 (integer_zerop (arg), 0));

    case TRUTH_AND_EXPR:
      return build (TRUTH_OR_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build (TRUTH_AND_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build (TRUTH_XOR_EXPR, type,
		      invert_truthvalue (TREE_OPERAND (arg, 0)),
		      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build (TRUTH_ORIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)),
		    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case WITH_RECORD_EXPR:
      return build (WITH_RECORD_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build (EQ_EXPR, type, arg,
		    fold_convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
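/* Example (illustrative only): inverting `a && b' produces
   `!a || !b' via the TRUTH_ANDIF_EXPR case (De Morgan), while
   inverting the integer comparison `a < b' produces `a >= b'
   directly through invert_tree_comparison.  */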
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold (build (TREE_CODE (arg0), type, common,
		      fold (build (code, type, left, right))));
}
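/* Example (illustrative only): `(x | 3) & (x | 5)' matches the first
   test above with COMMON = X, LEFT = 3, RIGHT = 5, so it becomes
   `x | (3 & 5)', which further folds to `x | 1'.  */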

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
		    int unsignedp)
{
  tree result = build (BIT_FIELD_REF, type, inner,
		       size_int (bitsize), bitsize_int (bitpos));

  TREE_UNSIGNED (result) = unsignedp;

  return result;
}

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
			    tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
  unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (linner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask),
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (rinner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return fold_convert (compare_type,
			       (code == NE_EXPR
				? integer_one_node : integer_zero_node));
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return fold_convert (compare_type,
			       (code == NE_EXPR
				? integer_one_node : integer_zero_node));
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					fold_convert (unsigned_type, rhs),
					size_int (lbitpos), 0),
			   mask, 0));

  return build (code, compare_type,
		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
}
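/* Example (illustrative only; little-endian bit layout assumed): for
   `struct { unsigned a : 3; unsigned b : 5; } s', the test `s.b == 3'
   can load the containing byte once and compare
   `(byte & 0xf8) == (3 << 3)', avoiding the shift an ordinary
   bit-field extraction would need.  */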

/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
    *punsignedp = TREE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
			fold_convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_2 (~0, ~0);
  TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
  force_fit_type (tmask, 0);
  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size),
						  0),
				     size_int (precision - size), 0));
}

/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
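/* Example (illustrative only): with 64-bit HOST_WIDE_INT and EXP of
   32-bit int type, the masks above reduce the test to checking that
   VAL is 0x80000000, the lone sign bit of the type.  */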

/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (exp) == NOP_EXPR
	  || TREE_CODE (exp) == CONVERT_EXPR)
	 && (TYPE_MODE (TREE_TYPE (exp))
	     == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
    exp = TREE_OPERAND (exp, 0);

  return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != '<')
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      abort ();
    }

  return fold_convert (type, result ? integer_one_node : integer_zero_node);
}
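/* Example (illustrative only): comparing a finite upper bound against
   an omitted one (ARG1 == 0, UPPER1_P nonzero) gives SGN0 = 0 and
   SGN1 = 1, so LT_EXPR yields true: every finite bound is below the
   infinite one.  */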

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  tree orig_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (first_rtl_op (code) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == '<'
	      || TREE_CODE_CLASS (code) == '1'
	      || TREE_CODE_CLASS (code) == '2')
	    type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == '2'
	      || TREE_CODE_CLASS (code) == '<'
	      || (TREE_CODE_CLASS (code) == 'e'
		  && TREE_CODE_LENGTH (code) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
	 lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)
	orig_type = type;

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      abort ();
	    }

	  exp = arg0;

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  */
	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
				  1, fold_convert (type, integer_zero_node),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = fold_convert (type, integer_zero_node);
		}
	    }
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, type,
			       fold_convert (type, integer_zero_node),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, type,
				fold_convert (type, integer_zero_node),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build (MINUS_EXPR, type, negate_expr (arg0),
		       fold_convert (type, integer_one_node));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
	    break;

	  if (! INTEGRAL_TYPE_P (type)
	      || (low != 0 && ! int_fits_type_p (low, type))
	      || (high != 0 && ! int_fits_type_p (high, type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert (type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert (type, n_high);

	  /* If we're converting from an unsigned to a signed type,
	     we will be doing the comparison as unsigned.  The tests above
	     have already verified that LOW and HIGH are both positive.

	     So we have to make sure that the original unsigned value will
	     be interpreted as positive.  */
	  if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
	    {
	      tree equiv_type = (*lang_hooks.types.type_for_mode)
		(TYPE_MODE (type), 1);
	      tree high_positive;

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (type);

	      if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
		high_positive = fold (build (RSHIFT_EXPR, type,
					     fold_convert (type,
							   high_positive),
					     fold_convert (type,
							   integer_one_node)));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = TREE_OPERAND (exp, 0);
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
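/* Example (illustrative only): for EXP = `x + 1 > 5' with unsigned X,
   the GT_EXPR case records the range - [-, 5], then the PLUS_EXPR case
   adjusts the bound by the constant, returning X with - [-, 4],
   i.e. the equivalent of `x > 4'.  */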

/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

  if (! in_p
      && (0 != (value = build_range_check (type, exp, 1, low, high))))
    return invert_truthvalue (value);

  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  if (low == 0)
    return fold (build (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build (GE_EXPR, type, exp, low));

  if (operand_equal_p (low, high, 0))
    return fold (build (EQ_EXPR, type, exp, low));

  if (integer_zerop (low))
    {
      if (! TREE_UNSIGNED (etype))
	{
	  etype = (*lang_hooks.types.unsigned_type) (etype);
	  high = fold_convert (etype, high);
	  exp = fold_convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* For enums the comparison will be done in the underlying type,
	 so using enum's precision is wrong here.
	 Consider e.g. enum { A, B, C, D, E }, low == B and high == D.  */
      if (TREE_CODE (etype) == ENUMERAL_TYPE)
	prec = GET_MODE_BITSIZE (TYPE_MODE (etype));
      else
	prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TREE_UNSIGNED (etype))
	    {
	      etype = (*lang_hooks.types.signed_type) (etype);
	      exp = fold_convert (etype, exp);
	    }
	  return fold (build (GT_EXPR, type, exp,
			      fold_convert (etype, integer_zero_node)));
	}
    }

  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
      && ! TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold (build (MINUS_EXPR, etype, exp, low)),
			      1, fold_convert (etype, integer_zero_node),
			      fold_convert (etype, value));

  return 0;
}
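/* Example (illustrative only): IN_P = 1, LOW = 2, HIGH = 5 reaches the
   final case above and becomes `(unsigned) (exp - 2) <= 3', the single
   unsigned comparison described in the range-test overview comment.  */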

/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the first range to the end of the second.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  in_p = 1, high = high0;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
			     integer_one_node, 0);
	}
      else if (! subset || highequal)
	{
	  in_p = 1, low = low0;
	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
			      integer_one_node, 0);
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  in_p = 1, high = high1;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
			     integer_one_node, 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_binop (PLUS_EXPR, NULL_TREE,
						      high0, 1,
						      integer_one_node, 1),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    return 0;
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
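/* Example (illustrative only): merging + [2, 9] with + [4, 5] gives
   the subset + [4, 5]; merging the disjoint + [2, 5] with + [7, 9]
   yields the always-false range, since their intersection is empty.  */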

#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (tree exp)
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
	       || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (RANGE_TEST_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
	       || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
		      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
		      TREE_OPERAND (exp, 1));

      else if ((*lang_hooks.decls.global_bindings_p) () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
			  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			  TREE_TYPE (exp), lhs, rhs);
	}
    }

  return 0;
}
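/* Example (illustrative only): `x == 2 || x == 3' makes the ranges
   + [2, 2] and + [3, 3]; the OR inversion turns both into excluded
   ranges, merge_ranges joins the adjacent pair into - [2, 3], and the
   final inversion yields `(unsigned) (x - 2) <= 1'.  */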

/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TREE_UNSIGNED (type))
    temp = fold_convert ((*lang_hooks.types.signed_type) (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TREE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
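/* Example (illustrative only, MASK omitted): with P = 4 in a 32-bit
   mode and the four-bit pattern 1010, a sign-extended C of 0xfffffffa
   comes back as 0x0000000a with all extra bits clear, while the
   zero-extended 0x0000000a comes back with the extra bits set.  */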
3840 /* Find ways of folding logical expressions of LHS and RHS:
3841 Try to merge two comparisons to the same innermost item.
3842 Look for range tests like "ch >= '0' && ch <= '9'".
3843 Look for combinations of simple terms on machines with expensive branches
3844 and evaluate the RHS unconditionally.
3846 For example, if we have p->a == 2 && p->b == 4 and we can make an
3847 object large enough to span both A and B, we can do this with a comparison
3848 against the object ANDed with the a mask.
3850 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3851 operations to do this with one comparison.
3853 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3854 function and the one above.
3856 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3857 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3859 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3862 We return the simplified tree or 0 if no optimization is possible. */
3865 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3867 /* If this is the "or" of two comparisons, we can do something if
3868 the comparisons are NE_EXPR. If this is the "and", we can do something
3869 if the comparisons are EQ_EXPR. I.e.,
3870 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3872 WANTED_CODE is this operation code. For single bit fields, we can
3873 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3874 comparison for one-bit fields. */
3876 enum tree_code wanted_code;
3877 enum tree_code lcode, rcode;
3878 tree ll_arg, lr_arg, rl_arg, rr_arg;
3879 tree ll_inner, lr_inner, rl_inner, rr_inner;
3880 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3881 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3882 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3883 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3884 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3885 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3886 enum machine_mode lnmode, rnmode;
3887 tree ll_mask, lr_mask, rl_mask, rr_mask;
3888 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3889 tree l_const, r_const;
3890 tree lntype, rntype, result;
3891 int first_bit, end_bit;
3894 /* Start by getting the comparison codes. Fail if anything is volatile.
3895 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3896 it were surrounded with a NE_EXPR. */
3898 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3901 lcode = TREE_CODE (lhs);
3902 rcode = TREE_CODE (rhs);
3904 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3905 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3907 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3908 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3910 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3913 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3914 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3916 ll_arg = TREE_OPERAND (lhs, 0);
3917 lr_arg = TREE_OPERAND (lhs, 1);
3918 rl_arg = TREE_OPERAND (rhs, 0);
3919 rr_arg = TREE_OPERAND (rhs, 1);
3921 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3922 if (simple_operand_p (ll_arg)
3923 && simple_operand_p (lr_arg)
3924 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3928 if (operand_equal_p (ll_arg, rl_arg, 0)
3929 && operand_equal_p (lr_arg, rr_arg, 0))
3931 int lcompcode, rcompcode;
3933 lcompcode = comparison_to_compcode (lcode);
3934 rcompcode = comparison_to_compcode (rcode);
3935 compcode = (code == TRUTH_AND_EXPR)
3936 ? lcompcode & rcompcode
3937 : lcompcode | rcompcode;
3939 else if (operand_equal_p (ll_arg, rr_arg, 0)
3940 && operand_equal_p (lr_arg, rl_arg, 0))
3942 int lcompcode, rcompcode;
3944 rcode = swap_tree_comparison (rcode);
3945 lcompcode = comparison_to_compcode (lcode);
3946 rcompcode = comparison_to_compcode (rcode);
3947 compcode = (code == TRUTH_AND_EXPR)
3948 ? lcompcode & rcompcode
3949 : lcompcode | rcompcode;
3954 if (compcode == COMPCODE_TRUE)
3955 return fold_convert (truth_type, integer_one_node);
3956 else if (compcode == COMPCODE_FALSE)
3957 return fold_convert (truth_type, integer_zero_node);
3958 else if (compcode != -1)
3959 return build (compcode_to_comparison (compcode),
3960 truth_type, ll_arg, lr_arg);
3963 /* If the RHS can be evaluated unconditionally and its operands are
3964 simple, it wins to evaluate the RHS unconditionally on machines
3965 with expensive branches. In this case, this isn't a comparison
3966 that can be merged. Avoid doing this if the RHS is a floating-point
3967 comparison since those can trap. */
3969 if (BRANCH_COST >= 2
3970 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3971 && simple_operand_p (rl_arg)
3972 && simple_operand_p (rr_arg))
3974 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3975 if (code == TRUTH_OR_EXPR
3976 && lcode == NE_EXPR && integer_zerop (lr_arg)
3977 && rcode == NE_EXPR && integer_zerop (rr_arg)
3978 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3979 return build (NE_EXPR, truth_type,
3980 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3984 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3985 if (code == TRUTH_AND_EXPR
3986 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3987 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3988 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3989 return build (EQ_EXPR, truth_type,
3990 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3994 return build (code, truth_type, lhs, rhs);
3997 /* See if the comparisons can be merged. Then get all the parameters for
4000 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4001 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4005 ll_inner = decode_field_reference (ll_arg,
4006 &ll_bitsize, &ll_bitpos, &ll_mode,
4007 &ll_unsignedp, &volatilep, &ll_mask,
4009 lr_inner = decode_field_reference (lr_arg,
4010 &lr_bitsize, &lr_bitpos, &lr_mode,
4011 &lr_unsignedp, &volatilep, &lr_mask,
4013 rl_inner = decode_field_reference (rl_arg,
4014 &rl_bitsize, &rl_bitpos, &rl_mode,
4015 &rl_unsignedp, &volatilep, &rl_mask,
4017 rr_inner = decode_field_reference (rr_arg,
4018 &rr_bitsize, &rr_bitpos, &rr_mode,
4019 &rr_unsignedp, &volatilep, &rr_mask,
4022 /* It must be true that the inner operation on the lhs of each
4023 comparison must be the same if we are to be able to do anything.
4024 Then see if we have constants. If not, the same must be true for
4026 if (volatilep || ll_inner == 0 || rl_inner == 0
4027 || ! operand_equal_p (ll_inner, rl_inner, 0))
4030 if (TREE_CODE (lr_arg) == INTEGER_CST
4031 && TREE_CODE (rr_arg) == INTEGER_CST)
4032 l_const = lr_arg, r_const = rr_arg;
4033 else if (lr_inner == 0 || rr_inner == 0
4034 || ! operand_equal_p (lr_inner, rr_inner, 0))
4037 l_const = r_const = 0;
4039 /* If either comparison code is not correct for our logical operation,
4040 fail. However, we can convert a one-bit comparison against zero into
4041 the opposite comparison against that bit being set in the field. */
4043 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4044 if (lcode != wanted_code)
4046 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4048 /* Make the left operand unsigned, since we are only interested
4049 in the value of one bit. Otherwise we are doing the wrong
4058 /* This is analogous to the code for l_const above. */
4059 if (rcode != wanted_code)
4061 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4070 /* After this point all optimizations will generate bit-field
4071 references, which we might not want. */
4072 if (! (*lang_hooks.can_use_bit_fields_p) ())
4075 /* See if we can find a mode that contains both fields being compared on
4076 the left. If we can't, fail. Otherwise, update all constants and masks
4077 to be relative to a field of that size. */
4078 first_bit = MIN (ll_bitpos, rl_bitpos);
4079 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4080 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4081 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4083 if (lnmode == VOIDmode)
4086 lnbitsize = GET_MODE_BITSIZE (lnmode);
4087 lnbitpos = first_bit & ~ (lnbitsize - 1);
4088 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
4089 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4091 if (BYTES_BIG_ENDIAN)
4093 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4094 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4097 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4098 size_int (xll_bitpos), 0);
4099 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4100 size_int (xrl_bitpos), 0);
4104 l_const = fold_convert (lntype, l_const);
4105 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4106 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4107 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4108 fold (build1 (BIT_NOT_EXPR,
4112 warning ("comparison is always %d", wanted_code == NE_EXPR);
4114 return fold_convert (truth_type,
4115 wanted_code == NE_EXPR
4116 ? integer_one_node : integer_zero_node);
4121 r_const = fold_convert (lntype, r_const);
4122 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4123 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4124 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4125 fold (build1 (BIT_NOT_EXPR,
4129 warning ("comparison is always %d", wanted_code == NE_EXPR);
4131 return fold_convert (truth_type,
4132 wanted_code == NE_EXPR
4133 ? integer_one_node : integer_zero_node);
4137 /* If the right sides are not constant, do the same for it. Also,
4138 disallow this optimization if a size or signedness mismatch occurs
4139 between the left and right sides. */
4142 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4143 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4144 /* Make sure the two fields on the right
4145 correspond to the left without being swapped. */
4146 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4149 first_bit = MIN (lr_bitpos, rr_bitpos);
4150 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4151 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4152 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4154 if (rnmode == VOIDmode)
4157 rnbitsize = GET_MODE_BITSIZE (rnmode);
4158 rnbitpos = first_bit & ~ (rnbitsize - 1);
4159 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
4160 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4162 if (BYTES_BIG_ENDIAN)
4164 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4165 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4168 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4169 size_int (xlr_bitpos), 0);
4170 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4171 size_int (xrr_bitpos), 0);
4173 /* Make a mask that corresponds to both fields being compared.
4174 Do this for both items being compared. If the operands are the
4175 same size and the bits being compared are in the same position
4176 then we can do this by masking both and comparing the masked
4178 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4179 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
4182 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4183 ll_unsignedp || rl_unsignedp);
4184 if (! all_ones_mask_p (ll_mask, lnbitsize))
4185 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4187 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4188 lr_unsignedp || rr_unsignedp);
4189 if (! all_ones_mask_p (lr_mask, rnbitsize))
4190 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
	  return build (wanted_code, truth_type, lhs, rhs);
	}
4195 /* There is still another way we can do something: If both pairs of
4196 fields being compared are adjacent, we may be able to make a wider
4197 field containing them both.
4199 Note that we still must mask the lhs/rhs expressions. Furthermore,
4200 the mask must be shifted to account for the shift done by
4201 make_bit_field_ref. */
4202 if ((ll_bitsize + ll_bitpos == rl_bitpos
4203 && lr_bitsize + lr_bitpos == rr_bitpos)
4204 || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;
4209 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4210 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4211 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4212 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4214 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4215 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4216 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4217 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }
4237 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4238 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4240 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4241 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
	  return build (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }
4249 /* Handle the case of comparisons with constants. If there is something in
4250 common between the masks, those bits of the constants must be the same.
4251 If not, the condition is always false. Test for this to avoid generating
4252 incorrect code below. */
4253 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4254 if (! integer_zerop (result)
4255 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4256 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
      if (wanted_code == NE_EXPR)
	{
	  warning ("`or' of unmatched not-equal tests is always 1");
	  return fold_convert (truth_type, integer_one_node);
	}
      else
	{
	  warning ("`and' of mutually exclusive equal-tests is always 0");
	  return fold_convert (truth_type, integer_zero_node);
	}
    }
  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
4274 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4275 ll_unsignedp || rl_unsignedp);
4277 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4278 if (! all_ones_mask_p (ll_mask, lnbitsize))
4279 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4281 return build (wanted_code, truth_type, result,
		const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
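/* Illustrative sketch, not part of the original source: the merging
   above targets code such as

     struct s { unsigned a : 4; unsigned b : 4; };
     int f (struct s x) { return x.a == 3 && x.b == 5; }

   (`struct s' and `f' are hypothetical.)  Both fields live in one
   word, so the two compares become a single load, one AND with the
   IOR of the two field masks, and one compare against the IOR of the
   two shifted constants; the exact mask and constant depend on the
   target's byte and bit ordering.  */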
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (tree t)
{
4291 tree type = TREE_TYPE (t);
4292 tree arg0 = TREE_OPERAND (t, 0);
4293 enum tree_code op_code;
4294 tree comp_const = TREE_OPERAND (t, 1);
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;
4299 STRIP_SIGN_NOPS (arg0);
4301 op_code = TREE_CODE (arg0);
4302 minmax_const = TREE_OPERAND (arg0, 1);
4303 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4304 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4305 inner = TREE_OPERAND (arg0, 0);
4307 /* If something does not permit us to optimize, return the original tree. */
4308 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4309 || TREE_CODE (comp_const) != INTEGER_CST
4310 || TREE_CONSTANT_OVERFLOW (comp_const)
4311 || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return t;
  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (TREE_CODE (t))
    {
4320 case NE_EXPR: case LT_EXPR: case LE_EXPR:
      return
	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
    case GE_EXPR:
      return
	fold (build (TRUTH_ORIF_EXPR, type,
4327 optimize_minmax_comparison
4328 (build (EQ_EXPR, type, arg0, comp_const)),
4329 optimize_minmax_comparison
4330 (build (GT_EXPR, type, arg0, comp_const))));
    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
4334 /* MAX (X, 0) == 0 -> X <= 0 */
4335 return fold (build (LE_EXPR, type, inner, comp_const));
4337 else if (op_code == MAX_EXPR && consts_lt)
4338 /* MAX (X, 0) == 5 -> X == 5 */
4339 return fold (build (EQ_EXPR, type, inner, comp_const));
4341 else if (op_code == MAX_EXPR)
4342 /* MAX (X, 0) == -1 -> false */
4343 return omit_one_operand (type, integer_zero_node, inner);
4345 else if (consts_equal)
4346 /* MIN (X, 0) == 0 -> X >= 0 */
4347 return fold (build (GE_EXPR, type, inner, comp_const));
      else if (consts_lt)
	/* MIN (X, 0) == 5 -> false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1 -> X == -1  */
	return fold (build (EQ_EXPR, type, inner, comp_const));
    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4359 /* MAX (X, 0) > 0 -> X > 0
4360 MAX (X, 0) > 5 -> X > 5 */
4361 return fold (build (GT_EXPR, type, inner, comp_const));
4363 else if (op_code == MAX_EXPR)
4364 /* MAX (X, 0) > -1 -> true */
4365 return omit_one_operand (type, integer_one_node, inner);
4367 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4368 /* MIN (X, 0) > 0 -> false
4369 MIN (X, 0) > 5 -> false */
4370 return omit_one_operand (type, integer_zero_node, inner);
      else
	/* MIN (X, 0) > -1 -> X > -1  */
	return fold (build (GT_EXPR, type, inner, comp_const));

    default:
      abort ();
    }
}
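/* Illustrative examples of the folds above (a sketch, not original
   source text), writing MAX/MIN for the MAX_EXPR/MIN_EXPR trees:

     MAX (x, 4) == 4  ->  x <= 4          MAX (x, 4) > 4  ->  x > 4
     MAX (x, 4) == 9  ->  x == 9          MAX (x, 4) > 3  ->  true
     MAX (x, 4) == 1  ->  false           MIN (x, 4) > 9  ->  false

   NE, LT, LE and GE are reduced to EQ and GT by inverting the truth
   value or by splitting into EQ-or-GT, as the switch shows.  */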
4381 /* T is an integer expression that is being multiplied, divided, or taken a
4382 modulus (CODE says which and what kind of divide or modulus) by a
4383 constant C. See if we can eliminate that operation by folding it with
4384 other operations already in T. WIDE_TYPE, if non-null, is a type that
4385 should be used for the computation if wider than our type.
4387 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4388 (X * 2) + (Y * 4). We must, however, be assured that either the original
4389 expression would not overflow or that overflow is undefined for the type
4390 in the language in question.
4392 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4393 the machine has a multiply-accumulate insn or that this is part of an
4394 addressing calculation.
4396 If we return a non-null expression, it is an equivalent form of the
4397 original computation, but need not be in the original type. */
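/* A sketch of the effect (illustrative only): with overflow undefined
   or known not to occur,

     (x * 8 + y * 16) / 4   ->   x * 2 + y * 4
     (x + 7) * 4            ->   x * 4 + 28

   The wrapper below only bounds the recursion; extract_muldiv_1 does
   the actual rewriting.  */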
static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
4402 /* To avoid exponential search depth, refuse to allow recursion past
4403 three levels. Beyond that (1) it's highly unlikely that we'll find
4404 something interesting and (2) we've probably processed it before
4405 when we built the inner expression. */
  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
4423 tree type = TREE_TYPE (t);
4424 enum tree_code tcode = TREE_CODE (t);
4425 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4426 > GET_MODE_SIZE (TYPE_MODE (type)))
4427 ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
4430 tree op0 = NULL_TREE, op1 = NULL_TREE;
4432 /* Don't deal with constants of zero here; they confuse the code below. */
  if (integer_zerop (c))
    return NULL_TREE;
4436 if (TREE_CODE_CLASS (tcode) == '1')
4437 op0 = TREE_OPERAND (t, 0);
4439 if (TREE_CODE_CLASS (tcode) == '2')
4440 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
4447 /* For a constant, we can always simplify if we are a multiply
4448 or (for divide and modulus) if it is a multiple of our constant. */
4449 if (code == MULT_EXPR
4450 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4451 return const_binop (code, fold_convert (ctype, t),
4452 fold_convert (ctype, c), 0);
4455 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4456 /* If op0 is an expression ... */
4457 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4458 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4459 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4460 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4461 /* ... and is unsigned, and its type is smaller than ctype,
4462 then we cannot pass through as widening. */
4463 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4464 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4465 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4466 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4467 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4468 /* ... or this is a truncation (t is narrower than op0),
4469 then we cannot pass through this narrowing. */
4470 || (GET_MODE_SIZE (TYPE_MODE (type))
4471 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4472 /* ... or signedness changes for division or modulus,
4473 then we cannot pass through this conversion. */
4474 || (code != MULT_EXPR
4475 && (TREE_UNSIGNED (ctype)
		      != TREE_UNSIGNED (TREE_TYPE (op0))))))
	break;
4479 /* Pass the constant down and see if we can make a simplification. If
4480 we can, replace this expression with the inner simplification for
4481 possible later conversion to our or some other type. */
4482 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4483 && TREE_CODE (t2) == INTEGER_CST
4484 && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;
    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
4493 must avoid building ABS_EXPR itself as unsigned. */
      if (TREE_UNSIGNED (ctype) && !TREE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
4497 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
	    {
	      t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}

    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
      break;
4510 case MIN_EXPR: case MAX_EXPR:
4511 /* If widening the type changes the signedness, then we can't perform
4512 this optimization as that changes the result. */
      if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
	break;
4516 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4517 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4518 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4520 if (tree_int_cst_sgn (c) < 0)
4521 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4523 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4524 fold_convert (ctype, t2)));
4528 case WITH_RECORD_EXPR:
4529 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4530 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4531 TREE_OPERAND (t, 1));
4534 case LSHIFT_EXPR: case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  */
4538 if (TREE_CODE (op1) == INTEGER_CST
4539 /* const_binop may not detect overflow correctly,
4540 so check for it explicitly here. */
4541 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4542 && TREE_INT_CST_HIGH (op1) == 0
4543 && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
4548 return extract_muldiv (build (tcode == LSHIFT_EXPR
4549 ? MULT_EXPR : FLOOR_DIV_EXPR,
4550 ctype, fold_convert (ctype, op0), t1),
				      c, code, wide_type);
      break;
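      /* E.g. (illustrative): with c == 4,

	   (x << 3) / 4   is handled as   (x * 8) / 4,

	 which the recursive call then reduces to x * 2, since 8 is an
	 exact multiple of 4.  */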
4554 case PLUS_EXPR: case MINUS_EXPR:
4555 /* See if we can eliminate the operation on both sides. If we can, we
4556 can return a new PLUS or MINUS. If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
4559 t1 = extract_muldiv (op0, c, code, wide_type);
4560 t2 = extract_muldiv (op1, c, code, wide_type);
4561 if (t1 != 0 && t2 != 0
4562 && (code == MULT_EXPR
4563 /* If not multiplication, we can only do this if both operands
4564 are divisible by c. */
4565 || (multiple_of_p (ctype, op0, c)
4566 && multiple_of_p (ctype, op1, c))))
4567 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4568 fold_convert (ctype, t2)));
4570 /* If this was a subtraction, negate OP1 and set it to be an addition.
4571 This simplifies the logic below. */
4572 if (tcode == MINUS_EXPR)
4573 tcode = PLUS_EXPR, op1 = negate_expr (op1);
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
4578 /* If either OP1 or C are negative, this optimization is not safe for
4579 some of the division and remainder types while for others we need
4580 to change the code. */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}
4592 /* If it's a multiply or a division/modulus operation of a multiple
4593 of our constant, do the operation and verify it doesn't overflow. */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;
      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TREE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;
4615 /* If we were able to eliminate our operation from the first side,
4616 apply our operation to the second side and reform the PLUS. */
4617 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4618 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4620 /* The last case is if we are a multiply. In that case, we can
4621 apply the distributive law to commute the multiply and addition
4622 if the multiplication of the constants doesn't overflow. */
4623 if (code == MULT_EXPR)
4624 return fold (build (tcode, ctype,
4625 fold (build (code, ctype,
4626 fold_convert (ctype, op0),
					 fold_convert (ctype, c))),
			    op1));

      break;

    case MULT_EXPR:  case TRUNC_MOD_EXPR:  case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR: case ROUND_MOD_EXPR:
4633 /* We have a special case here if we are doing something like
4634 (C * 8) % 4 since we know that's zero. */
4635 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4636 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4637 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4638 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4639 return omit_one_operand (type, integer_zero_node, op0);
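      /* E.g. (illustrative): (x * 8) % 4 folds to 0, keeping only x's
	 side effects, because the multiplier 8 is itself a multiple of
	 the modulus 4.  */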
4641 /* ... fall through ... */
4643 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4644 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4645 /* If we can extract our operation from the LHS, do so and return a
4646 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4647 do something only if the second operand is a constant. */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4650 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4651 fold_convert (ctype, op1)));
4652 else if (tcode == MULT_EXPR && code == MULT_EXPR
4653 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4654 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4655 fold_convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
	break;
4659 /* If these are the same operation types, we can associate them
4660 assuming no overflow. */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4663 fold_convert (ctype, c), 0))
4664 && ! TREE_OVERFLOW (t1))
4665 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4667 /* If these operations "cancel" each other, we have the main
4668 optimizations of this pass, which occur when either constant is a
4669 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.
4672 If we have an unsigned type that is not a sizetype, we cannot do
4673 this since it will change the result if the original computation
4675 if ((! TREE_UNSIGNED (ctype)
4676 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4678 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4679 || (tcode == MULT_EXPR
4680 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4681 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4684 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4685 fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   op1, c, 0))));
4688 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4689 return fold (build (code, ctype, fold_convert (ctype, op0),
4690 fold_convert (ctype,
					     const_binop (TRUNC_DIV_EXPR,
							  c, op1, 0))));
	}
      break;

    default:
      break;
    }

  return 0;
}
4703 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4704 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4705 that we may sometimes modify the tree. */
static tree
strip_compound_expr (tree t, tree s)
{
4710 enum tree_code code = TREE_CODE (t);
4712 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4713 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4714 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4715 return TREE_OPERAND (t, 1);
4717 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4718 don't bother handling any other types. */
  else if (code == COND_EXPR)
    {
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
      TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
    }
4725 else if (TREE_CODE_CLASS (code) == '1')
4726 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
  else if (TREE_CODE_CLASS (code) == '<'
	   || TREE_CODE_CLASS (code) == '2')
    {
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
    }

  return t;
}
4737 /* Return a node which has the indicated constant VALUE (either 0 or
4738 1), and is of the indicated TYPE. */
static tree
constant_boolean_node (int value, tree type)
{
4743 if (type == integer_type_node)
4744 return value ? integer_one_node : integer_zero_node;
4745 else if (TREE_CODE (type) == BOOLEAN_TYPE)
    return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node
						: integer_zero_node);
  else
    {
      tree t = build_int_2 (value, 0);

      TREE_TYPE (t) = type;
      return t;
    }
}
/* Utility function for the following routine, to see how complex a nesting of
   COND_EXPRs can be.  EXPR is the expression and LIM is a count beyond which
   we don't care (to avoid spending too much time on complex expressions).  */

static int
count_cond (tree expr, int lim)
{
  int ctrue, cfalse;

  if (TREE_CODE (expr) != COND_EXPR)
    return 0;
  else if (lim <= 0)
    return 0;
4771 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4772 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
  return MIN (lim, 1 + ctrue + cfalse);
}
4776 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
4778 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4779 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4780 COND is the first argument to CODE; otherwise (as in the example
4781 given here), it is the second argument. TYPE is the type of the
4782 original expression. */
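/* Illustrative (not original source text): with code == PLUS_EXPR,

     a + (b ? 2 : 3)   ->   b ? (a + 2) : (a + 3)
     a + (x < y)       ->   (x < y) ? (a + 1) : (a + 0)

   which pays off when one or both arms then fold to constants.  */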
static tree
fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
				     tree cond, tree arg, int cond_first_p)
{
4788 tree test, true_value, false_value;
4789 tree lhs = NULL_TREE;
4790 tree rhs = NULL_TREE;
4791 /* In the end, we'll produce a COND_EXPR. Both arms of the
4792 conditional expression will be binary operations. The left-hand
4793 side of the expression to be executed if the condition is true
4794 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4795 of the expression to be executed if the condition is true will be
4796 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
     but apply to the expression to be executed if the conditional is
     false.  */
  tree *true_lhs;
  tree *true_rhs;
  tree *false_lhs;
  tree *false_rhs;
  int save = 0;
4803 /* These are the codes to use for the left-hand side and right-hand
4804 side of the COND_EXPR. Normally, they are the same as CODE. */
4805 enum tree_code lhs_code = code;
4806 enum tree_code rhs_code = code;
4807 /* And these are the types of the expressions. */
4808 tree lhs_type = type;
4809 tree rhs_type = type;
  if (cond_first_p)
    {
      true_rhs = false_rhs = &arg;
      true_lhs = &true_value;
      false_lhs = &false_value;
    }
  else
    {
      true_lhs = false_lhs = &arg;
      true_rhs = &true_value;
      false_rhs = &false_value;
    }
  if (TREE_CODE (cond) == COND_EXPR)
    {
4827 test = TREE_OPERAND (cond, 0);
4828 true_value = TREE_OPERAND (cond, 1);
4829 false_value = TREE_OPERAND (cond, 2);
      /* If this operand is a throw-expression, it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  Instead of building `a + throw 3' for example,
	 we simply build `a, throw 3'.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	{
	  if (! cond_first_p)
	    {
	      lhs_code = COMPOUND_EXPR;
	      lhs_type = void_type_node;
	    }
	  else
	    lhs = true_value;
	}
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	{
	  if (! cond_first_p)
	    {
	      rhs_code = COMPOUND_EXPR;
	      rhs_type = void_type_node;
	    }
	  else
	    rhs = false_value;
	}
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = fold_convert (testtype, integer_one_node);
      false_value = fold_convert (testtype, integer_zero_node);
    }
  /* If ARG is complex we want to make sure we only evaluate it once.  Though
     this is only required if it is volatile, it might be more efficient even
     if it is not.  However, if we succeed in folding one part to a constant,
     we do not need to make this SAVE_EXPR.  Since we do this optimization
     primarily to see if we do end up with a constant and this SAVE_EXPR
     interferes with later optimizations, suppressing it when we can is
     important.

     If we are not in a function, we can't make a SAVE_EXPR, so don't try to
     do so.  Don't try to see if the result is a constant if an arm is a
     COND_EXPR since we get exponential behavior in that case.  */
  if (saved_expr_p (arg))
    save = 1;
4877 else if (lhs == 0 && rhs == 0
4878 && !TREE_CONSTANT (arg)
4879 && (*lang_hooks.decls.global_bindings_p) () == 0
4880 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4881 || TREE_SIDE_EFFECTS (arg)))
    {
      if (TREE_CODE (true_value) != COND_EXPR)
4884 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4886 if (TREE_CODE (false_value) != COND_EXPR)
4887 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4889 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4890 && (rhs == 0 || !TREE_CONSTANT (rhs)))
	{
	  arg = save_expr (arg);
	  lhs = rhs = 0;
	  save = saved_expr_p (arg);
	}
    }
  if (lhs == 0)
    lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
  if (rhs == 0)
    rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4903 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4905 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4906 ahead of the COND_EXPR we made. Otherwise we would have it only
4907 evaluated in one branch, with the other branch using the result
4908 but missing the evaluation code. Beware that the save_expr call
4909 above might not return a SAVE_EXPR, so testing the TREE_CODE
     of ARG is not enough to decide here.  */
  if (save)
    return build (COMPOUND_EXPR, type,
4913 fold_convert (void_type_node, arg),
4914 strip_compound_expr (test, arg));
  else
    return fold_convert (type, test);
}
4920 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4922 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4923 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4924 ADDEND is the same as X.
4926 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4927 and finite. The problematic cases are when X is zero, and its mode
4928 has signed zeros. In the case of rounding towards -infinity,
4929 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4930 modes, X + 0 is not the same as X because -0 + 0 is 0. */
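/* Concretely (an illustrative sketch): under IEEE arithmetic with
   signed zeros honored,

     -0.0 + 0.0  is  +0.0,  so X + 0.0 is wrong for X == -0.0, while
      0.0 - 0.0  is  -0.0 only when rounding towards -infinity,

   so only the X - 0.0 form is foldable, and only when sign-dependent
   rounding can be ignored.  */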
static bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;
4938 /* Don't allow the fold with -fsignaling-nans. */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;
4942 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;
4946 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4947 if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;
4951 /* The mode has signed zeros, and we have to honor their sign.
4952 In this situation, there is only one case we can return true for.
4953 X - 0 is the same as X unless rounding towards -infinity is
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
4958 /* Subroutine of fold() that checks comparisons of built-in math
4959 functions against real constants.
4961 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4962 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4963 is the type of the result and ARG0 and ARG1 are the operands of the
4964 comparison. ARG1 must be a TREE_REAL_CST.
4966 The function returns the constant folded tree if a simplification
4967 can be made, and NULL_TREE otherwise. */
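/* Illustrative examples (a sketch, not from the source):

     sqrt (x) <  -1.0   ->  false           (sqrt is never negative)
     sqrt (x) >  -1.0   ->  x >= 0.0        (when NaNs matter)
     sqrt (x) >   2.0   ->  x > 4.0
     sqrt (x) <   2.0   ->  x >= 0.0 && x < 4.0   (when NaNs matter)

   The squared constant c*c must itself be checked for overflow to
   +Inf, which the REAL_VALUE_ISINF tests in the body handle.  */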
static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;
4975 if (fcode == BUILT_IN_SQRT
4976 || fcode == BUILT_IN_SQRTF
      || fcode == BUILT_IN_SQRTL)
    {
4979 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4980 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4982 c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type,
				     fold_convert (type, integer_zero_node),
				     arg);
4991 /* sqrt(x) > y is always true, if y is negative and we
4992 don't care about NaNs, i.e. negative values of x. */
4993 if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type,
				     fold_convert (type, integer_one_node),
				     arg);
4998 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4999 return fold (build (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0)));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;
5006 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5007 real_convert (&c2, mode, &c2);
	  if (REAL_VALUE_ISINF (c2))
	    {
5011 /* sqrt(x) > y is x == +Inf, when y is very large. */
5012 if (HONOR_INFINITIES (mode))
5013 return fold (build (EQ_EXPR, type, arg,
5014 build_real (TREE_TYPE (arg), c2)));
5016 /* sqrt(x) > y is always false, when y is very large
5017 and we don't care about infinities. */
	      return omit_one_operand (type,
				       fold_convert (type, integer_zero_node),
				       arg);
	    }
5023 /* sqrt(x) > c is the same as x > c*c. */
5024 return fold (build (code, type, arg,
			      build_real (TREE_TYPE (arg), c2)));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;
5031 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5032 real_convert (&c2, mode, &c2);
	  if (REAL_VALUE_ISINF (c2))
	    {
5036 /* sqrt(x) < y is always true, when y is a very large
5037 value and we don't care about NaNs or Infinities. */
5038 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type,
					 fold_convert (type, integer_one_node),
					 arg);
5043 /* sqrt(x) < y is x != +Inf when y is very large and we
5044 don't care about NaNs. */
5045 if (! HONOR_NANS (mode))
5046 return fold (build (NE_EXPR, type, arg,
5047 build_real (TREE_TYPE (arg), c2)));
5049 /* sqrt(x) < y is x >= 0 when y is very large and we
5050 don't care about Infinities. */
5051 if (! HONOR_INFINITIES (mode))
5052 return fold (build (GE_EXPR, type, arg,
5053 build_real (TREE_TYPE (arg), dconst0)));
5055 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
	      if ((*lang_hooks.decls.global_bindings_p) () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;
5060 arg = save_expr (arg);
	      return fold (build (TRUTH_ANDIF_EXPR, type,
				  fold (build (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0))),
				  fold (build (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)))));
	    }
5070 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5071 if (! HONOR_NANS (mode))
5072 return fold (build (code, type, arg,
5073 build_real (TREE_TYPE (arg), c2)));
5075 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5076 if ((*lang_hooks.decls.global_bindings_p) () == 0
5077 && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold (build (TRUTH_ANDIF_EXPR, type,
				  fold (build (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0))),
				  fold (build (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)))));
	    }
	}
    }

  return NULL_TREE;
}
5094 /* Subroutine of fold() that optimizes comparisons against Infinities,
5095 either +Inf or -Inf.
5097 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5098 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5099 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5101 The function returns the constant folded tree if a simplification
5102 can be made, and NULL_TREE otherwise. */
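/* Illustrative (sketch): for double, writing MAX for DBL_MAX,

     x >  +Inf   ->  false                  (unless sNaNs are honored)
     x <= +Inf   ->  true, or x == x when NaNs matter
     x == +Inf   ->  x > MAX
     x <  +Inf   ->  x <= MAX
     x != +Inf   ->  x <= MAX, or !(x > MAX) when NaNs matter

   Comparisons against -Inf reduce to these by swapping the sense of
   the comparison, as done below.  */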
static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
5107 enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;
5112 mode = TYPE_MODE (TREE_TYPE (arg0));
5114 /* For negative infinity swap the sense of the comparison. */
5115 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);
  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type,
			       fold_convert (type, integer_zero_node),
			       arg0);
    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type,
				 fold_convert (type, integer_one_node),
				 arg0);
5136 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5137 if ((*lang_hooks.decls.global_bindings_p) () == 0
5138 && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold (build (EQ_EXPR, type, arg0, arg0));
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
5147 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5148 real_maxval (&max, neg, mode);
5149 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5150 arg0, build_real (TREE_TYPE (arg0), max)));
    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
5154 real_maxval (&max, neg, mode);
5155 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5156 arg0, build_real (TREE_TYPE (arg0), max)));
    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
5160 real_maxval (&max, neg, mode);
5161 if (! HONOR_NANS (mode))
5162 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5163 arg0, build_real (TREE_TYPE (arg0), max)));
5164 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5165 arg0, build_real (TREE_TYPE (arg0), max)));
      return fold (build1 (TRUTH_NOT_EXPR, type, temp));

    default:
      break;
    }

  return NULL_TREE;
}
5175 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5176 equality/inequality test, then return a simplified form of
5177 the test using shifts and logical operations. Otherwise return
5178 NULL. TYPE is the desired result type. */
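/* Illustrative (sketch): for unsigned int x,

     (x & 8) != 0   ->   (x >> 3) & 1
     (x & 8) == 0   ->   ((x >> 3) ^ 1) & 1

   and, when the mask is the operand's sign bit, e.g. for 32-bit int,

     (x & 0x80000000) != 0   ->   x < 0.  */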
static tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
     operand 0.  */
  if (code == TRUTH_NOT_EXPR)
    {
      code = TREE_CODE (arg0);
      if (code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;
5192 /* Extract the arguments of the EQ/NE. */
5193 arg1 = TREE_OPERAND (arg0, 1);
5194 arg0 = TREE_OPERAND (arg0, 0);
5196 /* This requires us to invert the code. */
      code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
    }
5200 /* If this is testing a single bit, we can optimize the test. */
5201 if ((code == NE_EXPR || code == EQ_EXPR)
5202 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
5205 tree inner = TREE_OPERAND (arg0, 0);
5206 tree type = TREE_TYPE (arg0);
5207 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5208 enum machine_mode operand_mode = TYPE_MODE (type);
5210 tree signed_type, unsigned_type, intermediate_type;
5213 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5214 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5215 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5216 if (arg00 != NULL_TREE
5217 /* This is only a win if casting to a signed type is cheap,
5218 i.e. when arg00's type is not a partial mode. */
5219 && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
5222 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
5223 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5224 fold_convert (stype, arg00),
			      fold_convert (stype, integer_zero_node)));
	}
5228 /* Otherwise we have (A & C) != 0 where C is a single bit,
5229 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5230 Similarly for (A & C) == 0. */
5232 /* If INNER is a right shift of a constant and it plus BITNUM does
5233 not overflow, adjust BITNUM and INNER. */
5234 if (TREE_CODE (inner) == RSHIFT_EXPR
5235 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5236 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5237 && bitnum < TYPE_PRECISION (type)
5238 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5239 bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}
5245 /* If we are going to be able to omit the AND below, we must do our
5246 operations as unsigned. If we must use the AND, we have a choice.
5247 Normally unsigned is faster, but for some machines signed is. */
5248 #ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
#else
      ops_unsigned = 1;
#endif
5254 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
5255 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
5256 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5257 inner = fold_convert (intermediate_type, inner);
      if (bitnum != 0)
	inner = build (RSHIFT_EXPR, intermediate_type,
5261 inner, size_int (bitnum));
5263 if (code == EQ_EXPR)
5264 inner = build (BIT_XOR_EXPR, intermediate_type,
5265 inner, integer_one_node);
5267 /* Put the AND last so it can combine with more things. */
5268 inner = build (BIT_AND_EXPR, intermediate_type,
5269 inner, integer_one_node);
5271 /* Make sure to return the proper type. */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
5279 /* Check whether we are allowed to reorder operands arg0 and arg1,
5280 such that the evaluation of arg1 occurs before arg0. */
static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
5294 ARG1, for example because ARG0 is an integer constant and ARG1
5295 isn't. If REORDER is true, only recommend swapping if we can
5296 evaluate the operands in reverse order. */
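/* For example (illustrative), this predicate makes fold rewrite
   3 + x as x + 3, so the simplification patterns below need only
   look for a constant in the second operand.  */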
static bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
5301 STRIP_SIGN_NOPS (arg0);
5302 STRIP_SIGN_NOPS (arg1);
  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;
5327 if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;
5339 /* Perform constant folding and related simplification of EXPR.
5340 The related simplifications include x*1 => x, x*0 => 0, etc.,
5341 and application of the associative law.
5342 NOP_EXPR conversions may be removed freely (as long as we
5343 are careful not to change the C type of the overall expression)
5344 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5345 but we can constant-fold them if they have constant operands. */
5347 #ifdef ENABLE_FOLD_CHECKING
5348 # define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
5355 tree t = expr, orig_t;
  tree t1 = NULL_TREE;
  tree tem;
5358 tree type = TREE_TYPE (expr);
5359 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5360 enum tree_code code = TREE_CODE (t);
5361 int kind = TREE_CODE_CLASS (code);
5363 /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;
5367 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5368 Likewise for a SAVE_EXPR that's already been evaluated. */
  if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
    return t;
  /* Return right away if a constant.  */
  if (kind == 'c')
    return t;

  orig_t = t;
  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;
5382 /* Special case for conversion ops that can have fixed point args. */
5383 arg0 = TREE_OPERAND (t, 0);
5385 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5387 STRIP_SIGN_NOPS (arg0);
5389 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
	subop = TREE_REALPART (arg0);
      else
	subop = arg0;
5394 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5395 && TREE_CODE (subop) != REAL_CST)
5396 /* Note that TREE_CONSTANT isn't enough:
5397 static var addresses are constant but we can't
	   do arithmetic on them.  */
	wins = 0;
    }
5401 else if (IS_EXPR_CODE_CLASS (kind))
    {
      int len = first_rtl_op (code);
      int i;
5405 for (i = 0; i < len; i++)
	{
	  tree op = TREE_OPERAND (t, i);
	  tree subop;

	  if (op == 0)
	    continue;		/* Valid for CALL_EXPR, at least.  */
5413 if (kind == '<' || code == RSHIFT_EXPR)
	    {
	      /* Signedness matters here.  Perhaps we can refine this
		 later.  */
	      STRIP_SIGN_NOPS (op);
	    }
	  else
	    /* Strip any conversions that don't change the mode.  */
	    STRIP_NOPS (op);
5423 if (TREE_CODE (op) == COMPLEX_CST)
	    subop = TREE_REALPART (op);
	  else
	    subop = op;
5428 if (TREE_CODE (subop) != INTEGER_CST
5429 && TREE_CODE (subop) != REAL_CST)
5430 /* Note that TREE_CONSTANT isn't enough:
5431 static var addresses are constant but we can't
	       do arithmetic on them.  */
	    wins = 0;

	  if (i == 0)
	    arg0 = op;
	  else if (i == 1)
	    arg1 = op;
	}
    }
5442 /* If this is a commutative operation, and ARG0 is a constant, move it
5443 to ARG1 to reduce the number of tests below. */
5444 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5445 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5446 || code == BIT_AND_EXPR)
5447 && tree_swap_operands_p (arg0, arg1, true))
5448 return fold (build (code, type, TREE_OPERAND (t, 1),
5449 TREE_OPERAND (t, 0)));
5451 /* Now WINS is set as described above,
5452 ARG0 is the first operand of EXPR,
5453 and ARG1 is the second operand (if it has more than one operand).
5455 First check for cases where an arithmetic operation is applied to a
5456 compound, conditional, or comparison operation. Push the arithmetic
5457 operation inside the compound or conditional to see if any folding
5458 can then be done. Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.
5462 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5463 one of the operands is a comparison and the other is a comparison, a
5464 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5465 code below would make the expression more complex. Change it to a
5466 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5467 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5469 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5470 || code == EQ_EXPR || code == NE_EXPR)
5471 && ((truth_value_p (TREE_CODE (arg0))
5472 && (truth_value_p (TREE_CODE (arg1))
5473 || (TREE_CODE (arg1) == BIT_AND_EXPR
5474 && integer_onep (TREE_OPERAND (arg1, 1)))))
5475 || (truth_value_p (TREE_CODE (arg1))
5476 && (truth_value_p (TREE_CODE (arg0))
5477 || (TREE_CODE (arg0) == BIT_AND_EXPR
5478 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5480 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
		       : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
		       : TRUTH_XOR_EXPR,
		       type, arg0, arg1));

      if (code == EQ_EXPR)
	t = invert_truthvalue (t);

      return t;
    }
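  /* For example (illustrative): for integer a, b, c, d,

       (a < b) & (c < d)    ->  TRUTH_AND_EXPR of the compares
       (a < b) | (c < d)    ->  TRUTH_OR_EXPR
       (a < b) != (c < d)   ->  TRUTH_XOR_EXPR
       (a < b) == (c < d)   ->  inverted TRUTH_XOR_EXPR

     Both operands are known to be 0 or 1, so the truth forms are
     equivalent and expose the logical folds below.  */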
5491 if (TREE_CODE_CLASS (code) == '1')
5493 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5494 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5495 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5496 else if (TREE_CODE (arg0) == COND_EXPR)
5498 tree arg01 = TREE_OPERAND (arg0, 1);
5499 tree arg02 = TREE_OPERAND (arg0, 2);
5500 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5501 arg01 = fold (build1 (code, type, arg01));
5502 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5503 arg02 = fold (build1 (code, type, arg02));
	  t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
			   arg01, arg02));
5507 /* If this was a conversion, and all we did was to move into
5508 inside the COND_EXPR, bring it back out. But leave it if
5509 it is a conversion from integer to integer and the
5510 result precision is no wider than a word since such a
5511 conversion is cheap and may be optimized away by combine,
5512 while it couldn't if it were outside the COND_EXPR. Then return
5513 so we don't get into an infinite recursion loop taking the
5514 conversion out and then back in. */
5516 if ((code == NOP_EXPR || code == CONVERT_EXPR
5517 || code == NON_LVALUE_EXPR)
5518 && TREE_CODE (t) == COND_EXPR
5519 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5520 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5521 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5522 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5523 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5524 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5525 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
		    && (INTEGRAL_TYPE_P
			(TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5528 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
	    t = build1 (code, type,
			build (COND_EXPR,
5531 TREE_TYPE (TREE_OPERAND
5532 (TREE_OPERAND (t, 1), 0)),
5533 TREE_OPERAND (t, 0),
5534 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
			       TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
	  return t;
	}
5538 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5539 return fold (build (COND_EXPR, type, arg0,
5540 fold (build1 (code, type, integer_one_node)),
5541 fold (build1 (code, type, integer_zero_node))));
5543 else if (TREE_CODE_CLASS (code) == '<'
5544 && TREE_CODE (arg0) == COMPOUND_EXPR)
5545 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5546 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5547 else if (TREE_CODE_CLASS (code) == '<'
5548 && TREE_CODE (arg1) == COMPOUND_EXPR)
5549 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5550 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5551 else if (TREE_CODE_CLASS (code) == '2'
5552 || TREE_CODE_CLASS (code) == '<')
5554 if (TREE_CODE (arg1) == COMPOUND_EXPR
5555 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5556 && ! TREE_SIDE_EFFECTS (arg0))
5557 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5558 fold (build (code, type,
5559 arg0, TREE_OPERAND (arg1, 1))));
5560 else if ((TREE_CODE (arg1) == COND_EXPR
5561 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5562 && TREE_CODE_CLASS (code) != '<'))
5563 && (TREE_CODE (arg0) != COND_EXPR
5564 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5565 && (! TREE_SIDE_EFFECTS (arg0)
5566 || ((*lang_hooks.decls.global_bindings_p) () == 0
5567 && ! CONTAINS_PLACEHOLDER_P (arg0))))
	return
	  fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5570 /*cond_first_p=*/0);
5571 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5572 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5573 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5574 else if ((TREE_CODE (arg0) == COND_EXPR
5575 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5576 && TREE_CODE_CLASS (code) != '<'))
5577 && (TREE_CODE (arg1) != COND_EXPR
5578 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5579 && (! TREE_SIDE_EFFECTS (arg1)
5580 || ((*lang_hooks.decls.global_bindings_p) () == 0
5581 && ! CONTAINS_PLACEHOLDER_P (arg1))))
	return
	  fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5584 /*cond_first_p=*/1);
  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
5604 /* Other kinds of FIX are not handled properly by fold_convert. */
5606 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5607 return TREE_OPERAND (t, 0);
5609 /* Handle cases of two conversions in a row. */
5610 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5611 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5614 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5615 tree final_type = TREE_TYPE (t);
5616 int inside_int = INTEGRAL_TYPE_P (inside_type);
5617 int inside_ptr = POINTER_TYPE_P (inside_type);
5618 int inside_float = FLOAT_TYPE_P (inside_type);
5619 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5620 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5621 int inter_int = INTEGRAL_TYPE_P (inter_type);
5622 int inter_ptr = POINTER_TYPE_P (inter_type);
5623 int inter_float = FLOAT_TYPE_P (inter_type);
5624 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5625 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5626 int final_int = INTEGRAL_TYPE_P (final_type);
5627 int final_ptr = POINTER_TYPE_P (final_type);
5628 int final_float = FLOAT_TYPE_P (final_type);
5629 unsigned int final_prec = TYPE_PRECISION (final_type);
5630 int final_unsignedp = TREE_UNSIGNED (final_type);
5632 /* In addition to the cases of two conversions in a row
5633 handled below, if we are converting something to its own
5634 type via an object of identical or wider precision, neither
5635 conversion is needed. */
5636 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5637 && ((inter_int && final_int) || (inter_float && final_float))
5638 && inter_prec >= final_prec)
5639 return fold (build1 (code, final_type,
5640 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5642 /* Likewise, if the intermediate and final types are either both
5643 float or both integer, we don't need the middle conversion if
5644 it is wider than the final type and doesn't change the signedness
5645 (for integers). Avoid this if the final type is a pointer
5646 since then we sometimes need the inner conversion. Likewise if
5647 the outer has a precision not equal to the size of its mode. */
5648 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5649 || (inter_float && inside_float))
5650 && inter_prec >= inside_prec
5651 && (inter_float || inter_unsignedp == inside_unsignedp)
5652 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
5655 return fold (build1 (code, final_type,
5656 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5658 /* If we have a sign-extension of a zero-extended value, we can
5659 replace that by a single zero-extension. */
5660 if (inside_int && inter_int && final_int
5661 && inside_prec < inter_prec && inter_prec < final_prec
5662 && inside_unsignedp && !inter_unsignedp)
5663 return fold (build1 (code, final_type,
5664 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5666 /* Two conversions in a row are not needed unless:
5667 - some conversion is floating-point (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
5670 - the intermediate type and innermost type differ in signedness,
5671 and the outermost type is wider than the intermediate, or
5672 - the initial type is a pointer type and the precisions of the
5673 intermediate and final types differ, or
5674 - the final type is a pointer type and the precisions of the
5675 initial and intermediate types differ. */
5676 if (! inside_float && ! inter_float && ! final_float
5677 && (inter_prec > inside_prec || inter_prec > final_prec)
5678 && ! (inside_int && inter_int
5679 && inter_unsignedp != inside_unsignedp
5680 && inter_prec < final_prec)
5681 && ((inter_unsignedp && inter_prec > inside_prec)
5682 == (final_unsignedp && final_prec > inter_prec))
5683 && ! (inside_ptr && inter_prec != final_prec)
5684 && ! (final_ptr && inside_prec != inter_prec)
5685 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
5688 return fold (build1 (code, final_type,
				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
	}
5692 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5693 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5694 /* Detect assigning a bitfield. */
5695 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5696 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
5699 unless assigning a bitfield. */
5700 tree prev = TREE_OPERAND (t, 0);
5703 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5704 /* First do the assignment, then return converted constant. */
5705 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
	  TREE_NO_UNUSED_WARNING (t) = 1;
	  return t;
	}
5711 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5712 constants (if x has signed type, the sign bit cannot be set
5713 in c). This folds extension into the BIT_AND_EXPR. */
5714 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5715 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5716 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5717 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
	{
	  tree and = TREE_OPERAND (t, 0);
	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
	  int change = 0;

	  if (TREE_UNSIGNED (TREE_TYPE (and))
	      || (TYPE_PRECISION (TREE_TYPE (t))
		  <= TYPE_PRECISION (TREE_TYPE (and))))
	    change = 1;
5727 else if (TYPE_PRECISION (TREE_TYPE (and1))
5728 <= HOST_BITS_PER_WIDE_INT
5729 && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;
5733 cst = tree_low_cst (and1, 1);
5734 cst &= (HOST_WIDE_INT) -1
5735 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5736 change = (cst == 0);
5737 #ifdef LOAD_EXTEND_OP
	      if (change
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
5742 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5743 and0 = fold_convert (uns, and0);
5744 and1 = fold_convert (uns, and1);
		}
#endif
	    }

	  if (change)
	    return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5750 fold_convert (TREE_TYPE (t), and0),
5751 fold_convert (TREE_TYPE (t), and1)));
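	  /* E.g. (illustrative): with unsigned char x,

	       (unsigned int) (x & 0x7f)   ->   (unsigned int) x & 0x7f

	     For a signed operand the fold is done only when the mask's
	     sign bit is clear, as the checks above ensure.  */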
	}

      tem = fold_convert_const (code, TREE_TYPE (t), arg0);
5755 return tem ? tem : t;
5757 case VIEW_CONVERT_EXPR:
5758 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5759 return build1 (VIEW_CONVERT_EXPR, type,
		       TREE_OPERAND (TREE_OPERAND (t, 0), 0));
      return t;

    case COMPONENT_REF:
5764 if (TREE_CODE (arg0) == CONSTRUCTOR
5765 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));

	  if (m)
	    t = TREE_VALUE (m);
	}
      return t;

    case RANGE_EXPR:
      if (TREE_CONSTANT (t) != wins)
	{
	  if (orig_t == t)
	    t = copy_node (t);
	  TREE_CONSTANT (t) = wins;
	}
      return t;

    case NEGATE_EXPR:
5783 if (negate_expr_p (arg0))
5784 return fold_convert (type, negate_expr (arg0));
      return t;

    case ABS_EXPR:
      if (wins)
	{
	  if (TREE_CODE (arg0) == INTEGER_CST)
	    {
	      /* If the value is unsigned, then the absolute value is
		 the same as the ordinary value.  */
	      if (TREE_UNSIGNED (type))
		;
	      /* Similarly, if the value is non-negative.  */
	      else if (INT_CST_LT (integer_minus_one_node, arg0))
		;
	      /* If the value is negative, then the absolute value is
		 its negation.  */
	      else
		{
		  unsigned HOST_WIDE_INT low;
		  HOST_WIDE_INT high;
5805 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
					     TREE_INT_CST_HIGH (arg0),
					     &low, &high);
5808 t = build_int_2 (low, high);
5809 TREE_TYPE (t) = type;
		  TREE_OVERFLOW (t)
		    = (TREE_OVERFLOW (arg0)
5812 | force_fit_type (t, overflow));
5813 TREE_CONSTANT_OVERFLOW (t)
		    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
		}
	    }
	  else if (TREE_CODE (arg0) == REAL_CST)
	    {
	      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
		t = build_real (type,
				REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
	    }
	}
5824 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5825 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5826 /* Convert fabs((double)float) into (double)fabsf(float). */
5827 else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert (type, fold (build1 (ABS_EXPR,
						     TREE_TYPE (targ0),
						     targ0)));
	}
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;
      return t;

    case CONJ_EXPR:
5841 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5842 return fold_convert (type, arg0);
5843 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5844 return build (COMPLEX_EXPR, type,
5845 TREE_OPERAND (arg0, 0),
5846 negate_expr (TREE_OPERAND (arg0, 1)));
5847 else if (TREE_CODE (arg0) == COMPLEX_CST)
5848 return build_complex (type, TREE_REALPART (arg0),
5849 negate_expr (TREE_IMAGPART (arg0)));
5850 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5851 return fold (build (TREE_CODE (arg0), type,
5852 fold (build1 (CONJ_EXPR, type,
5853 TREE_OPERAND (arg0, 0))),
5854 fold (build1 (CONJ_EXPR,
5855 type, TREE_OPERAND (arg0, 1)))));
5856 else if (TREE_CODE (arg0) == CONJ_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;

    case BIT_NOT_EXPR:
      if (wins)
	{
5863 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5864 ~ TREE_INT_CST_HIGH (arg0));
5865 TREE_TYPE (t) = type;
5866 force_fit_type (t, 0);
5867 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
	  TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
	}
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;

    case PLUS_EXPR:
5875 /* A + (-B) -> A - B */
5876 if (TREE_CODE (arg1) == NEGATE_EXPR)
5877 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5878 /* (-A) + B -> B - A */
5879 if (TREE_CODE (arg0) == NEGATE_EXPR)
5880 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
      else if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));
5886 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5887 with a constant, and the two constants have no bits in common,
5888 we should treat this as a BIT_IOR_EXPR since this may produce more
5890 if (TREE_CODE (arg0) == BIT_AND_EXPR
5891 && TREE_CODE (arg1) == BIT_AND_EXPR
5892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5893 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5894 && integer_zerop (const_binop (BIT_AND_EXPR,
5895 TREE_OPERAND (arg0, 1),
5896 TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
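	  /* E.g. (illustrative): (x & 0xf0) + (y & 0x0f) can generate
	     no carries, since the two masks share no bits, so it is
	     recoded as (x & 0xf0) | (y & 0x0f) and handled by the
	     BIT_IOR_EXPR case.  */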
5902 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5903 (plus (plus (mult) (mult)) (foo)) so that we can
5904 take advantage of the factoring cases below. */
5905 if ((TREE_CODE (arg0) == PLUS_EXPR
5906 && TREE_CODE (arg1) == MULT_EXPR)
5907 || (TREE_CODE (arg1) == PLUS_EXPR
5908 && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;
5912 if (TREE_CODE (arg0) == PLUS_EXPR)
5913 parg = arg0, marg = arg1;
5915 parg = arg1, marg = arg0;
5916 parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);
5921 if (TREE_CODE (parg0) == MULT_EXPR
5922 && TREE_CODE (parg1) != MULT_EXPR)
5923 return fold (build (PLUS_EXPR, type,
5924 fold (build (PLUS_EXPR, type,
5925 fold_convert (type, parg0),
5926 fold_convert (type, marg))),
5927 fold_convert (type, parg1)));
5928 if (TREE_CODE (parg0) != MULT_EXPR
5929 && TREE_CODE (parg1) == MULT_EXPR)
5930 return fold (build (PLUS_EXPR, type,
5931 fold (build (PLUS_EXPR, type,
5932 fold_convert (type, parg1),
5933 fold_convert (type, marg))),
				     fold_convert (type, parg0)));
	    }

	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tree arg00, arg01, arg10, arg11;
5940 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5942 /* (A * C) + (B * C) -> (A+B) * C.
5943 We are most concerned about the case where C is a constant,
5944 but other combinations show up during loop reduction. Since
5945 it is not difficult, try all four possibilities. */
5947 arg00 = TREE_OPERAND (arg0, 0);
5948 arg01 = TREE_OPERAND (arg0, 1);
5949 arg10 = TREE_OPERAND (arg1, 0);
5950 arg11 = TREE_OPERAND (arg1, 1);
5953 if (operand_equal_p (arg01, arg11, 0))
5954 same = arg01, alt0 = arg00, alt1 = arg10;
5955 else if (operand_equal_p (arg00, arg10, 0))
5956 same = arg00, alt0 = arg01, alt1 = arg11;
5957 else if (operand_equal_p (arg00, arg11, 0))
5958 same = arg00, alt0 = arg01, alt1 = arg10;
5959 else if (operand_equal_p (arg01, arg10, 0))
5960 same = arg01, alt0 = arg00, alt1 = arg11;
5962 /* No identical multiplicands; see if we can find a common
5963 power-of-two factor in non-power-of-two multiplies. This
5964 can help in multi-dimensional array access. */
5965 else if (TREE_CODE (arg01) == INTEGER_CST
5966 && TREE_CODE (arg11) == INTEGER_CST
5967 && TREE_INT_CST_HIGH (arg01) == 0
5968 && TREE_INT_CST_HIGH (arg11) == 0)
5970 HOST_WIDE_INT int01, int11, tmp;
5971 int01 = TREE_INT_CST_LOW (arg01);
5972 int11 = TREE_INT_CST_LOW (arg11);
5974 /* Move min of absolute values to int11. */
5975 if ((int01 >= 0 ? int01 : -int01)
5976 < (int11 >= 0 ? int11 : -int11))
5978 tmp = int01, int01 = int11, int11 = tmp;
5979 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5980 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5983 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5985 alt0 = fold (build (MULT_EXPR, type, arg00,
5986 build_int_2 (int01 / int11, 0)));
5993 return fold (build (MULT_EXPR, type,
5994 fold (build (PLUS_EXPR, type, alt0, alt1)),
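      /* Worked example (illustrative, not from the original source):
         i*12 + j*4 has no identical multiplicand, but 4 is a power of
         two and 12 % 4 == 0, so the sum is rewritten as (i*3 + j) * 4,
         a useful shape for multi-dimensional array indexing.  */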
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue (fold_convert (type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue (fold_convert (type, arg1));

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold (build (MULT_EXPR, type, arg0,
                                build_real (type, dconst2)));

          /* Convert x*c+x into x*(c+1).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            {
              REAL_VALUE_TYPE c;

              c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
              return fold (build (MULT_EXPR, type, arg1,
                                  build_real (type, c)));
            }

          /* Convert x+x*c into x*(c+1).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
              && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
            {
              REAL_VALUE_TYPE c;

              c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
              return fold (build (MULT_EXPR, type, arg0,
                                  build_real (type, c)));
            }

          /* Convert x*c1+x*c2 into x*(c1+c2).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == MULT_EXPR
              && TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              REAL_VALUE_TYPE c1, c2;

              c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
              c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
              real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
              return fold (build (MULT_EXPR, type,
                                  TREE_OPERAND (arg0, 0),
                                  build_real (type, c1)));
            }
        }
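      /* Illustrative examples (not from the original source) for the
         -funsafe-math-optimizations cases above:
             x*2.0 + x       ->  x*3.0
             x + x*2.0       ->  x*3.0
             x*2.0 + x*3.0   ->  x*5.0
         These can change rounding, which is why they are flag-guarded.  */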
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
                            code0 == LSHIFT_EXPR ? tree01 : tree11);
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return build ((code0 == LSHIFT_EXPR
                                 ? LROTATE_EXPR
                                 : RROTATE_EXPR),
                                type, TREE_OPERAND (arg0, 0), tree01);
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return build ((code0 != LSHIFT_EXPR
                                 ? LROTATE_EXPR
                                 : RROTATE_EXPR),
                                type, TREE_OPERAND (arg0, 0), tree11);
              }
          }
      }
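      /* Worked example (illustrative, not from the original source):
         with a 32-bit unsigned x,
             (x << 3) + (x >> 29)
         satisfies 3 + 29 == TYPE_PRECISION, so it is recognized as x
         rotated left by 3 bits (an LROTATE_EXPR).  */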
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -funsafe-math-optimizations.  */

      if (! wins
          && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (2 < ((var0 != 0) + (var1 != 0)
                   + (con0 != 0) + (con1 != 0)
                   + (lit0 != 0) + (lit1 != 0)
                   + (minus_lit0 != 0) + (minus_lit1 != 0)))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (var0, var1, code, type);
              con0 = associate_trees (con0, con1, code, type);
              lit0 = associate_trees (lit0, lit1, code, type);
              minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return fold_convert (type,
                                         associate_trees (var0, minus_lit0,
                                                          MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return fold_convert (type,
                                           associate_trees (var0, con0,
                                                            PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (con0, lit0, code, type);
              return fold_convert (type, associate_trees (var0, con0,
                                                          code, type));
            }
        }

    binary:
      if (wins)
        t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
        {
          /* The return value should always have
             the same type as the original expression.  */
          if (TREE_TYPE (t1) != TREE_TYPE (t))
            t1 = fold_convert (TREE_TYPE (t), t1);

          return t1;
        }
      return t;
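      /* Illustrative note (not from the original source): the association
         pass splits each operand into variable, constant, and literal
         parts, so e.g. (x + 1) + (y + 2) regroups as (x + y) + 3,
         letting the two literals fold into one.  */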
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold (build (MINUS_EXPR, type, negate_expr (arg1),
                            TREE_OPERAND (arg0, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (! wins && integer_zerop (arg0))
            return negate_expr (fold_convert (type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* (A * C) - (B * C) -> (A-B) * C.  Since we are most concerned
             about the case where C is a constant, just try one of the
             four possibilities.  */

          if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 1), 0))
            return fold (build (MULT_EXPR, type,
                                fold (build (MINUS_EXPR, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0))),
                                TREE_OPERAND (arg0, 1)));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                return fold (build (BIT_AND_EXPR, type,
                                    fold (build1 (BIT_NOT_EXPR, type,
                                                  TREE_OPERAND (arg1, 0))),
                                    arg0));
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                return fold (build (BIT_AND_EXPR, type,
                                    fold (build1 (BIT_NOT_EXPR, type,
                                                  TREE_OPERAND (arg1, 1))),
                                    arg0));
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold (build (BIT_XOR_EXPR, type,
                                     TREE_OPERAND (arg0, 0), mask1));
                  return fold (build (MINUS_EXPR, type, tem, mask1));
                }
            }
        }

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert (type, integer_zero_node);

      goto associate;
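      /* Worked example (illustrative, not from the original source):
         A - (A & B) keeps exactly the bits of A outside B.  With
         A == 0b1100 and B == 0b1010, both A - (A & B) and ~B & A
         evaluate to 0b0100, matching the rewrite above.  */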
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold (build (MULT_EXPR, type,
                            TREE_OPERAND (arg0, 0),
                            negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold (build (MULT_EXPR, type,
                            negate_expr (arg0),
                            TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold (build (LSHIFT_EXPR, type, arg0,
                                TREE_OPERAND (arg1, 1)));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold (build (LSHIFT_EXPR, type, arg1,
                                TREE_OPERAND (arg0, 1)));

          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
                                             fold_convert (type, arg1),
                                             code, NULL_TREE)))
            return fold_convert (type, tem);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold (build1 (NEGATE_EXPR, type, arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0);
              if (tem)
                return fold (build (RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg0, 1)));
            }

          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of sqrt(...)*sqrt(...).  */
              if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
                  || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
                  || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
                {
                  tree sqrtfn, arg, arglist;
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y).  */
                  sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  arg = fold (build (MULT_EXPR, type, arg00, arg10));
                  arglist = build_tree_list (NULL_TREE, arg);
                  return build_function_call_expr (sqrtfn, arglist);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1
                  && (fcode0 == BUILT_IN_EXP
                      || fcode0 == BUILT_IN_EXPF
                      || fcode0 == BUILT_IN_EXPL
                      || fcode0 == BUILT_IN_EXP2
                      || fcode0 == BUILT_IN_EXP2F
                      || fcode0 == BUILT_IN_EXP2L
                      || fcode0 == BUILT_IN_EXP10
                      || fcode0 == BUILT_IN_EXP10F
                      || fcode0 == BUILT_IN_EXP10L
                      || fcode0 == BUILT_IN_POW10
                      || fcode0 == BUILT_IN_POW10F
                      || fcode0 == BUILT_IN_POW10L))
                {
                  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  tree arg = build (PLUS_EXPR, type,
                                    TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                    TREE_VALUE (TREE_OPERAND (arg1, 1)));
                  tree arglist = build_tree_list (NULL_TREE, fold (arg));
                  return build_function_call_expr (expfn, arglist);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
                  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
                                                                     1)));
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
                  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
                                                                     1)));

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                      tree arg = build (MULT_EXPR, type, arg00, arg10);
                      tree arglist = tree_cons (NULL_TREE, fold (arg),
                                                build_tree_list (NULL_TREE,
                                                                 arg01));
                      return build_function_call_expr (powfn, arglist);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                      tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
                      tree arglist = tree_cons (NULL_TREE, arg00,
                                                build_tree_list (NULL_TREE,
                                                                 arg));
                      return build_function_call_expr (powfn, arglist);
                    }
                }

              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
                {
                  tree sinfn;

                  switch (fcode0)
                    {
                    case BUILT_IN_TAN:
                    case BUILT_IN_COS:
                      sinfn = implicit_built_in_decls[BUILT_IN_SIN];
                      break;
                    case BUILT_IN_TANF:
                    case BUILT_IN_COSF:
                      sinfn = implicit_built_in_decls[BUILT_IN_SINF];
                      break;
                    case BUILT_IN_TANL:
                    case BUILT_IN_COSL:
                      sinfn = implicit_built_in_decls[BUILT_IN_SINL];
                      break;
                    default:
                      sinfn = NULL_TREE;
                    }

                  if (sinfn != NULL_TREE)
                    return build_function_call_expr (sinfn,
                                                     TREE_OPERAND (arg0, 1));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
                  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
                                                                     1)));
                  if (TREE_CODE (arg11) == REAL_CST
                      && ! TREE_CONSTANT_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg, arglist;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      arglist = build_tree_list (NULL_TREE, arg);
                      arglist = tree_cons (NULL_TREE, arg0, arglist);
                      return build_function_call_expr (powfn, arglist);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
                  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
                                                                     1)));
                  if (TREE_CODE (arg01) == REAL_CST
                      && ! TREE_CONSTANT_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg, arglist;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      arglist = build_tree_list (NULL_TREE, arg);
                      arglist = tree_cons (NULL_TREE, arg1, arglist);
                      return build_function_call_expr (powfn, arglist);
                    }
                }

              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (! optimize_size
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = NULL_TREE;

                  if (type == double_type_node)
                    powfn = implicit_built_in_decls[BUILT_IN_POW];
                  else if (type == float_type_node)
                    powfn = implicit_built_in_decls[BUILT_IN_POWF];
                  else if (type == long_double_type_node)
                    powfn = implicit_built_in_decls[BUILT_IN_POWL];

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      tree arglist = build_tree_list (NULL_TREE, arg);
                      arglist = tree_cons (NULL_TREE, arg0, arglist);
                      return build_function_call_expr (powfn, arglist);
                    }
                }
            }
        }
      goto associate;
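      /* Illustrative examples (not from the original source) for the
         flag-guarded builtin rewrites above:
             sqrt(x)*sqrt(y)        ->  sqrt(x*y)
             exp(x)*exp(y)          ->  exp(x+y)
             pow(x,2.0)*pow(x,3.0)  ->  pow(x,5.0)
             tan(x)*cos(x)          ->  sin(x)
         None of these are exact in IEEE arithmetic, hence the guard.  */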
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold (build1 (BIT_NOT_EXPR, type,
                               build (BIT_AND_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;
    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
        return fold (build1 (BIT_NOT_EXPR, type, arg0));

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return fold_convert (type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold (build1 (BIT_NOT_EXPR, type,
                               build (BIT_IOR_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      TREE_OPERAND (arg1, 0))));
        }

      goto associate;
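      /* Illustrative note (not from the original source): in the
         ((int)c & 0377) case above, if c has an 8-bit unsigned char type
         the widening conversion already yields a value in [0, 255], so
         masking with 0377 (0xff) changes nothing and the AND is dropped.  */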
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return t;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold (build (RDIV_EXPR, type,
                            TREE_OPERAND (arg0, 0),
                            negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold (build (RDIV_EXPR, type,
                            negate_expr (arg0),
                            TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -funsafe-math-optimizations.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_unsafe_math_optimizations
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1, 0)))
            return fold (build (MULT_EXPR, type, arg0, tem));
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold (build (MULT_EXPR, type, arg0, tem));
                }
            }
        }
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                            fold (build (MULT_EXPR, type,
                                         TREE_OPERAND (arg0, 1), arg1))));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold (build (MULT_EXPR, type,
                            fold (build (RDIV_EXPR, type, arg0,
                                         TREE_OPERAND (arg1, 0))),
                            TREE_OPERAND (arg1, 1)));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1), 0);
          if (tem)
            return fold (build (RDIV_EXPR, type, tem,
                                TREE_OPERAND (arg1, 0)));
        }

      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg1);
          /* Optimize x/expN(y) into x*expN(-y).  */
          if (fcode == BUILT_IN_EXP
              || fcode == BUILT_IN_EXPF
              || fcode == BUILT_IN_EXPL
              || fcode == BUILT_IN_EXP2
              || fcode == BUILT_IN_EXP2F
              || fcode == BUILT_IN_EXP2L
              || fcode == BUILT_IN_EXP10
              || fcode == BUILT_IN_EXP10F
              || fcode == BUILT_IN_EXP10L
              || fcode == BUILT_IN_POW10
              || fcode == BUILT_IN_POW10F
              || fcode == BUILT_IN_POW10L)
            {
              tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
              tree arg = build1 (NEGATE_EXPR, type,
                                 TREE_VALUE (TREE_OPERAND (arg1, 1)));
              tree arglist = build_tree_list (NULL_TREE, fold (arg));
              arg1 = build_function_call_expr (expfn, arglist);
              return fold (build (MULT_EXPR, type, arg0, arg1));
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode == BUILT_IN_POW
              || fcode == BUILT_IN_POWF
              || fcode == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
              tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
              tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
              tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
              tree arglist = tree_cons (NULL_TREE, arg10,
                                        build_tree_list (NULL_TREE, neg11));
              arg1 = build_function_call_expr (powfn, arglist);
              return fold (build (MULT_EXPR, type, arg0, arg1));
            }
        }

      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
            {
              tree tanfn;

              if (fcode0 == BUILT_IN_SIN)
                tanfn = implicit_built_in_decls[BUILT_IN_TAN];
              else if (fcode0 == BUILT_IN_SINF)
                tanfn = implicit_built_in_decls[BUILT_IN_TANF];
              else if (fcode0 == BUILT_IN_SINL)
                tanfn = implicit_built_in_decls[BUILT_IN_TANL];
              else
                tanfn = NULL_TREE;

              if (tanfn != NULL_TREE)
                return build_function_call_expr (tanfn,
                                                 TREE_OPERAND (arg0, 1));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
            {
              tree tanfn;

              if (fcode0 == BUILT_IN_COS)
                tanfn = implicit_built_in_decls[BUILT_IN_TAN];
              else if (fcode0 == BUILT_IN_COSF)
                tanfn = implicit_built_in_decls[BUILT_IN_TANF];
              else if (fcode0 == BUILT_IN_COSL)
                tanfn = implicit_built_in_decls[BUILT_IN_TANL];
              else
                tanfn = NULL_TREE;

              if (tanfn != NULL_TREE)
                {
                  tree tmp = TREE_OPERAND (arg0, 1);
                  tmp = build_function_call_expr (tanfn, tmp);
                  return fold (build (RDIV_EXPR, type,
                                      build_real (type, dconst1),
                                      tmp));
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
              tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
              if (TREE_CODE (arg01) == REAL_CST
                  && ! TREE_CONSTANT_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg, arglist;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  arglist = build_tree_list (NULL_TREE, arg);
                  arglist = tree_cons (NULL_TREE, arg1, arglist);
                  return build_function_call_expr (powfn, arglist);
                }
            }
        }
      goto binary;
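      /* Worked example (illustrative, not from the original source):
         x / 0.25 becomes x * 4.0 because 0.25 has an exactly
         representable reciprocal; for a divisor like 0.1 the reciprocal
         is inexact, so that rewrite only happens under
         -funsafe-math-optimizations.  */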
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return t;

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
                                         code, NULL_TREE)))
        return fold_convert (type, tem);

      goto binary;
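      /* Illustrative note (not from the original source): when arg0 is a
         known multiple of arg1, e.g. (n * 8) / 8 with CEIL_DIV_EXPR or
         FLOOR_DIV_EXPR, the rounding mode is irrelevant and the cheaper
         EXACT_DIV_EXPR is used instead.  */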
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (integer_onep (arg1))
        return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
        return t;

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
                                         code, NULL_TREE)))
        return fold_convert (type, tem);

      goto binary;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
        return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
          tem = fold_convert (TREE_TYPE (arg1), tem);
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
          return fold (build (RROTATE_EXPR, type, arg0, tem));
        }

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build (code, type,
                                         TREE_OPERAND (arg0, 0), arg1)),
                            fold (build (code, type,
                                         TREE_OPERAND (arg0, 1), arg1))));

      /* Two consecutive rotates adding up to the width of the mode can
         be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
        return TREE_OPERAND (arg0, 0);

      goto binary;
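      /* Worked example (illustrative, not from the original source):
         on a 32-bit type, a left rotate by 5 is rewritten as a right
         rotate by 27, since 32 - 5 == 27; RROTATE_EXPR is the canonical
         form here.  */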
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
        return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
        return omit_one_operand (type, arg1, arg0);
      goto associate;
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
        {
          tem = fold_single_bit_test (code, arg0, arg1, type);
          if (tem)
            return tem;
          return t;
        }
      return fold_convert (type, tem);

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return t;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.   Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold (build (TREE_CODE (arg0), type, a00,
                                fold (build (code, type, a01, a11))));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold (build (TREE_CODE (arg0), type, a00,
                                fold (build (code, type, a01, a10))));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold (build (TREE_CODE (arg0), type, a01,
                                fold (build (code, type, a00, a11))));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold (build (TREE_CODE (arg0), type,
                                fold (build (code, type, a00, a10)),
                                a01));
        }

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (t)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
        return tem;

      return t;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);
      goto truth_andor;

    case TRUTH_XOR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      /* If either arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg0))
        return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
      if (integer_onep (arg1))
        return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
      return t;
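      /* Worked example (illustrative, not from the original source):
         (a || b) && (a || c) distributes to a || (b && c), so a is
         tested once; the transformation is refused when the inner
         operands have side effects that could change a's value.  */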
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if (tree_swap_operands_p (arg0, arg1, true))
        return fold (build (swap_tree_comparison (code), type, arg1, arg0));

      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree targ0 = strip_float_extensions (arg0);
          tree targ1 = strip_float_extensions (arg1);
          tree newtype = TREE_TYPE (targ0);

          if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
            newtype = TREE_TYPE (targ1);

          /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
          if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
            return fold (build (code, type, fold_convert (newtype, targ0),
                                fold_convert (newtype, targ1)));

          /* (-a) CMP (-b) -> b CMP a  */
          if (TREE_CODE (arg0) == NEGATE_EXPR
              && TREE_CODE (arg1) == NEGATE_EXPR)
            return fold (build (code, type, TREE_OPERAND (arg1, 0),
                                TREE_OPERAND (arg0, 0)));

          if (TREE_CODE (arg1) == REAL_CST)
            {
              REAL_VALUE_TYPE cst;
              cst = TREE_REAL_CST (arg1);

              /* (-a) CMP CST -> a swap(CMP) (-CST)  */
              if (TREE_CODE (arg0) == NEGATE_EXPR)
                return
                  fold (build (swap_tree_comparison (code), type,
                               TREE_OPERAND (arg0, 0),
                               build_real (TREE_TYPE (arg1),
                                           REAL_VALUE_NEGATE (cst))));

              /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
              /* a CMP (-0) -> a CMP 0  */
              if (REAL_VALUE_MINUS_ZERO (cst))
                return fold (build (code, type, arg0,
                                    build_real (TREE_TYPE (arg1), dconst0)));

              /* x != NaN is always true, other ops are always false.  */
              if (REAL_VALUE_ISNAN (cst)
                  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
                {
                  t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
                  return omit_one_operand (type, fold_convert (type, t), arg0);
                }

              /* Fold comparisons against infinity.  */
              if (REAL_VALUE_ISINF (cst))
                {
                  tem = fold_inf_compare (code, type, arg0, arg1);
                  if (tem != NULL_TREE)
                    return tem;
                }
            }

          /* If this is a comparison of a real constant with a PLUS_EXPR
             or a MINUS_EXPR of a real constant, we can convert it into a
             comparison with a revised real constant as long as no overflow
             occurs when unsafe_math_optimizations are enabled.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == REAL_CST
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR)
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                          ? MINUS_EXPR : PLUS_EXPR,
                                          arg1, TREE_OPERAND (arg0, 1), 0))
              && ! TREE_CONSTANT_OVERFLOW (tem))
            return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

          /* Likewise, we can simplify a comparison of a real constant with
             a MINUS_EXPR whose first operand is also a real constant, i.e.
             (c1 - x) < c2 becomes x > c1-c2.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (arg0) == MINUS_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
              && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                          arg1, 0))
              && ! TREE_CONSTANT_OVERFLOW (tem))
            return fold (build (swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 1), tem));

          /* Fold comparisons against built-in math functions.  */
          if (TREE_CODE (arg1) == REAL_CST
              && flag_unsafe_math_optimizations
              && ! flag_errno_math)
            {
              enum built_in_function fcode = builtin_mathfn_code (arg0);

              if (fcode != END_BUILTINS)
                {
                  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
                  if (tem != NULL_TREE)
                    return tem;
                }
            }
        }
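      /* Illustrative note (not from the original source): under
         -funsafe-math-optimizations a comparison such as
             x + 1.5 < 3.0
         is rewritten as x < 1.5 by folding the two constants, provided
         the revised constant does not overflow.  */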
      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
      if (TREE_CONSTANT (arg1)
          && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
              || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
          /* This optimization is invalid for ordered comparisons
             if CONST+INCR overflows or if foo+incr might overflow.
             This optimization is invalid for floating point due to rounding.
             For pointer types we assume overflow doesn't happen.  */
          && (POINTER_TYPE_P (TREE_TYPE (arg0))
              || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
                  && (code == EQ_EXPR || code == NE_EXPR))))
        {
          tree varop, newconst;

          if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
            {
              newconst = fold (build (PLUS_EXPR, TREE_TYPE (arg0),
                                      arg1, TREE_OPERAND (arg0, 1)));
              varop = build (PREINCREMENT_EXPR, TREE_TYPE (arg0),
                             TREE_OPERAND (arg0, 0),
                             TREE_OPERAND (arg0, 1));
            }
          else
            {
              newconst = fold (build (MINUS_EXPR, TREE_TYPE (arg0),
                                      arg1, TREE_OPERAND (arg0, 1)));
              varop = build (PREDECREMENT_EXPR, TREE_TYPE (arg0),
                             TREE_OPERAND (arg0, 0),
                             TREE_OPERAND (arg0, 1));
            }

          /* If VAROP is a reference to a bitfield, we must mask
             the constant by the width of the field.  */
          if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
              && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
            {
              tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
              int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
              tree folded_compare, shift;

              /* First check whether the comparison would come out
                 always the same.  If we don't do that we would
                 change the meaning with the masking.  */
              folded_compare = fold (build (code, type,
                                            TREE_OPERAND (varop, 0),
                                            arg1));
              if (integer_zerop (folded_compare)
                  || integer_onep (folded_compare))
                return omit_one_operand (type, folded_compare, varop);

              shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
                                   0);
              newconst = fold (build (LSHIFT_EXPR, TREE_TYPE (varop),
                                      newconst, shift));
              newconst = fold (build (RSHIFT_EXPR, TREE_TYPE (varop),
                                      newconst, shift));
            }

          return fold (build (code, type, varop, newconst));
        }
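      /* Worked example (illustrative, not from the original source):
         for integral foo, foo++ == 5 is rewritten as ++foo == 6: the
         increment still happens, but the comparison now uses the
         incremented value against the constant adjusted by the same
         amount.  */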
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
         This transformation affects the cases which are handled in later
         optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) != INTEGER_CST
          && tree_int_cst_sgn (arg1) > 0)
        {
          switch (code)
            {
            case GE_EXPR:
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              return fold (build (GT_EXPR, type, arg0, arg1));

            case LT_EXPR:
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              return fold (build (LE_EXPR, type, arg0, arg1));

            default:
              break;
            }
        }

      /* Comparisons with the highest or lowest possible integer of
         the specified size will have known values.  */
      {
        int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

        if (TREE_CODE (arg1) == INTEGER_CST
            && ! TREE_CONSTANT_OVERFLOW (arg1)
            && width <= HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                || POINTER_TYPE_P (TREE_TYPE (arg1))))
          {
            unsigned HOST_WIDE_INT signed_max;
            unsigned HOST_WIDE_INT max, min;

            signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

            if (TREE_UNSIGNED (TREE_TYPE (arg1)))
              {
                max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                min = 0;
              }
            else
              {
                max = signed_max;
                min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
              }

            if (TREE_INT_CST_HIGH (arg1) == 0
                && TREE_INT_CST_LOW (arg1) == max)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand (type,
                                           fold_convert (type,
                                                         integer_zero_node),
                                           arg0);
                case GE_EXPR:
                  return fold (build (EQ_EXPR, type, arg0, arg1));

                case LE_EXPR:
                  return omit_one_operand (type,
                                           fold_convert (type,
                                                         integer_one_node),
                                           arg0);
                case LT_EXPR:
                  return fold (build (NE_EXPR, type, arg0, arg1));

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }
            else if (TREE_INT_CST_HIGH (arg1) == 0
                     && TREE_INT_CST_LOW (arg1) == max - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build (EQ_EXPR, type, arg0, arg1));
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build (NE_EXPR, type, arg0, arg1));
                default:
                  break;
                }
            else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
                     && TREE_INT_CST_LOW (arg1) == min)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand (type,
                                           fold_convert (type,
                                                         integer_zero_node),
                                           arg0);
                case LE_EXPR:
                  return fold (build (EQ_EXPR, type, arg0, arg1));

                case GE_EXPR:
                  return omit_one_operand (type,
                                           fold_convert (type,
                                                         integer_one_node),
                                           arg0);
                case GT_EXPR:
                  return fold (build (NE_EXPR, type, arg0, arg1));

                default:
                  break;
                }
            else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
                     && TREE_INT_CST_LOW (arg1) == min + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build (NE_EXPR, type, arg0, arg1));
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build (EQ_EXPR, type, arg0, arg1));
                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == 0
                     && TREE_INT_CST_LOW (arg1) == signed_max
                     && TREE_UNSIGNED (TREE_TYPE (arg1))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous
                   transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st0, st1;
                    st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
                    st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
                    return fold
                      (build (code == LE_EXPR ? GE_EXPR : LT_EXPR,
                              type, fold_convert (st0, arg0),
                              fold_convert (st1, integer_zero_node)));
                  }
              }
          }
      }
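      /* Worked example (illustrative, not from the original source):
         for a 32-bit unsigned x, x > 0xffffffff folds to false and
         x <= 0xffffffff to true, since no value exceeds the maximum;
         analogous reasoning handles max-1, min, and min+1 above.  */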
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1), 0))
          && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
               && TREE_CODE (arg0) == NEGATE_EXPR
               && TREE_CODE (arg1) == INTEGER_CST
               && 0 != (tem = negate_expr (arg1))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
               && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (code, type,
                            TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
               && TREE_CODE (arg0) == NOP_EXPR
               && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
               && (code == EQ_EXPR || code == NE_EXPR
                   || TREE_UNSIGNED (TREE_TYPE (arg0))
                      == TREE_UNSIGNED (TREE_TYPE (tem)))
               && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
               && (TREE_TYPE (t1) == TREE_TYPE (tem)
                   || (TREE_CODE (t1) == INTEGER_CST
                       && int_fits_type_p (t1, TREE_TYPE (tem)))))
        return fold (build (code, type, tem,
                            fold_convert (TREE_TYPE (tem), t1)));

      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
         constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
               && (TREE_CODE (arg0) == MIN_EXPR
                   || TREE_CODE (arg0) == MAX_EXPR)
               && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return optimize_minmax_comparison (t);

      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
               && TREE_CODE (arg0) == ABS_EXPR
               && ! TREE_SIDE_EFFECTS (arg0)
               && (0 != (tem = negate_expr (arg1)))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build (TRUTH_ANDIF_EXPR, type,
                            build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
                            build (LE_EXPR, type,
                                   TREE_OPERAND (arg0, 0), arg1)));
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR)
        {
          if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
            return
              fold (build (code, type,
                           build (BIT_AND_EXPR, TREE_TYPE (arg0),
                                  build (RSHIFT_EXPR,
                                         TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (TREE_OPERAND (arg0, 0),
                                                       1)),
                                  fold_convert (TREE_TYPE (arg0),
                                                integer_one_node)),
                           arg1));
          else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
            return
              fold (build (code, type,
                           build (BIT_AND_EXPR, TREE_TYPE (arg0),
                                  build (RSHIFT_EXPR,
                                         TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                         TREE_OPERAND (arg0, 0),
                                         TREE_OPERAND (TREE_OPERAND (arg0, 1),
                                                       1)),
                                  fold_convert (TREE_TYPE (arg0),
                                                integer_one_node)),
                           arg1));
        }

      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
          && integer_zerop (arg1)
          && ! TREE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
          tree newmod = build (TREE_CODE (arg0), newtype,
                               fold_convert (newtype,
                                             TREE_OPERAND (arg0, 0)),
                               fold_convert (newtype,
                                             TREE_OPERAND (arg0, 1)));

          return build (code, type, newmod, fold_convert (newtype, arg1));
        }
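      /* Illustrative note (not from the original source): for signed x,
         (x % 4) == 0 depends only on the two low-order bits, so the MOD
         is redone in the corresponding unsigned type, where it reduces
         to a simple mask.  */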
      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return fold_convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                            arg0, integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
         2, then fold the expression into shifts and logical operations.  */
      tem = fold_single_bit_test (code, arg0, arg1, type);
      if (tem)
        return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree dandnotc
            = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
                           arg1, build1 (BIT_NOT_EXPR,
                                         TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                         TREE_OPERAND (arg0, 1))));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand (type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree candnotd
            = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
                           TREE_OPERAND (arg0, 1),
                           build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand (type, rslt, arg0);
        }

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TREE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                      build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                             TREE_OPERAND (arg1, 1)),
                      fold_convert (TREE_TYPE (arg0), integer_zero_node));

      else if ((code == LT_EXPR || code == GE_EXPR)
               && TREE_UNSIGNED (TREE_TYPE (arg0))
               && (TREE_CODE (arg1) == NOP_EXPR
                   || TREE_CODE (arg1) == CONVERT_EXPR)
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
               && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        return
          build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                 fold_convert (TREE_TYPE (arg0),
                               build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                      TREE_OPERAND (TREE_OPERAND (arg1, 0),
                                                    1))),
                 fold_convert (TREE_TYPE (arg0), integer_zero_node));
      /* Simplify comparison of something with itself.  (For IEEE
         floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
        {
          switch (code)
            {
            case EQ_EXPR:
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                return constant_boolean_node (1, type);
              break;

            case GE_EXPR:
            case LE_EXPR:
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                return constant_boolean_node (1, type);
              return fold (build (EQ_EXPR, type, arg0, arg1));

            case NE_EXPR:
              /* For NE, we can only do this simplification if integer
                 or we don't honor IEEE floating point NaNs.  */
              if (FLOAT_TYPE_P (TREE_TYPE (arg0))
                  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                break;
              /* ... fall through ...  */
            case GT_EXPR:
            case LT_EXPR:
              return constant_boolean_node (0, type);
            default:
              abort ();
            }
        }

      /* If we are comparing an expression that just has comparisons
         of two integer values, arithmetic expressions of those comparisons,
         and constants, we can simplify it.  There are only three cases
         to check: the two values can either be equal, the first can be
         greater, or the second can be greater.  Fold the expression for
         those three values.  Since each value must be 0 or 1, we have
         eight possibilities, each of which corresponds to the constant 0
         or 1 or one of the six possible comparisons.

         This handles common cases like (a > b) == 0 but also handles
         expressions like  ((x > y) - (y > x)) > 0, which supposedly
         occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
        {
          tree cval1 = 0, cval2 = 0;
          int save_p = 0;

          if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
              /* Don't handle degenerate cases here; they should already
                 have been handled anyway.  */
              && cval1 != 0 && cval2 != 0
              && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
              && TREE_TYPE (cval1) == TREE_TYPE (cval2)
              && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval2))
              && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
            {
              tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
              tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

              /* We can't just pass T to eval_subst in case cval1 or cval2
                 was the same as ARG1.  */

              tree high_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, maxval, cval2, minval),
                               arg1));
              tree equal_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, maxval, cval2, maxval),
                               arg1));
              tree low_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, minval, cval2, maxval),
                               arg1));

              /* All three of these results should be 0 or 1.  Confirm they
                 are.  Then use those values to select the proper code
                 to use.  */

              if ((integer_zerop (high_result)
                   || integer_onep (high_result))
                  && (integer_zerop (equal_result)
                      || integer_onep (equal_result))
                  && (integer_zerop (low_result)
                      || integer_onep (low_result)))
                {
                  /* Make a 3-bit mask with the high-order bit being the
                     value for `>', the next for '=', and the low for '<'.  */
                  switch ((integer_onep (high_result) * 4)
                          + (integer_onep (equal_result) * 2)
                          + integer_onep (low_result))
                    {
                    case 0:
                      /* Always false.  */
                      return omit_one_operand (type, integer_zero_node, arg0);
                    case 1:
                      code = LT_EXPR;
                      break;
                    case 2:
                      code = EQ_EXPR;
                      break;
                    case 3:
                      code = LE_EXPR;
                      break;
                    case 4:
                      code = GT_EXPR;
                      break;
                    case 5:
                      code = NE_EXPR;
                      break;
                    case 6:
                      code = GE_EXPR;
                      break;
                    case 7:
                      /* Always true.  */
                      return omit_one_operand (type, integer_one_node, arg0);
                    }

                  t = build (code, type, cval1, cval2);
                  if (save_p)
                    return save_expr (t);
                  else
                    return fold (t);
                }
            }
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
            && (*lang_hooks.can_use_bit_fields_p) ())
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          && (code == EQ_EXPR || code == NE_EXPR)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (code, type, arg0, arg1);
          if (t1)
            return t1;
        }
      /* If this is a comparison of complex values and either or both sides
         are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
         comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
         This may prevent needless evaluations.  */
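      /* For instance (illustrative only), with z = a + b*i and
         w = c + d*i, "z == w" becomes "a == c && b == d" while "z != w"
         becomes "a != c || b != d"; the short-circuit forms can skip the
         imaginary-part comparison entirely.  */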
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
          && (TREE_CODE (arg0) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg0) == COMPLEX_CST
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree subtype = TREE_TYPE (TREE_TYPE (arg0));
          tree real0, imag0, real1, imag1;

          arg0 = save_expr (arg0);
          arg1 = save_expr (arg1);
          real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
          imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
          real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
          imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));

          return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
                               : TRUTH_ORIF_EXPR),
                              type,
                              fold (build (code, type, real0, real1)),
                              fold (build (code, type, imag0, imag1))));
        }
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && integer_zerop (arg1)
          && TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fndecl = get_callee_fndecl (arg0);
          tree arglist;

          if (fndecl
              && DECL_BUILT_IN (fndecl)
              && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && (arglist = TREE_OPERAND (arg0, 1))
              && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
              && ! TREE_CHAIN (arglist))
            return fold (build (code, type,
                                build1 (INDIRECT_REF, char_type_node,
                                        TREE_VALUE (arglist)),
                                integer_zero_node));
        }
      /* From here on, the only cases we handle are when the result is
         known to be a constant.

         To compute GT, swap the arguments and do LT.
         To compute GE, do LT and invert the result.
         To compute LE, swap the arguments, do LT and invert the result.
         To compute NE, do EQ and invert the result.

         Therefore, the code below must handle only EQ and LT.  */
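      /* For example (illustrative only), "5 >= 3" is computed as
         !(5 < 3) = !0 = 1, and "3 > 5" as "5 < 3" = 0, so a single
         LT (plus EQ) evaluator covers all six comparison codes.  */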
      if (code == LE_EXPR || code == GT_EXPR)
        {
          tem = arg0, arg0 = arg1, arg1 = tem;
          code = swap_tree_comparison (code);
        }

      /* Note that it is safe to invert for real values here because we
         will check below in the one case that it matters.  */

      t1 = NULL_TREE;
      invert = 0;
      if (code == NE_EXPR || code == GE_EXPR)
        {
          invert = 1;
          code = invert_tree_comparison (code);
        }

      /* Compute a result for LT or EQ if args permit;
         otherwise return T.  */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        {
          if (code == EQ_EXPR)
            t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
          else
            t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
                               ? INT_CST_LT_UNSIGNED (arg0, arg1)
                               : INT_CST_LT (arg0, arg1)),
                              0);
        }

#if 0  /* This is no longer useful, but breaks some real code.  */
      /* Assume a nonexplicit constant cannot equal an explicit one,
         since such code would be undefined anyway.
         Exception: on sysvr4, using #pragma weak,
         a label can come out as 0.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
               && !integer_zerop (arg1)
               && TREE_CONSTANT (arg0)
               && TREE_CODE (arg0) == ADDR_EXPR
               && code == EQ_EXPR)
        t1 = build_int_2 (0, 0);
#endif

      /* Two real constants can be compared explicitly.  */
      else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          /* If either operand is a NaN, the result is false with two
             exceptions: First, an NE_EXPR is true on NaNs, but that case
             is already handled correctly since we will be inverting the
             result for NE_EXPR.  Second, if we had inverted a LE_EXPR
             or a GE_EXPR into a LT_EXPR, we must return true so that it
             will be inverted into false.  */
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
              || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            t1 = build_int_2 (invert && code == LT_EXPR, 0);

          else if (code == EQ_EXPR)
            t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
                                                 TREE_REAL_CST (arg1)),
                              0);
          else
            t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
                                                TREE_REAL_CST (arg1)),
                              0);
        }

      if (t1 == NULL_TREE)
        return t;

      if (invert)
        TREE_INT_CST_LOW (t1) ^= 1;

      TREE_TYPE (t1) = type;
      if (TREE_CODE (type) == BOOLEAN_TYPE)
        return (*lang_hooks.truthvalue_conversion) (t1);
      return t1;
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an
         lvalue, so all simple results must be passed through
         pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.  */
          if (! VOID_TYPE_P (TREE_TYPE (tem))
              || VOID_TYPE_P (TREE_TYPE (t)))
            return pedantic_non_lvalue (tem);
          return t;
        }
      if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
        return pedantic_omit_one_operand (type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.  */

      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tree arg2 = TREE_OPERAND (t, 2);
          enum tree_code comp_code = TREE_CODE (arg0);

          STRIP_NOPS (arg2);
          /* If we have A op 0 ? A : -A, consider applying the following
             transformations:

             A == 0? A : -A    same as -A
             A != 0? A : -A    same as A
             A >= 0? A : -A    same as abs (A)
             A > 0?  A : -A    same as abs (A)
             A <= 0? A : -A    same as -abs (A)
             A < 0?  A : -A    same as -abs (A)

             None of these transformations work for modes with signed
             zeros.  If A is +/-0, the first two transformations will
             change the sign of the result (from +0 to -0, or vice
             versa).  The last four will fix the sign of the result,
             even though the original expressions could be positive or
             negative, depending on the sign of A.

             Note that all these transformations are correct if A is
             NaN, since the two alternatives (A and -A) are also NaNs.  */
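          /* A concrete case (illustrative only): "x > 0 ? x : -x"
             becomes abs (x).  With signed zeros honored this would be
             invalid: for x = +0.0 the conditional yields -0.0, while
             abs (x) yields +0.0.  */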
          if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
               ? real_zerop (TREE_OPERAND (arg0, 1))
               : integer_zerop (TREE_OPERAND (arg0, 1)))
              && TREE_CODE (arg2) == NEGATE_EXPR
              && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
            switch (comp_code)
              {
              case EQ_EXPR:
                tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
                tem = fold_convert (type, negate_expr (tem));
                return pedantic_non_lvalue (tem);
              case NE_EXPR:
                return pedantic_non_lvalue (fold_convert (type, arg1));
              case GE_EXPR:
              case GT_EXPR:
                if (TREE_UNSIGNED (TREE_TYPE (arg1)))
                  arg1 = fold_convert ((*lang_hooks.types.signed_type)
                                       (TREE_TYPE (arg1)), arg1);
                arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
                return pedantic_non_lvalue (fold_convert (type, arg1));
              case LE_EXPR:
              case LT_EXPR:
                if (TREE_UNSIGNED (TREE_TYPE (arg1)))
                  arg1 = fold_convert ((*lang_hooks.types.signed_type)
                                       (TREE_TYPE (arg1)), arg1);
                arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
                arg1 = negate_expr (fold_convert (type, arg1));
                return pedantic_non_lvalue (arg1);
              default:
                abort ();
              }
          /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
             A == 0 ? A : 0 is always 0 unless A is -0.  Note that
             both transformations are correct when A is NaN: A != 0
             is then true, and A == 0 is false.  */

          if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
            {
              if (comp_code == NE_EXPR)
                return pedantic_non_lvalue (fold_convert (type, arg1));
              else if (comp_code == EQ_EXPR)
                return pedantic_non_lvalue (fold_convert (type,
                                                          integer_zero_node));
            }
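          /* The -0 caveat in numbers (illustrative only): with A = -0.0,
             "A == 0 ? A : 0" evaluates to -0.0, since -0.0 == 0 is true
             and A is selected, whereas the folded result 0 is +0.0;
             that is why HONOR_SIGNED_ZEROS disables this whole group.  */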
          /* Try some transformations of A op B ? A : B.

             A == B? A : B    same as B
             A != B? A : B    same as A
             A >= B? A : B    same as max (A, B)
             A > B?  A : B    same as max (B, A)
             A <= B? A : B    same as min (A, B)
             A < B?  A : B    same as min (B, A)

             As above, these transformations don't work in the presence
             of signed zeros.  For example, if A and B are zeros of
             opposite sign, the first two transformations will change
             the sign of the result.  In the last four, the original
             expressions give different results for (A=+0, B=-0) and
             (A=-0, B=+0), but the transformed expressions do not.

             The first two transformations are correct if either A or B
             is a NaN.  In the first transformation, the condition will
             be false, and B will indeed be chosen.  In the case of the
             second transformation, the condition A != B will be true,
             and A will be chosen.

             The conversions to max() and min() are not correct if B is
             a number and A is not.  The conditions in the original
             expressions will be false, so all four give B.  The min()
             and max() versions would give a NaN instead.  */
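          /* Note the operand order (illustrative): "a > b ? a : b" maps
             to max (b, a), not max (a, b), so that when the operands
             compare equal the same one is selected as in the original
             conditional; this matters in C++, where the result may be
             an lvalue.  */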
          if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
                                              arg2, TREE_OPERAND (arg0, 0)))
            {
              tree comp_op0 = TREE_OPERAND (arg0, 0);
              tree comp_op1 = TREE_OPERAND (arg0, 1);
              tree comp_type = TREE_TYPE (comp_op0);

              /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
              if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
                {
                  comp_type = type;
                  comp_op0 = arg1;
                  comp_op1 = arg2;
                }

              switch (comp_code)
                {
                case EQ_EXPR:
                  return pedantic_non_lvalue (fold_convert (type, arg2));
                case NE_EXPR:
                  return pedantic_non_lvalue (fold_convert (type, arg1));
                case LE_EXPR:
                case LT_EXPR:
                  /* In C++ a ?: expression can be an lvalue, so put the
                     operand which will be used if they are equal first
                     so that we can convert this back to the
                     corresponding COND_EXPR.  */
                  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
                    return pedantic_non_lvalue (fold_convert
                      (type, fold (build (MIN_EXPR, comp_type,
                                          (comp_code == LE_EXPR
                                           ? comp_op0 : comp_op1),
                                          (comp_code == LE_EXPR
                                           ? comp_op1 : comp_op0)))));
                  break;
                case GE_EXPR:
                case GT_EXPR:
                  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
                    return pedantic_non_lvalue (fold_convert
                      (type, fold (build (MAX_EXPR, comp_type,
                                          (comp_code == GE_EXPR
                                           ? comp_op0 : comp_op1),
                                          (comp_code == GE_EXPR
                                           ? comp_op1 : comp_op0)))));
                  break;
                default:
                  abort ();
                }
            }
          /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
             we might still be able to simplify this.  For example,
             if C1 is one less or one more than C2, this might have started
             out as a MIN or MAX and been transformed by this function.
             Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
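          /* For instance (illustrative only), "a < 4 ? a : 3" is
             min (a, 3): when a < 4 holds, a <= 3 for integers, so taking
             a agrees with the minimum; here C1 = 4 is C2 + 1 for C2 = 3.  */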
          if (INTEGRAL_TYPE_P (type)
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST)
            switch (comp_code)
              {
              case EQ_EXPR:
                /* We can replace A with C1 in this case.  */
                arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
                return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
                                    TREE_OPERAND (t, 2)));

              case LT_EXPR:
                /* If C1 is C2 + 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case LE_EXPR:
                /* If C1 is C2 - 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case GT_EXPR:
                /* If C1 is C2 - 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;

              case GE_EXPR:
                /* If C1 is C2 + 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;

              case NE_EXPR:
                break;
              default:
                abort ();
              }
        }
      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (tree_swap_operands_p (TREE_OPERAND (t, 1),
                                TREE_OPERAND (t, 2), false))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return fold (build (code, type, tem,
                                TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
        }

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (TREE_OPERAND (t, 1))
          && integer_zerop (TREE_OPERAND (t, 2))
          /* If we try to convert TREE_OPERAND (t, 0) to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (TREE_OPERAND (t, 1))
          && integer_onep (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue (fold_convert (type,
                                                  invert_truthvalue (arg0)));

      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
         operation is simply A & 2.  */
      if (integer_zerop (TREE_OPERAND (t, 2))
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, 1))
        return pedantic_non_lvalue (fold_convert (type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
                                                 arg0, arg1)));

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = invert_truthvalue (arg0);
          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
                                                     tem, arg1)));
        }

      return t;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || pedantic)
        return t;
      /* Don't let (0, 0) be null pointer constant.  */
      if (integer_zerop (arg1))
        return build1 (NOP_EXPR, type, arg1);
      return fold_convert (type, arg1);

    case COMPLEX_EXPR:
      if (wins)
        return build_complex (type, arg0, arg1);
      return t;
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return t;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (REALPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (REALPART_EXPR, type,
                                          TREE_OPERAND (arg0, 1)))));
      return t;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 1)))));
      return t;
      /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
         appropriate.  */
    case CLEANUP_POINT_EXPR:
      if (! has_cleanups (arg0))
        return TREE_OPERAND (t, 0);

      {
        enum tree_code code0 = TREE_CODE (arg0);
        int kind0 = TREE_CODE_CLASS (code0);
        tree arg00 = TREE_OPERAND (arg0, 0);
        tree arg01;

        if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
          return fold (build1 (code0, type,
                               fold (build1 (CLEANUP_POINT_EXPR,
                                             TREE_TYPE (arg00), arg00))));

        if (kind0 == '<' || kind0 == '2'
            || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
            || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
            || code0 == TRUTH_XOR_EXPR)
          {
            arg01 = TREE_OPERAND (arg0, 1);

            if (TREE_CONSTANT (arg00)
                || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
                    && ! has_cleanups (arg00)))
              return fold (build (code0, type, arg00,
                                  fold (build1 (CLEANUP_POINT_EXPR,
                                                TREE_TYPE (arg01), arg01))));

            if (TREE_CONSTANT (arg01))
              return fold (build (code0, type,
                                  fold (build1 (CLEANUP_POINT_EXPR,
                                                TREE_TYPE (arg00), arg00)),
                                  arg01));
          }

        return t;
      }

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
        {
          tree tmp = fold_builtin (expr);
          if (tmp)
            return tmp;
        }
      return t;

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);
  return ret;
}
void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  char buf[sizeof (struct tree_decl)];
  int i, len;

  if (sizeof (struct tree_exp) + 5 * sizeof (tree)
      > sizeof (struct tree_decl)
      || sizeof (struct tree_type) > sizeof (struct tree_decl))
    abort ();
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
    {
      /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
    }
  else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == 't'
           && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
    {
      /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  len = TREE_CODE_LENGTH (code);
  switch (TREE_CODE_CLASS (code))
    {
    case 'c':
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case 'x':
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          break;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case 'e':
      switch (code)
        {
        case SAVE_EXPR: len = 2; break;
        case GOTO_SUBROUTINE_EXPR: len = 0; break;
        case RTL_EXPR: len = 0; break;
        case WITH_CLEANUP_EXPR: len = 2; break;
        default: break;
        }
      /* Fall through.  */
    case 'r':
    case '<':
    case '1':
    case '2':
    case 's':
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case 'd':
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case 't':
      fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
/* Perform constant folding and related simplification of initializer
   expression EXPR.  This behaves identically to "fold" but ignores
   potential run-time traps and exceptions that fold must preserve.  */
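/* For example (illustrative only): with -ftrapping-math in effect,
   fold must leave "0.0 / 0.0" alone because evaluating it may raise an
   FPU exception at run time, but a static initializer has no run time,
   so fold_initializer may reduce it to a NaN constant.  */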
tree
fold_initializer (tree expr)
{
  int saved_signaling_nans = flag_signaling_nans;
  int saved_trapping_math = flag_trapping_math;
  int saved_trapv = flag_trapv;
  tree result;

  flag_signaling_nans = 0;
  flag_trapping_math = 0;
  flag_trapv = 0;

  result = fold (expr);

  flag_signaling_nans = saved_signaling_nans;
  flag_trapping_math = saved_trapping_math;
  flag_trapv = saved_trapv;

  return result;
}
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
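/* A small worked trace (illustrative only): multiple_of_p (type,
   "(i * 4) + 8", "4") recurses through the PLUS_EXPR, finds "i * 4" a
   multiple of 4 via its MULT_EXPR operand, and "8" a multiple of 4 via
   the INTEGER_CST remainder check, so it returns 1.  */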
static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && ! TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;
      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TREE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
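      /* Numeric check (illustrative only): two zero-extended 8-bit
         values summed in a 32-bit int give at most 255 + 255 = 510 <
         2^9, so any result precision above 9 bits can never wrap
         negative.  */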
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bit count is shorter than the
         result's.  */
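      /* E.g. (illustrative only) an 8-bit by 8-bit product is at most
         255 * 255 = 65025 < 2^16, so with 8 + 8 = 16 bits strictly less
         than the result's precision the product cannot go negative.  */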
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      break;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TREE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TREE_UNSIGNED (inner_type);
          }
      }
      break;

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            case BUILT_IN_CABS:
            case BUILT_IN_CABSL:
            case BUILT_IN_CABSF:
            case BUILT_IN_EXP:
            case BUILT_IN_EXPF:
            case BUILT_IN_EXPL:
            case BUILT_IN_EXP2:
            case BUILT_IN_EXP2F:
            case BUILT_IN_EXP2L:
            case BUILT_IN_EXP10:
            case BUILT_IN_EXP10F:
            case BUILT_IN_EXP10L:
            case BUILT_IN_FABS:
            case BUILT_IN_FABSF:
            case BUILT_IN_FABSL:
            case BUILT_IN_FFS:
            case BUILT_IN_FFSL:
            case BUILT_IN_FFSLL:
            case BUILT_IN_PARITY:
            case BUILT_IN_PARITYL:
            case BUILT_IN_PARITYLL:
            case BUILT_IN_POPCOUNT:
            case BUILT_IN_POPCOUNTL:
            case BUILT_IN_POPCOUNTLL:
            case BUILT_IN_POW10:
            case BUILT_IN_POW10F:
            case BUILT_IN_POW10L:
            case BUILT_IN_SQRT:
            case BUILT_IN_SQRTF:
            case BUILT_IN_SQRTL:
              /* These never return a negative value.  */
              return 1;

            case BUILT_IN_ATAN:
            case BUILT_IN_ATANF:
            case BUILT_IN_ATANL:
            case BUILT_IN_CEIL:
            case BUILT_IN_CEILF:
            case BUILT_IN_CEILL:
            case BUILT_IN_FLOOR:
            case BUILT_IN_FLOORF:
            case BUILT_IN_FLOORL:
            case BUILT_IN_NEARBYINT:
            case BUILT_IN_NEARBYINTF:
            case BUILT_IN_NEARBYINTL:
            case BUILT_IN_ROUND:
            case BUILT_IN_ROUNDF:
            case BUILT_IN_ROUNDL:
            case BUILT_IN_TRUNC:
            case BUILT_IN_TRUNCF:
            case BUILT_IN_TRUNCL:
              /* These are sign-preserving: the result is non-negative
                 whenever the argument is.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            case BUILT_IN_FMOD:
            case BUILT_IN_FMODF:
            case BUILT_IN_FMODL:
              /* fmod takes the sign of its first argument.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            default:
              break;
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
      break;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */
int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
        return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
        int units, i;
        rtx elt;

        units = CONST_VECTOR_NUNITS (r);
        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (r, i);
            if (!rtl_expr_nonnegative_p (elt))
              return 0;
          }
        return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}

#include "gt-fold-const.h"