1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
47 #include "coretypes.h"
58 #include "langhooks.h"
61 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
62 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
63 static bool negate_mathfn_p (enum built_in_function);
64 static bool negate_expr_p (tree);
65 static tree negate_expr (tree);
66 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
67 static tree associate_trees (tree, tree, enum tree_code, tree);
68 static tree int_const_binop (enum tree_code, tree, tree, int);
69 static tree const_binop (enum tree_code, tree, tree, int);
70 static hashval_t size_htab_hash (const void *);
71 static int size_htab_eq (const void *, const void *);
72 static tree fold_convert_const (enum tree_code, tree, tree);
73 static tree fold_convert (tree, tree);
74 static enum tree_code invert_tree_comparison (enum tree_code);
75 static enum tree_code swap_tree_comparison (enum tree_code);
76 static int comparison_to_compcode (enum tree_code);
77 static enum tree_code compcode_to_comparison (int);
78 static int truth_value_p (enum tree_code);
79 static int operand_equal_for_comparison_p (tree, tree, tree);
80 static int twoval_comparison_p (tree, tree *, tree *, int *);
81 static tree eval_subst (tree, tree, tree, tree, tree);
82 static tree pedantic_omit_one_operand (tree, tree, tree);
83 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
84 static tree make_bit_field_ref (tree, tree, int, int, int);
85 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
86 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
87 enum machine_mode *, int *, int *,
89 static int all_ones_mask_p (tree, int);
90 static tree sign_bit_p (tree, tree);
91 static int simple_operand_p (tree);
92 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
93 static tree make_range (tree, int *, tree *, tree *);
94 static tree build_range_check (tree, tree, int, tree, tree);
95 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
97 static tree fold_range_test (tree);
98 static tree unextend (tree, int, int, tree);
99 static tree fold_truthop (enum tree_code, tree, tree, tree);
100 static tree optimize_minmax_comparison (tree);
101 static tree extract_muldiv (tree, tree, enum tree_code, tree);
102 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
103 static tree strip_compound_expr (tree, tree);
104 static int multiple_of_p (tree, tree, tree);
105 static tree constant_boolean_node (int, tree);
106 static int count_cond (tree, int);
107 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
109 static bool fold_real_zero_addition_p (tree, tree, int);
110 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
112 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
113 static bool reorder_operands_p (tree, tree);
114 static bool tree_swap_operands_p (tree, tree, bool);
116 /* The following constants represent a bit based encoding of GCC's
117 comparison operators. This encoding simplifies transformations
118 on relational comparison operators, such as AND and OR. */
119 #define COMPCODE_FALSE 0
120 #define COMPCODE_LT 1
121 #define COMPCODE_EQ 2
122 #define COMPCODE_LE 3
123 #define COMPCODE_GT 4
124 #define COMPCODE_NE 5
125 #define COMPCODE_GE 6
126 #define COMPCODE_TRUE 7
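/* Illustrative sketch, not part of the original source: the encoding is
   chosen so that combining two comparisons of the same operands with AND
   or OR reduces to a bitwise operation on their compcodes, e.g.
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE and
   COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ.  The helper below is
   hypothetical, assumes the combined code is itself representable as an
   ordinary comparison, and is guarded out so it has no effect.  */
#if 0
static enum tree_code
example_or_comparisons (enum tree_code code1, enum tree_code code2)
{
  /* (a CODE1 b) || (a CODE2 b) becomes a single comparison whose
     compcode is the bitwise OR of the two compcodes.  */
  return compcode_to_comparison (comparison_to_compcode (code1)
				 | comparison_to_compcode (code2));
}
#endif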
128 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
129 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
130 and SUM1. Then this yields nonzero if overflow occurred during the
131 addition.
133 Overflow occurs if A and B have the same sign, but A and SUM differ in
134 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
135 sign bit. */
136 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
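/* Illustrative sketch, not part of the original source: a typical use of
   the macro.  The addition is performed with unsigned (wrapping)
   arithmetic and the macro then reports whether the signed interpretation
   overflowed.  The helper name is hypothetical; the block is guarded out.  */
#if 0
static int
example_signed_add_overflows (HOST_WIDE_INT a, HOST_WIDE_INT b)
{
  HOST_WIDE_INT sum
    = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a + (unsigned HOST_WIDE_INT) b);
  /* Nonzero iff A and B have the same sign but SUM's sign differs.  */
  return OVERFLOW_SUM_SIGN (a, b, sum);
}
#endif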
138 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
139 We do that by representing the two-word integer in 4 words, with only
140 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
141 number. The value of the word is LOWPART + HIGHPART * BASE. */
143 #define LOWPART(x) \
144 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
145 #define HIGHPART(x) \
146 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
147 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
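/* Illustrative note, not part of the original source: with a 32-bit
   HOST_WIDE_INT, BASE is 0x10000, so the single word 0x12345678 splits as
   LOWPART (0x12345678) == 0x5678 and HIGHPART (0x12345678) == 0x1234, and
   the original value is recovered as LOWPART + HIGHPART * BASE.  Keeping
   only half a word per array element leaves enough headroom that the
   product of two such "digits" never overflows a HOST_WIDE_INT.  */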
149 /* Unpack a two-word integer into 4 words.
150 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
151 WORDS points to the array of HOST_WIDE_INTs. */
154 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
156 words[0] = LOWPART (low);
157 words[1] = HIGHPART (low);
158 words[2] = LOWPART (hi);
159 words[3] = HIGHPART (hi);
162 /* Pack an array of 4 words into a two-word integer.
163 WORDS points to the array of words.
164 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
167 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
170 *low = words[0] + words[1] * BASE;
171 *hi = words[2] + words[3] * BASE;
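/* Illustrative sketch, not part of the original source: encode and decode
   are inverses, so a doubleword value round-trips through the four-word
   representation unchanged.  The helper is hypothetical and guarded out.  */
#if 0
static void
example_encode_decode_roundtrip (unsigned HOST_WIDE_INT low, HOST_WIDE_INT high)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low2;
  HOST_WIDE_INT high2;

  encode (words, low, high);
  decode (words, &low2, &high2);
  /* At this point low2 == low and high2 == high.  */
}
#endif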
174 /* Make the integer constant T valid for its type by setting to 0 or 1 all
175 the bits in the constant that don't belong in the type.
177 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
178 nonzero, a signed overflow has already occurred in calculating T, so
179 propagate it. */
182 force_fit_type (tree t, int overflow)
184 unsigned HOST_WIDE_INT low;
188 if (TREE_CODE (t) == REAL_CST)
190 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
191 Consider doing it via real_convert now. */
195 else if (TREE_CODE (t) != INTEGER_CST)
198 low = TREE_INT_CST_LOW (t);
199 high = TREE_INT_CST_HIGH (t);
201 if (POINTER_TYPE_P (TREE_TYPE (t))
202 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
205 prec = TYPE_PRECISION (TREE_TYPE (t));
207 /* First clear all bits that are beyond the type's precision. */
209 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
211 else if (prec > HOST_BITS_PER_WIDE_INT)
212 TREE_INT_CST_HIGH (t)
213 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
216 TREE_INT_CST_HIGH (t) = 0;
217 if (prec < HOST_BITS_PER_WIDE_INT)
218 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
221 /* Unsigned types do not suffer sign extension or overflow unless they
222 are sizetypes. */
223 if (TREE_UNSIGNED (TREE_TYPE (t))
224 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
225 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
228 /* If the value's sign bit is set, extend the sign. */
229 if (prec != 2 * HOST_BITS_PER_WIDE_INT
230 && (prec > HOST_BITS_PER_WIDE_INT
231 ? 0 != (TREE_INT_CST_HIGH (t)
233 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
234 : 0 != (TREE_INT_CST_LOW (t)
235 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
237 /* Value is negative:
238 set to 1 all the bits that are outside this type's precision. */
239 if (prec > HOST_BITS_PER_WIDE_INT)
240 TREE_INT_CST_HIGH (t)
241 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
244 TREE_INT_CST_HIGH (t) = -1;
245 if (prec < HOST_BITS_PER_WIDE_INT)
246 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
250 /* Return nonzero if signed overflow occurred. */
252 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
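/* Illustrative sketch, not part of the original source: a typical caller
   builds a raw INTEGER_CST, sets its type and then lets force_fit_type
   truncate or sign-extend the value and report overflow, exactly as
   negate_expr and int_const_binop do below.  The helper is hypothetical
   and guarded out.  */
#if 0
static tree
example_build_fitted_constant (tree type, unsigned HOST_WIDE_INT low,
			       HOST_WIDE_INT high)
{
  tree t = build_int_2 (low, high);
  TREE_TYPE (t) = type;
  TREE_OVERFLOW (t) = force_fit_type (t, 0);
  return t;
}
#endif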
256 /* Add two doubleword integers with doubleword result.
257 Each argument is given as two `HOST_WIDE_INT' pieces.
258 One argument is L1 and H1; the other, L2 and H2.
259 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
262 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
263 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
264 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
266 unsigned HOST_WIDE_INT l;
270 h = h1 + h2 + (l < l1);
274 return OVERFLOW_SUM_SIGN (h1, h2, h);
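/* Illustrative sketch, not part of the original source: adding two
   doubleword constants and checking the signed-overflow indication.
   The helper and values are hypothetical; the block is guarded out.  */
#if 0
static void
example_add_double (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int overflow;

  /* (-1) + 1 == 0, with no signed overflow.  */
  overflow = add_double ((unsigned HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
			 (unsigned HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			 &lv, &hv);
  /* Here lv == 0, hv == 0 and overflow == 0.  */
}
#endif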
277 /* Negate a doubleword integer with doubleword result.
278 Return nonzero if the operation overflows, assuming it's signed.
279 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
280 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
283 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
284 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
290 return (*hv & h1) < 0;
300 /* Multiply two doubleword integers with doubleword result.
301 Return nonzero if the operation overflows, assuming it's signed.
302 Each argument is given as two `HOST_WIDE_INT' pieces.
303 One argument is L1 and H1; the other, L2 and H2.
304 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
307 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
308 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
309 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
311 HOST_WIDE_INT arg1[4];
312 HOST_WIDE_INT arg2[4];
313 HOST_WIDE_INT prod[4 * 2];
314 unsigned HOST_WIDE_INT carry;
316 unsigned HOST_WIDE_INT toplow, neglow;
317 HOST_WIDE_INT tophigh, neghigh;
319 encode (arg1, l1, h1);
320 encode (arg2, l2, h2);
322 memset (prod, 0, sizeof prod);
324 for (i = 0; i < 4; i++)
327 for (j = 0; j < 4; j++)
330 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
331 carry += arg1[i] * arg2[j];
332 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
334 prod[k] = LOWPART (carry);
335 carry = HIGHPART (carry);
340 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
342 /* Check for overflow by calculating the top half of the answer in full;
343 it should agree with the low half's sign bit. */
344 decode (prod + 4, &toplow, &tophigh);
347 neg_double (l2, h2, &neglow, &neghigh);
348 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
352 neg_double (l1, h1, &neglow, &neghigh);
353 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
355 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
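/* Illustrative sketch, not part of the original source: multiplying two
   doubleword constants; the return value reports signed overflow, which is
   how int_const_binop uses it for MULT_EXPR below.  The helper and values
   are hypothetical; the block is guarded out.  */
#if 0
static void
example_mul_double (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int overflow;

  /* 3 * 5 == 15, no overflow.  */
  overflow = mul_double ((unsigned HOST_WIDE_INT) 3, (HOST_WIDE_INT) 0,
			 (unsigned HOST_WIDE_INT) 5, (HOST_WIDE_INT) 0,
			 &lv, &hv);
  /* Here lv == 15, hv == 0 and overflow == 0.  */
}
#endif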
358 /* Shift the doubleword integer in L1, H1 left by COUNT places
359 keeping only PREC bits of result.
360 Shift right if COUNT is negative.
361 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
362 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
365 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
366 HOST_WIDE_INT count, unsigned int prec,
367 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
369 unsigned HOST_WIDE_INT signmask;
373 rshift_double (l1, h1, -count, prec, lv, hv, arith);
377 #ifdef SHIFT_COUNT_TRUNCATED
378 if (SHIFT_COUNT_TRUNCATED)
382 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
384 /* Shifting by the host word size is undefined according to the
385 ANSI standard, so we must handle this as a special case. */
389 else if (count >= HOST_BITS_PER_WIDE_INT)
391 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
396 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
397 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
401 /* Sign extend all bits that are beyond the precision. */
403 signmask = -((prec > HOST_BITS_PER_WIDE_INT
404 ? ((unsigned HOST_WIDE_INT) *hv
405 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
406 : (*lv >> (prec - 1))) & 1);
408 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
410 else if (prec >= HOST_BITS_PER_WIDE_INT)
412 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
413 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
418 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
419 *lv |= signmask << prec;
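/* Illustrative sketch, not part of the original source: shifting a
   doubleword value left while keeping only PREC bits, as int_const_binop
   does for LSHIFT_EXPR.  The helper and values are hypothetical; the
   block is guarded out.  */
#if 0
static void
example_lshift_double (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* 1 << 4 kept to 32 bits of precision, with arithmetic extension.  */
  lshift_double ((unsigned HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		 (HOST_WIDE_INT) 4, 32, &lv, &hv, 1);
  /* Here lv == 16 and hv == 0.  */
}
#endif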
423 /* Shift the doubleword integer in L1, H1 right by COUNT places
424 keeping only PREC bits of result. COUNT must be positive.
425 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
426 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
429 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
430 HOST_WIDE_INT count, unsigned int prec,
431 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
434 unsigned HOST_WIDE_INT signmask;
437 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
440 #ifdef SHIFT_COUNT_TRUNCATED
441 if (SHIFT_COUNT_TRUNCATED)
445 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
447 /* Shifting by the host word size is undefined according to the
448 ANSI standard, so we must handle this as a special case. */
452 else if (count >= HOST_BITS_PER_WIDE_INT)
455 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
459 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
461 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
464 /* Zero / sign extend all bits that are beyond the precision. */
466 if (count >= (HOST_WIDE_INT)prec)
471 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
473 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
475 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
476 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
481 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
482 *lv |= signmask << (prec - count);
486 /* Rotate the doubleword integer in L1, H1 left by COUNT places
487 keeping only PREC bits of result.
488 Rotate right if COUNT is negative.
489 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
492 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
493 HOST_WIDE_INT count, unsigned int prec,
494 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
496 unsigned HOST_WIDE_INT s1l, s2l;
497 HOST_WIDE_INT s1h, s2h;
503 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
504 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
509 /* Rotate the doubleword integer in L1, H1 left by COUNT places
510 keeping only PREC bits of result. COUNT must be positive.
511 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
514 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
515 HOST_WIDE_INT count, unsigned int prec,
516 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
518 unsigned HOST_WIDE_INT s1l, s2l;
519 HOST_WIDE_INT s1h, s2h;
525 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
526 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
531 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
532 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
533 CODE is a tree code for a kind of division, one of
534 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
536 It controls how the quotient is rounded to an integer.
537 Return nonzero if the operation overflows.
538 UNS nonzero says do unsigned division. */
541 div_and_round_double (enum tree_code code, int uns,
542 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
543 HOST_WIDE_INT hnum_orig,
544 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
545 HOST_WIDE_INT hden_orig,
546 unsigned HOST_WIDE_INT *lquo,
547 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
551 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
552 HOST_WIDE_INT den[4], quo[4];
554 unsigned HOST_WIDE_INT work;
555 unsigned HOST_WIDE_INT carry = 0;
556 unsigned HOST_WIDE_INT lnum = lnum_orig;
557 HOST_WIDE_INT hnum = hnum_orig;
558 unsigned HOST_WIDE_INT lden = lden_orig;
559 HOST_WIDE_INT hden = hden_orig;
562 if (hden == 0 && lden == 0)
563 overflow = 1, lden = 1;
565 /* Calculate quotient sign and convert operands to unsigned. */
571 /* (minimum integer) / (-1) is the only overflow case. */
572 if (neg_double (lnum, hnum, &lnum, &hnum)
573 && ((HOST_WIDE_INT) lden & hden) == -1)
579 neg_double (lden, hden, &lden, &hden);
583 if (hnum == 0 && hden == 0)
584 { /* single precision */
586 /* This unsigned division rounds toward zero. */
592 { /* trivial case: dividend < divisor */
593 /* hden != 0 already checked. */
600 memset (quo, 0, sizeof quo);
602 memset (num, 0, sizeof num); /* to zero 9th element */
603 memset (den, 0, sizeof den);
605 encode (num, lnum, hnum);
606 encode (den, lden, hden);
608 /* Special code for when the divisor < BASE. */
609 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
611 /* hnum != 0 already checked. */
612 for (i = 4 - 1; i >= 0; i--)
614 work = num[i] + carry * BASE;
615 quo[i] = work / lden;
621 /* Full double precision division,
622 with thanks to Don Knuth's "Seminumerical Algorithms". */
623 int num_hi_sig, den_hi_sig;
624 unsigned HOST_WIDE_INT quo_est, scale;
626 /* Find the highest nonzero divisor digit. */
627 for (i = 4 - 1;; i--)
634 /* Ensure that the first digit of the divisor is at least BASE/2.
635 This is required by the quotient digit estimation algorithm. */
637 scale = BASE / (den[den_hi_sig] + 1);
639 { /* scale divisor and dividend */
641 for (i = 0; i <= 4 - 1; i++)
643 work = (num[i] * scale) + carry;
644 num[i] = LOWPART (work);
645 carry = HIGHPART (work);
650 for (i = 0; i <= 4 - 1; i++)
652 work = (den[i] * scale) + carry;
653 den[i] = LOWPART (work);
654 carry = HIGHPART (work);
655 if (den[i] != 0) den_hi_sig = i;
662 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
664 /* Guess the next quotient digit, quo_est, by dividing the first
665 two remaining dividend digits by the high order quotient digit.
666 quo_est is never low and is at most 2 high. */
667 unsigned HOST_WIDE_INT tmp;
669 num_hi_sig = i + den_hi_sig + 1;
670 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
671 if (num[num_hi_sig] != den[den_hi_sig])
672 quo_est = work / den[den_hi_sig];
676 /* Refine quo_est so it's usually correct, and at most one high. */
677 tmp = work - quo_est * den[den_hi_sig];
679 && (den[den_hi_sig - 1] * quo_est
680 > (tmp * BASE + num[num_hi_sig - 2])))
683 /* Try QUO_EST as the quotient digit, by multiplying the
684 divisor by QUO_EST and subtracting from the remaining dividend.
685 Keep in mind that QUO_EST is the I - 1st digit. */
688 for (j = 0; j <= den_hi_sig; j++)
690 work = quo_est * den[j] + carry;
691 carry = HIGHPART (work);
692 work = num[i + j] - LOWPART (work);
693 num[i + j] = LOWPART (work);
694 carry += HIGHPART (work) != 0;
697 /* If quo_est was high by one, then num[i] went negative and
698 we need to correct things. */
699 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
702 carry = 0; /* add divisor back in */
703 for (j = 0; j <= den_hi_sig; j++)
705 work = num[i + j] + den[j] + carry;
706 carry = HIGHPART (work);
707 num[i + j] = LOWPART (work);
710 num [num_hi_sig] += carry;
713 /* Store the quotient digit. */
718 decode (quo, lquo, hquo);
721 /* If result is negative, make it so. */
723 neg_double (*lquo, *hquo, lquo, hquo);
725 /* compute trial remainder: rem = num - (quo * den) */
726 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
727 neg_double (*lrem, *hrem, lrem, hrem);
728 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
733 case TRUNC_MOD_EXPR: /* round toward zero */
734 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
738 case FLOOR_MOD_EXPR: /* round toward negative infinity */
739 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
742 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
750 case CEIL_MOD_EXPR: /* round toward positive infinity */
751 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
753 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
761 case ROUND_MOD_EXPR: /* round to closest integer */
763 unsigned HOST_WIDE_INT labs_rem = *lrem;
764 HOST_WIDE_INT habs_rem = *hrem;
765 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
766 HOST_WIDE_INT habs_den = hden, htwice;
768 /* Get absolute values. */
770 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
772 neg_double (lden, hden, &labs_den, &habs_den);
774 /* If (2 * abs (lrem) >= abs (lden)) */
775 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
776 labs_rem, habs_rem, <wice, &htwice);
778 if (((unsigned HOST_WIDE_INT) habs_den
779 < (unsigned HOST_WIDE_INT) htwice)
780 || (((unsigned HOST_WIDE_INT) habs_den
781 == (unsigned HOST_WIDE_INT) htwice)
782 && (labs_den < ltwice)))
786 add_double (*lquo, *hquo,
787 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
790 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
802 /* Compute true remainder: rem = num - (quo * den) */
803 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
804 neg_double (*lrem, *hrem, lrem, hrem);
805 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
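/* Illustrative sketch, not part of the original source: the rounding code
   only matters for inexact quotients.  For signed -7 / 2:
     TRUNC_DIV_EXPR yields quotient -3, remainder -1;
     FLOOR_DIV_EXPR yields quotient -4, remainder  1;
     CEIL_DIV_EXPR  yields quotient -3, remainder -1;
     ROUND_DIV_EXPR yields quotient -4, remainder  1 (the halfway case is
     rounded away from zero by the 2 * abs (rem) >= abs (den) test above).
   The helper is hypothetical and guarded out.  */
#if 0
static void
example_div_and_round_double (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  /* Signed -7 / 2 with floor rounding.  */
  div_and_round_double (FLOOR_DIV_EXPR, 0,
			(unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
			(unsigned HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
			&lquo, &hquo, &lrem, &hrem);
  /* Here the quotient is -4 and the remainder is 1.  */
}
#endif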
809 /* Return true if built-in mathematical function specified by CODE
810 preserves the sign of its argument, i.e. -f(x) == f(-x). */
813 negate_mathfn_p (enum built_in_function code)
838 /* Determine whether an expression T can be cheaply negated using
839 the function negate_expr. */
842 negate_expr_p (tree t)
844 unsigned HOST_WIDE_INT val;
851 type = TREE_TYPE (t);
854 switch (TREE_CODE (t))
857 if (TREE_UNSIGNED (type) || ! flag_trapv)
860 /* Check that -CST will not overflow type. */
861 prec = TYPE_PRECISION (type);
862 if (prec > HOST_BITS_PER_WIDE_INT)
864 if (TREE_INT_CST_LOW (t) != 0)
866 prec -= HOST_BITS_PER_WIDE_INT;
867 val = TREE_INT_CST_HIGH (t);
870 val = TREE_INT_CST_LOW (t);
871 if (prec < HOST_BITS_PER_WIDE_INT)
872 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
873 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
880 return negate_expr_p (TREE_REALPART (t))
881 && negate_expr_p (TREE_IMAGPART (t));
884 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
885 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
886 && reorder_operands_p (TREE_OPERAND (t, 0),
887 TREE_OPERAND (t, 1));
890 if (TREE_UNSIGNED (TREE_TYPE (t)))
896 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
897 return negate_expr_p (TREE_OPERAND (t, 1))
898 || negate_expr_p (TREE_OPERAND (t, 0));
902 /* Negate -((double)float) as (double)(-float). */
903 if (TREE_CODE (type) == REAL_TYPE)
905 tree tem = strip_float_extensions (t);
907 return negate_expr_p (tem);
912 /* Negate -f(x) as f(-x). */
913 if (negate_mathfn_p (builtin_mathfn_code (t)))
914 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
923 /* Given T, an expression, return the negation of T. Allow for T to be
924 null, in which case return null. */
935 type = TREE_TYPE (t);
938 switch (TREE_CODE (t))
942 unsigned HOST_WIDE_INT low;
944 int overflow = neg_double (TREE_INT_CST_LOW (t),
945 TREE_INT_CST_HIGH (t),
947 tem = build_int_2 (low, high);
948 TREE_TYPE (tem) = type;
951 | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
952 TREE_CONSTANT_OVERFLOW (tem)
953 = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);
955 if (! TREE_OVERFLOW (tem)
956 || TREE_UNSIGNED (type)
962 tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
963 /* Two's complement FP formats, such as c4x, may overflow. */
964 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
965 return fold_convert (type, tem);
970 tree rpart = negate_expr (TREE_REALPART (t));
971 tree ipart = negate_expr (TREE_IMAGPART (t));
973 if ((TREE_CODE (rpart) == REAL_CST
974 && TREE_CODE (ipart) == REAL_CST)
975 || (TREE_CODE (rpart) == INTEGER_CST
976 && TREE_CODE (ipart) == INTEGER_CST))
977 return build_complex (type, rpart, ipart);
982 return fold_convert (type, TREE_OPERAND (t, 0));
985 /* - (A - B) -> B - A */
986 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
987 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
988 return fold_convert (type,
989 fold (build (MINUS_EXPR, TREE_TYPE (t),
991 TREE_OPERAND (t, 0))));
995 if (TREE_UNSIGNED (TREE_TYPE (t)))
1001 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1003 tem = TREE_OPERAND (t, 1);
1004 if (negate_expr_p (tem))
1005 return fold_convert (type,
1006 fold (build (TREE_CODE (t), TREE_TYPE (t),
1007 TREE_OPERAND (t, 0),
1008 negate_expr (tem))));
1009 tem = TREE_OPERAND (t, 0);
1010 if (negate_expr_p (tem))
1011 return fold_convert (type,
1012 fold (build (TREE_CODE (t), TREE_TYPE (t),
1014 TREE_OPERAND (t, 1))));
1019 /* Convert -((double)float) into (double)(-float). */
1020 if (TREE_CODE (type) == REAL_TYPE)
1022 tem = strip_float_extensions (t);
1023 if (tem != t && negate_expr_p (tem))
1024 return fold_convert (type, negate_expr (tem));
1029 /* Negate -f(x) as f(-x). */
1030 if (negate_mathfn_p (builtin_mathfn_code (t))
1031 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1033 tree fndecl, arg, arglist;
1035 fndecl = get_callee_fndecl (t);
1036 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1037 arglist = build_tree_list (NULL_TREE, arg);
1038 return build_function_call_expr (fndecl, arglist);
1046 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1047 return fold_convert (type, tem);
1050 /* Split a tree IN into a constant, literal and variable parts that could be
1051 combined with CODE to make IN. "constant" means an expression with
1052 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1053 commutative arithmetic operation. Store the constant part into *CONP,
1054 the literal in *LITP and return the variable part. If a part isn't
1055 present, set it to null. If the tree does not decompose in this way,
1056 return the entire tree as the variable part and the other parts as null.
1058 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1059 case, we negate an operand that was subtracted. Except if it is a
1060 literal for which we use *MINUS_LITP instead.
1062 If NEGATE_P is true, we are negating all of IN, again except a literal
1063 for which we use *MINUS_LITP instead.
1065 If IN is itself a literal or constant, return it as appropriate.
1067 Note that we do not guarantee that any of the three values will be the
1068 same type as IN, but they will have the same signedness and mode. */
1071 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1072 tree *minus_litp, int negate_p)
1080 /* Strip any conversions that don't change the machine mode or signedness. */
1081 STRIP_SIGN_NOPS (in);
1083 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1085 else if (TREE_CODE (in) == code
1086 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1087 /* We can associate addition and subtraction together (even
1088 though the C standard doesn't say so) for integers because
1089 the value is not affected. For reals, the value might be
1090 affected, so we can't. */
1091 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1092 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1094 tree op0 = TREE_OPERAND (in, 0);
1095 tree op1 = TREE_OPERAND (in, 1);
1096 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1097 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1099 /* First see if either of the operands is a literal, then a constant. */
1100 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1101 *litp = op0, op0 = 0;
1102 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1103 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1105 if (op0 != 0 && TREE_CONSTANT (op0))
1106 *conp = op0, op0 = 0;
1107 else if (op1 != 0 && TREE_CONSTANT (op1))
1108 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1110 /* If we haven't dealt with either operand, this is not a case we can
1111 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1112 if (op0 != 0 && op1 != 0)
1117 var = op1, neg_var_p = neg1_p;
1119 /* Now do any needed negations. */
1121 *minus_litp = *litp, *litp = 0;
1123 *conp = negate_expr (*conp);
1125 var = negate_expr (var);
1127 else if (TREE_CONSTANT (in))
1135 *minus_litp = *litp, *litp = 0;
1136 else if (*minus_litp)
1137 *litp = *minus_litp, *minus_litp = 0;
1138 *conp = negate_expr (*conp);
1139 var = negate_expr (var);
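/* Illustrative note, not part of the original source: for IN = (x + 3) and
   CODE = PLUS_EXPR, split_tree returns the variable part "x", stores the
   literal 3 in *LITP and leaves *CONP and *MINUS_LITP null; for
   IN = (x - 3) the literal instead goes into *MINUS_LITP.  This is how
   fold re-associates constants across chains of PLUS_EXPR and MINUS_EXPR.  */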
1145 /* Re-associate trees split by the above function. T1 and T2 are either
1146 expressions to associate or null. Return the new expression, if any. If
1147 we build an operation, do it in TYPE and with CODE. */
1150 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1157 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1158 try to fold this since we will have infinite recursion. But do
1159 deal with any NEGATE_EXPRs. */
1160 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1161 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1163 if (code == PLUS_EXPR)
1165 if (TREE_CODE (t1) == NEGATE_EXPR)
1166 return build (MINUS_EXPR, type, fold_convert (type, t2),
1167 fold_convert (type, TREE_OPERAND (t1, 0)));
1168 else if (TREE_CODE (t2) == NEGATE_EXPR)
1169 return build (MINUS_EXPR, type, fold_convert (type, t1),
1170 fold_convert (type, TREE_OPERAND (t2, 0)));
1172 return build (code, type, fold_convert (type, t1),
1173 fold_convert (type, t2));
1176 return fold (build (code, type, fold_convert (type, t1),
1177 fold_convert (type, t2)));
1180 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1181 to produce a new constant.
1183 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1186 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1188 unsigned HOST_WIDE_INT int1l, int2l;
1189 HOST_WIDE_INT int1h, int2h;
1190 unsigned HOST_WIDE_INT low;
1192 unsigned HOST_WIDE_INT garbagel;
1193 HOST_WIDE_INT garbageh;
1195 tree type = TREE_TYPE (arg1);
1196 int uns = TREE_UNSIGNED (type);
1198 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1200 int no_overflow = 0;
1202 int1l = TREE_INT_CST_LOW (arg1);
1203 int1h = TREE_INT_CST_HIGH (arg1);
1204 int2l = TREE_INT_CST_LOW (arg2);
1205 int2h = TREE_INT_CST_HIGH (arg2);
1210 low = int1l | int2l, hi = int1h | int2h;
1214 low = int1l ^ int2l, hi = int1h ^ int2h;
1218 low = int1l & int2l, hi = int1h & int2h;
1224 /* It's unclear from the C standard whether shifts can overflow.
1225 The following code ignores overflow; perhaps a C standard
1226 interpretation ruling is needed. */
1227 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1235 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1240 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1244 neg_double (int2l, int2h, &low, &hi);
1245 add_double (int1l, int1h, low, hi, &low, &hi);
1246 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1250 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1253 case TRUNC_DIV_EXPR:
1254 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1255 case EXACT_DIV_EXPR:
1256 /* This is a shortcut for a common special case. */
1257 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1258 && ! TREE_CONSTANT_OVERFLOW (arg1)
1259 && ! TREE_CONSTANT_OVERFLOW (arg2)
1260 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1262 if (code == CEIL_DIV_EXPR)
1265 low = int1l / int2l, hi = 0;
1269 /* ... fall through ... */
1271 case ROUND_DIV_EXPR:
1272 if (int2h == 0 && int2l == 1)
1274 low = int1l, hi = int1h;
1277 if (int1l == int2l && int1h == int2h
1278 && ! (int1l == 0 && int1h == 0))
1283 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1284 &low, &hi, &garbagel, &garbageh);
1287 case TRUNC_MOD_EXPR:
1288 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1289 /* This is a shortcut for a common special case. */
1290 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1291 && ! TREE_CONSTANT_OVERFLOW (arg1)
1292 && ! TREE_CONSTANT_OVERFLOW (arg2)
1293 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1295 if (code == CEIL_MOD_EXPR)
1297 low = int1l % int2l, hi = 0;
1301 /* ... fall through ... */
1303 case ROUND_MOD_EXPR:
1304 overflow = div_and_round_double (code, uns,
1305 int1l, int1h, int2l, int2h,
1306 &garbagel, &garbageh, &low, &hi);
1312 low = (((unsigned HOST_WIDE_INT) int1h
1313 < (unsigned HOST_WIDE_INT) int2h)
1314 || (((unsigned HOST_WIDE_INT) int1h
1315 == (unsigned HOST_WIDE_INT) int2h)
1318 low = (int1h < int2h
1319 || (int1h == int2h && int1l < int2l));
1321 if (low == (code == MIN_EXPR))
1322 low = int1l, hi = int1h;
1324 low = int2l, hi = int2h;
1331 /* If this is for a sizetype, can be represented as one (signed)
1332 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1333 the nodes. */
1334 if (is_sizetype
1335 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1336 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1337 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1338 return size_int_type_wide (low, type);
1341 t = build_int_2 (low, hi);
1342 TREE_TYPE (t) = TREE_TYPE (arg1);
1347 ? (!uns || is_sizetype) && overflow
1348 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1350 | TREE_OVERFLOW (arg1)
1351 | TREE_OVERFLOW (arg2));
1353 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1354 So check if force_fit_type truncated the value. */
1356 && ! TREE_OVERFLOW (t)
1357 && (TREE_INT_CST_HIGH (t) != hi
1358 || TREE_INT_CST_LOW (t) != low))
1359 TREE_OVERFLOW (t) = 1;
1361 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1362 | TREE_CONSTANT_OVERFLOW (arg1)
1363 | TREE_CONSTANT_OVERFLOW (arg2));
1367 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1368 constant. We assume ARG1 and ARG2 have the same data type, or at least
1369 are the same kind of constant and the same machine mode.
1371 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1374 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1379 if (TREE_CODE (arg1) == INTEGER_CST)
1380 return int_const_binop (code, arg1, arg2, notrunc);
1382 if (TREE_CODE (arg1) == REAL_CST)
1384 enum machine_mode mode;
1387 REAL_VALUE_TYPE value;
1390 d1 = TREE_REAL_CST (arg1);
1391 d2 = TREE_REAL_CST (arg2);
1393 type = TREE_TYPE (arg1);
1394 mode = TYPE_MODE (type);
1396 /* Don't perform operation if we honor signaling NaNs and
1397 either operand is a NaN. */
1398 if (HONOR_SNANS (mode)
1399 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1402 /* Don't perform operation if it would raise a division
1403 by zero exception. */
1404 if (code == RDIV_EXPR
1405 && REAL_VALUES_EQUAL (d2, dconst0)
1406 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1409 /* If either operand is a NaN, just return it. Otherwise, set up
1410 for floating-point trap; we return an overflow. */
1411 if (REAL_VALUE_ISNAN (d1))
1413 else if (REAL_VALUE_ISNAN (d2))
1416 REAL_ARITHMETIC (value, code, d1, d2);
1418 t = build_real (type, real_value_truncate (mode, value));
1421 = (force_fit_type (t, 0)
1422 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1423 TREE_CONSTANT_OVERFLOW (t)
1425 | TREE_CONSTANT_OVERFLOW (arg1)
1426 | TREE_CONSTANT_OVERFLOW (arg2);
1429 if (TREE_CODE (arg1) == COMPLEX_CST)
1431 tree type = TREE_TYPE (arg1);
1432 tree r1 = TREE_REALPART (arg1);
1433 tree i1 = TREE_IMAGPART (arg1);
1434 tree r2 = TREE_REALPART (arg2);
1435 tree i2 = TREE_IMAGPART (arg2);
1441 t = build_complex (type,
1442 const_binop (PLUS_EXPR, r1, r2, notrunc),
1443 const_binop (PLUS_EXPR, i1, i2, notrunc));
1447 t = build_complex (type,
1448 const_binop (MINUS_EXPR, r1, r2, notrunc),
1449 const_binop (MINUS_EXPR, i1, i2, notrunc));
1453 t = build_complex (type,
1454 const_binop (MINUS_EXPR,
1455 const_binop (MULT_EXPR,
1457 const_binop (MULT_EXPR,
1460 const_binop (PLUS_EXPR,
1461 const_binop (MULT_EXPR,
1463 const_binop (MULT_EXPR,
1471 = const_binop (PLUS_EXPR,
1472 const_binop (MULT_EXPR, r2, r2, notrunc),
1473 const_binop (MULT_EXPR, i2, i2, notrunc),
1476 t = build_complex (type,
1478 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1479 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1480 const_binop (PLUS_EXPR,
1481 const_binop (MULT_EXPR, r1, r2,
1483 const_binop (MULT_EXPR, i1, i2,
1486 magsquared, notrunc),
1488 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1489 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1490 const_binop (MINUS_EXPR,
1491 const_binop (MULT_EXPR, i1, r2,
1493 const_binop (MULT_EXPR, r1, i2,
1496 magsquared, notrunc));
1508 /* These are the hash table functions for the hash table of INTEGER_CST
1509 nodes of a sizetype. */
1511 /* Return the hash code for X, an INTEGER_CST. */
1514 size_htab_hash (const void *x)
1518 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1519 ^ htab_hash_pointer (TREE_TYPE (t))
1520 ^ (TREE_OVERFLOW (t) << 20));
1523 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1524 is the same as that given by *Y, which is also an INTEGER_CST tree node. */
1527 size_htab_eq (const void *x, const void *y)
1532 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1533 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1534 && TREE_TYPE (xt) == TREE_TYPE (yt)
1535 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1538 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1539 bits are given by NUMBER and of the sizetype represented by KIND. */
1542 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1544 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1547 /* Likewise, but the desired type is specified explicitly. */
1549 static GTY (()) tree new_const;
1550 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1554 size_int_type_wide (HOST_WIDE_INT number, tree type)
1560 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1561 new_const = make_node (INTEGER_CST);
1564 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1565 hash table, we return the value from the hash table. Otherwise, we
1566 place that in the hash table and make a new node for the next time. */
1567 TREE_INT_CST_LOW (new_const) = number;
1568 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1569 TREE_TYPE (new_const) = type;
1570 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1571 = force_fit_type (new_const, 0);
1573 slot = htab_find_slot (size_htab, new_const, INSERT);
1579 new_const = make_node (INTEGER_CST);
1583 return (tree) *slot;
1586 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1587 is a tree code. The type of the result is taken from the operands.
1588 Both must be the same integer type and it must be a size type.
1589 If the operands are constant, so is the result. */
1592 size_binop (enum tree_code code, tree arg0, tree arg1)
1594 tree type = TREE_TYPE (arg0);
1596 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1597 || type != TREE_TYPE (arg1))
1600 /* Handle the special case of two integer constants faster. */
1601 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1603 /* And some specific cases even faster than that. */
1604 if (code == PLUS_EXPR && integer_zerop (arg0))
1606 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1607 && integer_zerop (arg1))
1609 else if (code == MULT_EXPR && integer_onep (arg0))
1612 /* Handle general case of two integer constants. */
1613 return int_const_binop (code, arg0, arg1, 0);
1616 if (arg0 == error_mark_node || arg1 == error_mark_node)
1617 return error_mark_node;
1619 return fold (build (code, type, arg0, arg1));
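/* Illustrative sketch, not part of the original source: size_binop is the
   usual way to do sizetype arithmetic in the middle end, e.g. when
   computing aggregate sizes.  Both operands must already have the same
   sizetype; constant operands are folded immediately by int_const_binop.
   The helper is hypothetical and guarded out.  */
#if 0
static tree
example_array_size_in_bytes (tree elt_size, tree nelts)
{
  /* ELT_SIZE and NELTS are assumed to be expressions of type sizetype.  */
  return size_binop (MULT_EXPR, elt_size, nelts);
}
#endif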
1622 /* Given two values, either both of sizetype or both of bitsizetype,
1623 compute the difference between the two values. Return the value
1624 in signed type corresponding to the type of the operands. */
1627 size_diffop (tree arg0, tree arg1)
1629 tree type = TREE_TYPE (arg0);
1632 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1633 || type != TREE_TYPE (arg1))
1636 /* If the type is already signed, just do the simple thing. */
1637 if (! TREE_UNSIGNED (type))
1638 return size_binop (MINUS_EXPR, arg0, arg1);
1640 ctype = (type == bitsizetype || type == ubitsizetype
1641 ? sbitsizetype : ssizetype);
1643 /* If either operand is not a constant, do the conversions to the signed
1644 type and subtract. The hardware will do the right thing with any
1645 overflow in the subtraction. */
1646 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1647 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1648 fold_convert (ctype, arg1));
1650 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1651 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1652 overflow) and negate (which can't either). Special-case a result
1653 of zero while we're here. */
1654 if (tree_int_cst_equal (arg0, arg1))
1655 return fold_convert (ctype, integer_zero_node);
1656 else if (tree_int_cst_lt (arg1, arg0))
1657 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1659 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1660 fold_convert (ctype, size_binop (MINUS_EXPR,
1665 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1666 type TYPE. If no simplification can be done return NULL_TREE. */
1669 fold_convert_const (enum tree_code code ATTRIBUTE_UNUSED, tree type,
1675 if (TREE_TYPE (arg1) == type)
1678 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1680 if (TREE_CODE (arg1) == INTEGER_CST)
1682 /* If we would build a constant wider than GCC supports,
1683 leave the conversion unfolded. */
1684 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1687 /* If we are trying to make a sizetype for a small integer, use
1688 size_int to pick up cached types to reduce duplicate nodes. */
1689 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1690 && !TREE_CONSTANT_OVERFLOW (arg1)
1691 && compare_tree_int (arg1, 10000) < 0)
1692 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1694 /* Given an integer constant, make new constant with new type,
1695 appropriately sign-extended or truncated. */
1696 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1697 TREE_INT_CST_HIGH (arg1));
1698 TREE_TYPE (t) = type;
1699 /* Indicate an overflow if (1) ARG1 already overflowed,
1700 or (2) force_fit_type indicates an overflow.
1701 Tell force_fit_type that an overflow has already occurred
1702 if ARG1 is a too-large unsigned value and T is signed.
1703 But don't indicate an overflow if converting a pointer. */
1705 = ((force_fit_type (t,
1706 (TREE_INT_CST_HIGH (arg1) < 0
1707 && (TREE_UNSIGNED (type)
1708 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1709 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1710 || TREE_OVERFLOW (arg1));
1711 TREE_CONSTANT_OVERFLOW (t)
1712 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1715 else if (TREE_CODE (arg1) == REAL_CST)
1717 /* The following code implements the floating point to integer
1718 conversion rules required by the Java Language Specification,
1719 that IEEE NaNs are mapped to zero and values that overflow
1720 the target precision saturate, i.e. values greater than
1721 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1722 are mapped to INT_MIN. These semantics are allowed by the
1723 C and C++ standards that simply state that the behavior of
1724 FP-to-integer conversion is unspecified upon overflow. */
1726 HOST_WIDE_INT high, low;
1728 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1729 /* If x is NaN, return zero and show we have an overflow. */
1730 if (REAL_VALUE_ISNAN (x))
1737 /* See if X will be in range after truncation towards 0.
1738 To compensate for truncation, move the bounds away from 0,
1739 but reject if X exactly equals the adjusted bounds. */
1743 tree lt = TYPE_MIN_VALUE (type);
1744 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1745 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1746 if (! REAL_VALUES_LESS (l, x))
1749 high = TREE_INT_CST_HIGH (lt);
1750 low = TREE_INT_CST_LOW (lt);
1756 tree ut = TYPE_MAX_VALUE (type);
1759 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1760 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1761 if (! REAL_VALUES_LESS (x, u))
1764 high = TREE_INT_CST_HIGH (ut);
1765 low = TREE_INT_CST_LOW (ut);
1771 REAL_VALUE_TO_INT (&low, &high, x);
1773 t = build_int_2 (low, high);
1774 TREE_TYPE (t) = type;
1776 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1777 TREE_CONSTANT_OVERFLOW (t)
1778 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1782 else if (TREE_CODE (type) == REAL_TYPE)
1784 if (TREE_CODE (arg1) == INTEGER_CST)
1785 return build_real_from_int_cst (type, arg1);
1786 if (TREE_CODE (arg1) == REAL_CST)
1788 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1790 /* We make a copy of ARG1 so that we don't modify an
1791 existing constant tree. */
1792 t = copy_node (arg1);
1793 TREE_TYPE (t) = type;
1797 t = build_real (type,
1798 real_value_truncate (TYPE_MODE (type),
1799 TREE_REAL_CST (arg1)));
1802 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1803 TREE_CONSTANT_OVERFLOW (t)
1804 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1811 /* Convert expression ARG to type TYPE. Used by the middle-end for
1812 simple conversions in preference to calling the front-end's convert. */
1815 fold_convert (tree type, tree arg)
1817 tree orig = TREE_TYPE (arg);
1823 if (TREE_CODE (arg) == ERROR_MARK
1824 || TREE_CODE (type) == ERROR_MARK
1825 || TREE_CODE (orig) == ERROR_MARK)
1826 return error_mark_node;
1828 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1829 return fold (build1 (NOP_EXPR, type, arg));
1831 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1833 if (TREE_CODE (arg) == INTEGER_CST)
1835 tem = fold_convert_const (NOP_EXPR, type, arg);
1836 if (tem != NULL_TREE)
1839 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1840 return fold (build1 (NOP_EXPR, type, arg));
1841 if (TREE_CODE (orig) == COMPLEX_TYPE)
1843 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1844 return fold_convert (type, tem);
1846 if (TREE_CODE (orig) == VECTOR_TYPE
1847 && GET_MODE_SIZE (TYPE_MODE (type))
1848 == GET_MODE_SIZE (TYPE_MODE (orig)))
1849 return fold (build1 (NOP_EXPR, type, arg));
1851 else if (TREE_CODE (type) == REAL_TYPE)
1853 if (TREE_CODE (arg) == INTEGER_CST)
1855 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1856 if (tem != NULL_TREE)
1859 else if (TREE_CODE (arg) == REAL_CST)
1861 tem = fold_convert_const (NOP_EXPR, type, arg);
1862 if (tem != NULL_TREE)
1866 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1867 return fold (build1 (FLOAT_EXPR, type, arg));
1868 if (TREE_CODE (orig) == REAL_TYPE)
1869 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1871 if (TREE_CODE (orig) == COMPLEX_TYPE)
1873 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1874 return fold_convert (type, tem);
1877 else if (TREE_CODE (type) == COMPLEX_TYPE)
1879 if (INTEGRAL_TYPE_P (orig)
1880 || POINTER_TYPE_P (orig)
1881 || TREE_CODE (orig) == REAL_TYPE)
1882 return build (COMPLEX_EXPR, type,
1883 fold_convert (TREE_TYPE (type), arg),
1884 fold_convert (TREE_TYPE (type), integer_zero_node));
1885 if (TREE_CODE (orig) == COMPLEX_TYPE)
1889 if (TREE_CODE (arg) == COMPLEX_EXPR)
1891 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1892 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1893 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1896 arg = save_expr (arg);
1897 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1898 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1899 rpart = fold_convert (TREE_TYPE (type), rpart);
1900 ipart = fold_convert (TREE_TYPE (type), ipart);
1901 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1904 else if (TREE_CODE (type) == VECTOR_TYPE)
1906 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1907 && GET_MODE_SIZE (TYPE_MODE (type))
1908 == GET_MODE_SIZE (TYPE_MODE (orig)))
1909 return fold (build1 (NOP_EXPR, type, arg));
1910 if (TREE_CODE (orig) == VECTOR_TYPE
1911 && GET_MODE_SIZE (TYPE_MODE (type))
1912 == GET_MODE_SIZE (TYPE_MODE (orig)))
1913 return fold (build1 (NOP_EXPR, type, arg));
1915 else if (VOID_TYPE_P (type))
1916 return fold (build1 (CONVERT_EXPR, type, arg));
1920 /* Return an expr equal to X but certainly not valid as an lvalue. */
1927 /* These things are certainly not lvalues. */
1928 if (TREE_CODE (x) == NON_LVALUE_EXPR
1929 || TREE_CODE (x) == INTEGER_CST
1930 || TREE_CODE (x) == REAL_CST
1931 || TREE_CODE (x) == STRING_CST
1932 || TREE_CODE (x) == ADDR_EXPR)
1935 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1936 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1940 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1941 Zero means allow extended lvalues. */
1943 int pedantic_lvalues;
1945 /* When pedantic, return an expr equal to X but certainly not valid as a
1946 pedantic lvalue. Otherwise, return X. */
1949 pedantic_non_lvalue (tree x)
1951 if (pedantic_lvalues)
1952 return non_lvalue (x);
1957 /* Given a tree comparison code, return the code that is the logical inverse
1958 of the given code. It is not safe to do this for floating-point
1959 comparisons, except for NE_EXPR and EQ_EXPR. */
1961 static enum tree_code
1962 invert_tree_comparison (enum tree_code code)
1983 /* Similar, but return the comparison that results if the operands are
1984 swapped. This is safe for floating-point. */
1986 static enum tree_code
1987 swap_tree_comparison (enum tree_code code)
2008 /* Convert a comparison tree code from an enum tree_code representation
2009 into a compcode bit-based encoding. This function is the inverse of
2010 compcode_to_comparison. */
2013 comparison_to_compcode (enum tree_code code)
2034 /* Convert a compcode bit-based encoding of a comparison operator back
2035 to GCC's enum tree_code representation. This function is the
2036 inverse of comparison_to_compcode. */
2038 static enum tree_code
2039 compcode_to_comparison (int code)
2060 /* Return nonzero if CODE is a tree code that represents a truth value. */
2063 truth_value_p (enum tree_code code)
2065 return (TREE_CODE_CLASS (code) == '<'
2066 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2067 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2068 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2071 /* Return nonzero if two operands (typically of the same tree node)
2072 are necessarily equal. If either argument has side-effects this
2073 function returns zero.
2075 If ONLY_CONST is nonzero, only return nonzero for constants.
2076 This function tests whether the operands are indistinguishable;
2077 it does not test whether they are equal using C's == operation.
2078 The distinction is important for IEEE floating point, because
2079 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2080 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2082 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2083 even though it may hold multiple values during a function.
2084 This is because a GCC tree node guarantees that nothing else is
2085 executed between the evaluation of its "operands" (which may often
2086 be evaluated in arbitrary order). Hence if the operands themselves
2087 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2088 same value in each operand/subexpression. Hence a zero value for
2089 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2090 If comparing arbitrary expression trees, such as from different
2091 statements, ONLY_CONST must usually be nonzero. */
2094 operand_equal_p (tree arg0, tree arg1, int only_const)
2098 /* If both types don't have the same signedness, then we can't consider
2099 them equal. We must check this before the STRIP_NOPS calls
2100 because they may change the signedness of the arguments. */
2101 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2107 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2108 /* This is needed for conversions and for COMPONENT_REF.
2109 Might as well play it safe and always test this. */
2110 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2111 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2112 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2115 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2116 We don't care about side effects in that case because the SAVE_EXPR
2117 takes care of that for us. In all other cases, two expressions are
2118 equal if they have no side effects. If we have two identical
2119 expressions with side effects that should be treated the same due
2120 to the only side effects being identical SAVE_EXPR's, that will
2121 be detected in the recursive calls below. */
2122 if (arg0 == arg1 && ! only_const
2123 && (TREE_CODE (arg0) == SAVE_EXPR
2124 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2127 /* Next handle constant cases, those for which we can return 1 even
2128 if ONLY_CONST is set. */
2129 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2130 switch (TREE_CODE (arg0))
2133 return (! TREE_CONSTANT_OVERFLOW (arg0)
2134 && ! TREE_CONSTANT_OVERFLOW (arg1)
2135 && tree_int_cst_equal (arg0, arg1));
2138 return (! TREE_CONSTANT_OVERFLOW (arg0)
2139 && ! TREE_CONSTANT_OVERFLOW (arg1)
2140 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2141 TREE_REAL_CST (arg1)));
2147 if (TREE_CONSTANT_OVERFLOW (arg0)
2148 || TREE_CONSTANT_OVERFLOW (arg1))
2151 v1 = TREE_VECTOR_CST_ELTS (arg0);
2152 v2 = TREE_VECTOR_CST_ELTS (arg1);
2155 if (!operand_equal_p (v1, v2, only_const))
2157 v1 = TREE_CHAIN (v1);
2158 v2 = TREE_CHAIN (v2);
2165 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2167 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2171 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2172 && ! memcmp (TREE_STRING_POINTER (arg0),
2173 TREE_STRING_POINTER (arg1),
2174 TREE_STRING_LENGTH (arg0)));
2177 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2186 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2189 /* Two conversions are equal only if signedness and modes match. */
2190 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2191 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2192 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2195 return operand_equal_p (TREE_OPERAND (arg0, 0),
2196 TREE_OPERAND (arg1, 0), 0);
2200 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2201 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2205 /* For commutative ops, allow the other order. */
2206 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
2207 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
2208 || TREE_CODE (arg0) == BIT_IOR_EXPR
2209 || TREE_CODE (arg0) == BIT_XOR_EXPR
2210 || TREE_CODE (arg0) == BIT_AND_EXPR
2211 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
2212 && operand_equal_p (TREE_OPERAND (arg0, 0),
2213 TREE_OPERAND (arg1, 1), 0)
2214 && operand_equal_p (TREE_OPERAND (arg0, 1),
2215 TREE_OPERAND (arg1, 0), 0));
2218 /* If either of the pointer (or reference) expressions we are
2219 dereferencing contain a side effect, these cannot be equal. */
2220 if (TREE_SIDE_EFFECTS (arg0)
2221 || TREE_SIDE_EFFECTS (arg1))
2224 switch (TREE_CODE (arg0))
2227 return operand_equal_p (TREE_OPERAND (arg0, 0),
2228 TREE_OPERAND (arg1, 0), 0);
2232 case ARRAY_RANGE_REF:
2233 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2234 TREE_OPERAND (arg1, 0), 0)
2235 && operand_equal_p (TREE_OPERAND (arg0, 1),
2236 TREE_OPERAND (arg1, 1), 0));
2239 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2240 TREE_OPERAND (arg1, 0), 0)
2241 && operand_equal_p (TREE_OPERAND (arg0, 1),
2242 TREE_OPERAND (arg1, 1), 0)
2243 && operand_equal_p (TREE_OPERAND (arg0, 2),
2244 TREE_OPERAND (arg1, 2), 0));
2250 switch (TREE_CODE (arg0))
2253 case TRUTH_NOT_EXPR:
2254 return operand_equal_p (TREE_OPERAND (arg0, 0),
2255 TREE_OPERAND (arg1, 0), 0);
2258 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2261 /* If the CALL_EXPRs call different functions, then they
2262 clearly can not be equal. */
2263 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2264 TREE_OPERAND (arg1, 0), 0))
2267 /* Only consider const functions equivalent. */
2268 fndecl = get_callee_fndecl (arg0);
2269 if (fndecl == NULL_TREE
2270 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2273 /* Now see if all the arguments are the same. operand_equal_p
2274 does not handle TREE_LIST, so we walk the operands here
2275 feeding them to operand_equal_p. */
2276 arg0 = TREE_OPERAND (arg0, 1);
2277 arg1 = TREE_OPERAND (arg1, 1);
2278 while (arg0 && arg1)
2280 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2283 arg0 = TREE_CHAIN (arg0);
2284 arg1 = TREE_CHAIN (arg1);
2287 /* If we get here and both argument lists are exhausted
2288 then the CALL_EXPRs are equal. */
2289 return ! (arg0 || arg1);
2296 /* Consider __builtin_sqrt equal to sqrt. */
2297 return TREE_CODE (arg0) == FUNCTION_DECL
2298 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2299 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2300 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2307 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2308 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2310 When in doubt, return 0. */
2313 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2315 int unsignedp1, unsignedpo;
2316 tree primarg0, primarg1, primother;
2317 unsigned int correct_width;
2319 if (operand_equal_p (arg0, arg1, 0))
2322 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2323 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2326 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2327 and see if the inner values are the same. This removes any
2328 signedness comparison, which doesn't matter here. */
2329 primarg0 = arg0, primarg1 = arg1;
2330 STRIP_NOPS (primarg0);
2331 STRIP_NOPS (primarg1);
2332 if (operand_equal_p (primarg0, primarg1, 0))
2335 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2336 actual comparison operand, ARG0.
2338 First throw away any conversions to wider types
2339 already present in the operands. */
2341 primarg1 = get_narrower (arg1, &unsignedp1);
2342 primother = get_narrower (other, &unsignedpo);
2344 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2345 if (unsignedp1 == unsignedpo
2346 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2347 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2349 tree type = TREE_TYPE (arg0);
2351 /* Make sure shorter operand is extended the right way
2352 to match the longer operand. */
2353 primarg1 = fold_convert ((*lang_hooks.types.signed_or_unsigned_type)
2354 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2356 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2363 /* See if ARG is an expression that is either a comparison or is performing
2364 arithmetic on comparisons. The comparisons must only be comparing
2365 two different values, which will be stored in *CVAL1 and *CVAL2; if
2366 they are nonzero it means that some operands have already been found.
2367 No variables may be used anywhere else in the expression except in the
2368 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2369 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2371 If this is true, return 1. Otherwise, return zero. */
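/* As an illustration, for ARG = (x < y) || (x == y) this returns 1 and
   leaves *CVAL1 and *CVAL2 pointing at x and y; an expression such as
   (x < y) || (x == z) involves three values and is rejected.  */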
2374 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2376 enum tree_code code = TREE_CODE (arg);
2377 char class = TREE_CODE_CLASS (code);
2379 /* We can handle some of the 'e' cases here. */
2380 if (class == 'e' && code == TRUTH_NOT_EXPR)
2382 else if (class == 'e'
2383 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2384 || code == COMPOUND_EXPR))
2387 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2388 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2390 /* If we've already found a CVAL1 or CVAL2, this expression is
2391 too complex to handle. */
2392 if (*cval1 || *cval2)
2402 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2405 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2406 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2407 cval1, cval2, save_p));
2413 if (code == COND_EXPR)
2414 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2415 cval1, cval2, save_p)
2416 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2417 cval1, cval2, save_p)
2418 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2419 cval1, cval2, save_p));
2423 /* First see if we can handle the first operand, then the second. For
2424 the second operand, we know *CVAL1 can't be zero. It must be that
2425 one side of the comparison is each of the values; test for the
2426 case where this isn't true by failing if the two operands are the same. */
2429 if (operand_equal_p (TREE_OPERAND (arg, 0),
2430 TREE_OPERAND (arg, 1), 0))
2434 *cval1 = TREE_OPERAND (arg, 0);
2435 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2437 else if (*cval2 == 0)
2438 *cval2 = TREE_OPERAND (arg, 0);
2439 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2444 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2446 else if (*cval2 == 0)
2447 *cval2 = TREE_OPERAND (arg, 1);
2448 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2460 /* ARG is a tree that is known to contain just arithmetic operations and
2461 comparisons. Evaluate the operations in the tree substituting NEW0 for
2462 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
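/* For example, eval_subst applied to (x < y) || (x == y) with OLD0 = x,
   NEW0 = 1, OLD1 = y, NEW1 = 0 rebuilds the tree as (1 < 0) || (1 == 0),
   which fold then reduces to a constant.  */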
2466 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2468 tree type = TREE_TYPE (arg);
2469 enum tree_code code = TREE_CODE (arg);
2470 char class = TREE_CODE_CLASS (code);
2472 /* We can handle some of the 'e' cases here. */
2473 if (class == 'e' && code == TRUTH_NOT_EXPR)
2475 else if (class == 'e'
2476 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2482 return fold (build1 (code, type,
2483 eval_subst (TREE_OPERAND (arg, 0),
2484 old0, new0, old1, new1)));
2487 return fold (build (code, type,
2488 eval_subst (TREE_OPERAND (arg, 0),
2489 old0, new0, old1, new1),
2490 eval_subst (TREE_OPERAND (arg, 1),
2491 old0, new0, old1, new1)));
2497 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2500 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2503 return fold (build (code, type,
2504 eval_subst (TREE_OPERAND (arg, 0),
2505 old0, new0, old1, new1),
2506 eval_subst (TREE_OPERAND (arg, 1),
2507 old0, new0, old1, new1),
2508 eval_subst (TREE_OPERAND (arg, 2),
2509 old0, new0, old1, new1)));
2513 /* Fall through - ??? */
2517 tree arg0 = TREE_OPERAND (arg, 0);
2518 tree arg1 = TREE_OPERAND (arg, 1);
2520 /* We need to check both for exact equality and tree equality. The
2521 former will be true if the operand has a side-effect. In that
2522 case, we know the operand occurred exactly once. */
2524 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2526 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2529 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2531 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2534 return fold (build (code, type, arg0, arg1));
2542 /* Return a tree for the case when the result of an expression is RESULT
2543 converted to TYPE and OMITTED was previously an operand of the expression
2544 but is now not needed (e.g., we folded OMITTED * 0).
2546 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2547 the conversion of RESULT to TYPE. */
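/* For instance, when folding f () * 0 the call f () is the OMITTED operand:
   it still has side effects, so the result is the COMPOUND_EXPR (f (), 0)
   rather than plain 0.  */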
2550 omit_one_operand (tree type, tree result, tree omitted)
2552 tree t = fold_convert (type, result);
2554 if (TREE_SIDE_EFFECTS (omitted))
2555 return build (COMPOUND_EXPR, type, omitted, t);
2557 return non_lvalue (t);
2560 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2563 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2565 tree t = fold_convert (type, result);
2567 if (TREE_SIDE_EFFECTS (omitted))
2568 return build (COMPOUND_EXPR, type, omitted, t);
2570 return pedantic_non_lvalue (t);
2573 /* Return a simplified tree node for the truth-negation of ARG. This
2574 never alters ARG itself. We assume that ARG is an operation that
2575 returns a truth value (0 or 1). */
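/* For example, a < b becomes a >= b (floating-point non-equality
   comparisons are instead wrapped in a TRUTH_NOT_EXPR, as noted below),
   and !(a && b) becomes !a || !b through the TRUTH_ANDIF_EXPR case.  */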
2578 invert_truthvalue (tree arg)
2580 tree type = TREE_TYPE (arg);
2581 enum tree_code code = TREE_CODE (arg);
2583 if (code == ERROR_MARK)
2586 /* If this is a comparison, we can simply invert it, except for
2587 floating-point non-equality comparisons, in which case we just
2588 enclose a TRUTH_NOT_EXPR around what we have. */
2590 if (TREE_CODE_CLASS (code) == '<')
2592 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2593 && !flag_unsafe_math_optimizations
2596 return build1 (TRUTH_NOT_EXPR, type, arg);
2598 return build (invert_tree_comparison (code), type,
2599 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2605 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2607 case TRUTH_AND_EXPR:
2608 return build (TRUTH_OR_EXPR, type,
2609 invert_truthvalue (TREE_OPERAND (arg, 0)),
2610 invert_truthvalue (TREE_OPERAND (arg, 1)));
2613 return build (TRUTH_AND_EXPR, type,
2614 invert_truthvalue (TREE_OPERAND (arg, 0)),
2615 invert_truthvalue (TREE_OPERAND (arg, 1)));
2617 case TRUTH_XOR_EXPR:
2618 /* Here we can invert either operand. We invert the first operand
2619 unless the second operand is a TRUTH_NOT_EXPR in which case our
2620 result is the XOR of the first operand with the inside of the
2621 negation of the second operand. */
2623 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2624 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2625 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2627 return build (TRUTH_XOR_EXPR, type,
2628 invert_truthvalue (TREE_OPERAND (arg, 0)),
2629 TREE_OPERAND (arg, 1));
2631 case TRUTH_ANDIF_EXPR:
2632 return build (TRUTH_ORIF_EXPR, type,
2633 invert_truthvalue (TREE_OPERAND (arg, 0)),
2634 invert_truthvalue (TREE_OPERAND (arg, 1)));
2636 case TRUTH_ORIF_EXPR:
2637 return build (TRUTH_ANDIF_EXPR, type,
2638 invert_truthvalue (TREE_OPERAND (arg, 0)),
2639 invert_truthvalue (TREE_OPERAND (arg, 1)));
2641 case TRUTH_NOT_EXPR:
2642 return TREE_OPERAND (arg, 0);
2645 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2646 invert_truthvalue (TREE_OPERAND (arg, 1)),
2647 invert_truthvalue (TREE_OPERAND (arg, 2)));
2650 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2651 invert_truthvalue (TREE_OPERAND (arg, 1)));
2653 case WITH_RECORD_EXPR:
2654 return build (WITH_RECORD_EXPR, type,
2655 invert_truthvalue (TREE_OPERAND (arg, 0)),
2656 TREE_OPERAND (arg, 1));
2658 case NON_LVALUE_EXPR:
2659 return invert_truthvalue (TREE_OPERAND (arg, 0));
2664 return build1 (TREE_CODE (arg), type,
2665 invert_truthvalue (TREE_OPERAND (arg, 0)));
2668 if (!integer_onep (TREE_OPERAND (arg, 1)))
2670 return build (EQ_EXPR, type, arg,
2671 fold_convert (type, integer_zero_node));
2674 return build1 (TRUTH_NOT_EXPR, type, arg);
2676 case CLEANUP_POINT_EXPR:
2677 return build1 (CLEANUP_POINT_EXPR, type,
2678 invert_truthvalue (TREE_OPERAND (arg, 0)));
2683 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2685 return build1 (TRUTH_NOT_EXPR, type, arg);
2688 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2689 operands are another bit-wise operation with a common input. If so,
2690 distribute the bit operations to save an operation and possibly two if
2691 constants are involved. For example, convert
2692 (A | B) & (A | C) into A | (B & C)
2693 Further simplification will occur if B and C are constants.
2695 If this optimization cannot be done, 0 will be returned. */
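/* E.g., (X | 3) & (X | 5) is rewritten as X | (3 & 5), which the inner
   fold reduces to X | 1.  */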
2698 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2703 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2704 || TREE_CODE (arg0) == code
2705 || (TREE_CODE (arg0) != BIT_AND_EXPR
2706 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2709 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2711 common = TREE_OPERAND (arg0, 0);
2712 left = TREE_OPERAND (arg0, 1);
2713 right = TREE_OPERAND (arg1, 1);
2715 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2717 common = TREE_OPERAND (arg0, 0);
2718 left = TREE_OPERAND (arg0, 1);
2719 right = TREE_OPERAND (arg1, 0);
2721 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2723 common = TREE_OPERAND (arg0, 1);
2724 left = TREE_OPERAND (arg0, 0);
2725 right = TREE_OPERAND (arg1, 1);
2727 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2729 common = TREE_OPERAND (arg0, 1);
2730 left = TREE_OPERAND (arg0, 0);
2731 right = TREE_OPERAND (arg1, 0);
2736 return fold (build (TREE_CODE (arg0), type, common,
2737 fold (build (code, type, left, right))));
2740 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2741 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2744 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2747 tree result = build (BIT_FIELD_REF, type, inner,
2748 size_int (bitsize), bitsize_int (bitpos));
2750 TREE_UNSIGNED (result) = unsignedp;
2755 /* Optimize a bit-field compare.
2757 There are two cases: First is a compare against a constant and the
2758 second is a comparison of two items where the fields are at the same
2759 bit position relative to the start of a chunk (byte, halfword, word)
2760 large enough to contain it. In these cases we can avoid the shift
2761 implicit in bitfield extractions.
2763 For constants, we emit a compare of the shifted constant with the
2764 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2765 compared. For two fields at the same position, we do the ANDs with the
2766 similar mask and compare the result of the ANDs.
2768 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2769 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2770 are the left and right operands of the comparison, respectively.
2772 If the optimization described above can be done, we return the resulting
2773 tree. Otherwise we return zero. */
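/* As a sketch of the constant case: for a comparison such as s.f == 3,
   where f is a narrow bit-field, the result compares (WORD & MASK)
   against the constant 3 shifted into the field's position, so no
   extraction shift is emitted at run time.  */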
2776 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2779 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2780 tree type = TREE_TYPE (lhs);
2781 tree signed_type, unsigned_type;
2782 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2783 enum machine_mode lmode, rmode, nmode;
2784 int lunsignedp, runsignedp;
2785 int lvolatilep = 0, rvolatilep = 0;
2786 tree linner, rinner = NULL_TREE;
2790 /* Get all the information about the extractions being done. If the bit size
2791 is the same as the size of the underlying object, we aren't doing an
2792 extraction at all and so can do nothing. We also don't want to
2793 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2794 then will no longer be able to replace it. */
2795 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2796 &lunsignedp, &lvolatilep);
2797 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2798 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2803 /* If this is not a constant, we can only do something if bit positions,
2804 sizes, and signedness are the same. */
2805 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2806 &runsignedp, &rvolatilep);
2808 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2809 || lunsignedp != runsignedp || offset != 0
2810 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2814 /* See if we can find a mode to refer to this field. We should be able to,
2815 but fail if we can't. */
2816 nmode = get_best_mode (lbitsize, lbitpos,
2817 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2818 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2819 TYPE_ALIGN (TREE_TYPE (rinner))),
2820 word_mode, lvolatilep || rvolatilep);
2821 if (nmode == VOIDmode)
2824 /* Set signed and unsigned types of the precision of this mode for the shifts below. */
2826 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2827 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2829 /* Compute the bit position and size for the new reference and our offset
2830 within it. If the new reference is the same size as the original, we
2831 won't optimize anything, so return zero. */
2832 nbitsize = GET_MODE_BITSIZE (nmode);
2833 nbitpos = lbitpos & ~ (nbitsize - 1);
2835 if (nbitsize == lbitsize)
2838 if (BYTES_BIG_ENDIAN)
2839 lbitpos = nbitsize - lbitsize - lbitpos;
2841 /* Make the mask to be used against the extracted field. */
2842 mask = build_int_2 (~0, ~0);
2843 TREE_TYPE (mask) = unsigned_type;
2844 force_fit_type (mask, 0);
2845 mask = fold_convert (unsigned_type, mask);
2846 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2847 mask = const_binop (RSHIFT_EXPR, mask,
2848 size_int (nbitsize - lbitsize - lbitpos), 0);
2851 /* If not comparing with constant, just rework the comparison and return. */
2853 return build (code, compare_type,
2854 build (BIT_AND_EXPR, unsigned_type,
2855 make_bit_field_ref (linner, unsigned_type,
2856 nbitsize, nbitpos, 1),
2858 build (BIT_AND_EXPR, unsigned_type,
2859 make_bit_field_ref (rinner, unsigned_type,
2860 nbitsize, nbitpos, 1),
2863 /* Otherwise, we are handling the constant case. See if the constant is too
2864 big for the field. Warn and return a tree for 0 (false) if so. We do
2865 this not only for its own sake, but to avoid having to test for this
2866 error case below. If we didn't, we might generate wrong code.
2868 For unsigned fields, the constant shifted right by the field length should
2869 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
2874 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2875 fold_convert (unsigned_type, rhs),
2876 size_int (lbitsize), 0)))
2878 warning ("comparison is always %d due to width of bit-field",
2880 return fold_convert (compare_type,
2882 ? integer_one_node : integer_zero_node));
2887 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2888 size_int (lbitsize - 1), 0);
2889 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2891 warning ("comparison is always %d due to width of bit-field",
2893 return fold_convert (compare_type,
2895 ? integer_one_node : integer_zero_node));
2899 /* Single-bit compares should always be against zero. */
2900 if (lbitsize == 1 && ! integer_zerop (rhs))
2902 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2903 rhs = fold_convert (type, integer_zero_node);
2906 /* Make a new bitfield reference, shift the constant over the
2907 appropriate number of bits and mask it with the computed mask
2908 (in case this was a signed field). If we changed it, make a new one. */
2909 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2912 TREE_SIDE_EFFECTS (lhs) = 1;
2913 TREE_THIS_VOLATILE (lhs) = 1;
2916 rhs = fold (const_binop (BIT_AND_EXPR,
2917 const_binop (LSHIFT_EXPR,
2918 fold_convert (unsigned_type, rhs),
2919 size_int (lbitpos), 0),
2922 return build (code, compare_type,
2923 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2927 /* Subroutine for fold_truthop: decode a field reference.
2929 If EXP is a comparison reference, we return the innermost reference.
2931 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2932 set to the starting bit number.
2934 If the innermost field can be completely contained in a mode-sized
2935 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2937 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2938 otherwise it is not changed.
2940 *PUNSIGNEDP is set to the signedness of the field.
2942 *PMASK is set to the mask used. This is either contained in a
2943 BIT_AND_EXPR or derived from the width of the field.
2945 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2947 Return 0 if this is not a component reference or is one that we can't
2948 do anything with. */
2951 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2952 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2953 int *punsignedp, int *pvolatilep,
2954 tree *pmask, tree *pand_mask)
2956 tree outer_type = 0;
2958 tree mask, inner, offset;
2960 unsigned int precision;
2962 /* All the optimizations using this function assume integer fields.
2963 There are problems with FP fields since the type_for_size call
2964 below can fail for, e.g., XFmode. */
2965 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2968 /* We are interested in the bare arrangement of bits, so strip everything
2969 that doesn't affect the machine mode. However, record the type of the
2970 outermost expression if it may matter below. */
2971 if (TREE_CODE (exp) == NOP_EXPR
2972 || TREE_CODE (exp) == CONVERT_EXPR
2973 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2974 outer_type = TREE_TYPE (exp);
2977 if (TREE_CODE (exp) == BIT_AND_EXPR)
2979 and_mask = TREE_OPERAND (exp, 1);
2980 exp = TREE_OPERAND (exp, 0);
2981 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2982 if (TREE_CODE (and_mask) != INTEGER_CST)
2986 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2987 punsignedp, pvolatilep);
2988 if ((inner == exp && and_mask == 0)
2989 || *pbitsize < 0 || offset != 0
2990 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2993 /* If the number of bits in the reference is the same as the bitsize of
2994 the outer type, then the outer type gives the signedness. Otherwise
2995 (in case of a small bitfield) the signedness is unchanged. */
2996 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
2997 *punsignedp = TREE_UNSIGNED (outer_type);
2999 /* Compute the mask to access the bitfield. */
3000 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
3001 precision = TYPE_PRECISION (unsigned_type);
3003 mask = build_int_2 (~0, ~0);
3004 TREE_TYPE (mask) = unsigned_type;
3005 force_fit_type (mask, 0);
3006 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3007 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3009 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3011 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3012 fold_convert (unsigned_type, and_mask), mask));
3015 *pand_mask = and_mask;
3019 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bit positions. */
3023 all_ones_mask_p (tree mask, int size)
3025 tree type = TREE_TYPE (mask);
3026 unsigned int precision = TYPE_PRECISION (type);
3029 tmask = build_int_2 (~0, ~0);
3030 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
3031 force_fit_type (tmask, 0);
3033 tree_int_cst_equal (mask,
3034 const_binop (RSHIFT_EXPR,
3035 const_binop (LSHIFT_EXPR, tmask,
3036 size_int (precision - size),
3038 size_int (precision - size), 0));
3041 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3042 represents the sign bit of EXP's type. If EXP represents a sign
3043 or zero extension, also test VAL against the unextended type.
3044 The return value is the (sub)expression whose sign bit is VAL,
3045 or NULL_TREE otherwise. */
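/* For a 32-bit EXP, for instance, VAL matches only if its low 32 bits are
   1 << 31 (0x80000000); for a narrower operand widened by a NOP_EXPR, the
   unextended type is checked as well.  */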
3048 sign_bit_p (tree exp, tree val)
3050 unsigned HOST_WIDE_INT mask_lo, lo;
3051 HOST_WIDE_INT mask_hi, hi;
3055 /* Tree EXP must have an integral type. */
3056 t = TREE_TYPE (exp);
3057 if (! INTEGRAL_TYPE_P (t))
3060 /* Tree VAL must be an integer constant. */
3061 if (TREE_CODE (val) != INTEGER_CST
3062 || TREE_CONSTANT_OVERFLOW (val))
3065 width = TYPE_PRECISION (t);
3066 if (width > HOST_BITS_PER_WIDE_INT)
3068 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3071 mask_hi = ((unsigned HOST_WIDE_INT) -1
3072 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3078 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3081 mask_lo = ((unsigned HOST_WIDE_INT) -1
3082 >> (HOST_BITS_PER_WIDE_INT - width));
3085 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3086 treat VAL as if it were unsigned. */
3087 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3088 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3091 /* Handle extension from a narrower type. */
3092 if (TREE_CODE (exp) == NOP_EXPR
3093 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3094 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3099 /* Subroutine for fold_truthop: determine if an operand is simple enough
3100 to be evaluated unconditionally. */
3103 simple_operand_p (tree exp)
3105 /* Strip any conversions that don't change the machine mode. */
3106 while ((TREE_CODE (exp) == NOP_EXPR
3107 || TREE_CODE (exp) == CONVERT_EXPR)
3108 && (TYPE_MODE (TREE_TYPE (exp))
3109 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3110 exp = TREE_OPERAND (exp, 0);
3112 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3114 && ! TREE_ADDRESSABLE (exp)
3115 && ! TREE_THIS_VOLATILE (exp)
3116 && ! DECL_NONLOCAL (exp)
3117 /* Don't regard global variables as simple. They may be
3118 allocated in ways unknown to the compiler (shared memory,
3119 #pragma weak, etc). */
3120 && ! TREE_PUBLIC (exp)
3121 && ! DECL_EXTERNAL (exp)
3122 /* Loading a static variable is unduly expensive, but global
3123 registers aren't expensive. */
3124 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3127 /* The following functions are subroutines to fold_range_test and allow it to
3128 try to change a logical combination of comparisons into a range test.
3131 For example, X == 2 || X == 3 || X == 4 || X == 5
3135 is converted to (unsigned) (X - 2) <= 3.
3137 We describe each set of comparisons as being either inside or outside
3138 a range, using a variable named like IN_P, and then describe the
3139 range with a lower and upper bound. If one of the bounds is omitted,
3140 it represents either the highest or lowest value of the type.
3142 In the comments below, we represent a range by two numbers in brackets
3143 preceded by a "+" to designate being inside that range, or a "-" to
3144 designate being outside that range, so the condition can be inverted by
3145 flipping the prefix. An omitted bound is represented by a "-". For
3146 example, "- [-, 10]" means being outside the range starting at the lowest
3147 possible value and ending at 10, in other words, being greater than 10.
3148 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3151 We set up things so that the missing bounds are handled in a consistent
3152 manner so neither a missing bound nor "true" and "false" need to be
3153 handled using a special case. */
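/* In this notation, X >= 2 && X <= 5 is "+ [2, 5]", X != 0 is "- [0, 0]",
   and X > 10 is "- [-, 10]" as in the example above.  */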
3155 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3156 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3157 and UPPER1_P are nonzero if the respective argument is an upper bound
3158 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3159 must be specified for a comparison. ARG1 will be converted to ARG0's
3160 type if both are specified. */
3163 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3164 tree arg1, int upper1_p)
3170 /* If neither arg represents infinity, do the normal operation.
3171 Else, if not a comparison, return infinity. Else handle the special
3172 comparison rules. Note that most of the cases below won't occur, but
3173 are handled for consistency. */
3175 if (arg0 != 0 && arg1 != 0)
3177 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3178 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3180 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3183 if (TREE_CODE_CLASS (code) != '<')
3186 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3187 for neither. In real maths, we cannot assume open ended ranges are
3188 the same. But, this is computer arithmetic, where numbers are finite.
3189 We can therefore make the transformation of any unbounded range with
3190 the value Z, Z being greater than any representable number. This permits
3191 us to treat unbounded ranges as equal. */
3192 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3193 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3197 result = sgn0 == sgn1;
3200 result = sgn0 != sgn1;
3203 result = sgn0 < sgn1;
3206 result = sgn0 <= sgn1;
3209 result = sgn0 > sgn1;
3212 result = sgn0 >= sgn1;
3218 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3221 /* Given EXP, a logical expression, set the range it is testing into
3222 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3223 actually being tested. *PLOW and *PHIGH will be made of the same type
3224 as the returned expression. If EXP is not a comparison, we will most
3225 likely not be returning a useful value and range. */
3228 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3230 enum tree_code code;
3231 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3232 tree orig_type = NULL_TREE;
3234 tree low, high, n_low, n_high;
3236 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3237 and see if we can refine the range. Some of the cases below may not
3238 happen, but it doesn't seem worth worrying about this. We "continue"
3239 the outer loop when we've changed something; otherwise we "break"
3240 the switch, which will "break" the while. */
3243 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3247 code = TREE_CODE (exp);
3249 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3251 if (first_rtl_op (code) > 0)
3252 arg0 = TREE_OPERAND (exp, 0);
3253 if (TREE_CODE_CLASS (code) == '<'
3254 || TREE_CODE_CLASS (code) == '1'
3255 || TREE_CODE_CLASS (code) == '2')
3256 type = TREE_TYPE (arg0);
3257 if (TREE_CODE_CLASS (code) == '2'
3258 || TREE_CODE_CLASS (code) == '<'
3259 || (TREE_CODE_CLASS (code) == 'e'
3260 && TREE_CODE_LENGTH (code) > 1))
3261 arg1 = TREE_OPERAND (exp, 1);
3264 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3265 lose a cast by accident. */
3266 if (type != NULL_TREE && orig_type == NULL_TREE)
3271 case TRUTH_NOT_EXPR:
3272 in_p = ! in_p, exp = arg0;
3275 case EQ_EXPR: case NE_EXPR:
3276 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3277 /* We can only do something if the range is testing for zero
3278 and if the second operand is an integer constant. Note that
3279 saying something is "in" the range we make is done by
3280 complementing IN_P since it will set in the initial case of
3281 being not equal to zero; "out" is leaving it alone. */
3282 if (low == 0 || high == 0
3283 || ! integer_zerop (low) || ! integer_zerop (high)
3284 || TREE_CODE (arg1) != INTEGER_CST)
3289 case NE_EXPR: /* - [c, c] */
3292 case EQ_EXPR: /* + [c, c] */
3293 in_p = ! in_p, low = high = arg1;
3295 case GT_EXPR: /* - [-, c] */
3296 low = 0, high = arg1;
3298 case GE_EXPR: /* + [c, -] */
3299 in_p = ! in_p, low = arg1, high = 0;
3301 case LT_EXPR: /* - [c, -] */
3302 low = arg1, high = 0;
3304 case LE_EXPR: /* + [-, c] */
3305 in_p = ! in_p, low = 0, high = arg1;
3313 /* If this is an unsigned comparison, we also know that EXP is
3314 greater than or equal to zero. We base the range tests we make
3315 on that fact, so we record it here so we can parse existing range tests. */
3317 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3319 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3320 1, fold_convert (type, integer_zero_node),
3324 in_p = n_in_p, low = n_low, high = n_high;
3326 /* If the high bound is missing, but we have a nonzero low
3327 bound, reverse the range so it goes from zero to the low bound minus 1. */
3329 if (high == 0 && low && ! integer_zerop (low))
3332 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3333 integer_one_node, 0);
3334 low = fold_convert (type, integer_zero_node);
3340 /* (-x) IN [a,b] -> x in [-b, -a] */
3341 n_low = range_binop (MINUS_EXPR, type,
3342 fold_convert (type, integer_zero_node),
3344 n_high = range_binop (MINUS_EXPR, type,
3345 fold_convert (type, integer_zero_node),
3347 low = n_low, high = n_high;
3353 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3354 fold_convert (type, integer_one_node));
3357 case PLUS_EXPR: case MINUS_EXPR:
3358 if (TREE_CODE (arg1) != INTEGER_CST)
3361 /* If EXP is signed, any overflow in the computation is undefined,
3362 so we don't worry about it so long as our computations on
3363 the bounds don't overflow. For unsigned, overflow is defined
3364 and this is exactly the right thing. */
3365 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3366 type, low, 0, arg1, 0);
3367 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3368 type, high, 1, arg1, 0);
3369 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3370 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3373 /* Check for an unsigned range which has wrapped around the maximum
3374 value thus making n_high < n_low, and normalize it. */
3375 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3377 low = range_binop (PLUS_EXPR, type, n_high, 0,
3378 integer_one_node, 0);
3379 high = range_binop (MINUS_EXPR, type, n_low, 0,
3380 integer_one_node, 0);
3382 /* If the range is of the form +/- [ x+1, x ], we won't
3383 be able to normalize it. But then, it represents the
3384 whole range or the empty set, so make it +/- [ -, - ]. */
3386 if (tree_int_cst_equal (n_low, low)
3387 && tree_int_cst_equal (n_high, high))
3393 low = n_low, high = n_high;
3398 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3399 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3402 if (! INTEGRAL_TYPE_P (type)
3403 || (low != 0 && ! int_fits_type_p (low, type))
3404 || (high != 0 && ! int_fits_type_p (high, type)))
3407 n_low = low, n_high = high;
3410 n_low = fold_convert (type, n_low);
3413 n_high = fold_convert (type, n_high);
3415 /* If we're converting from an unsigned to a signed type,
3416 we will be doing the comparison as unsigned. The tests above
3417 have already verified that LOW and HIGH are both positive.
3419 So we have to make sure that the original unsigned value will
3420 be interpreted as positive. */
3421 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3423 tree equiv_type = (*lang_hooks.types.type_for_mode)
3424 (TYPE_MODE (type), 1);
3427 /* A range without an upper bound is, naturally, unbounded.
3428 Since convert would have cropped a very large value, use
3429 the max value for the destination type. */
3431 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3432 : TYPE_MAX_VALUE (type);
3434 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3435 high_positive = fold (build (RSHIFT_EXPR, type,
3439 integer_one_node)));
3441 /* If the low bound is specified, "and" the range with the
3442 range for which the original unsigned value will be positive. */
3446 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3447 1, n_low, n_high, 1,
3448 fold_convert (type, integer_zero_node),
3452 in_p = (n_in_p == in_p);
3456 /* Otherwise, "or" the range with the range of the input
3457 that will be interpreted as negative. */
3458 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3459 0, n_low, n_high, 1,
3460 fold_convert (type, integer_zero_node),
3464 in_p = (in_p != n_in_p);
3469 low = n_low, high = n_high;
3479 /* If EXP is a constant, we can evaluate whether this is true or false. */
3480 if (TREE_CODE (exp) == INTEGER_CST)
3482 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3484 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3490 *pin_p = in_p, *plow = low, *phigh = high;
3494 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3495 type, TYPE, return an expression to test if EXP is in (or out of, depending
3496 on IN_P) the range. */
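/* For instance, with IN_P nonzero and bounds [2, 5] the result is a
   single test of the form (unsigned) (EXP - 2) <= 3: the subtraction at
   the end of the function reduces the range to start at zero and the
   recursive call emits the unsigned comparison.  */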
3499 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3501 tree etype = TREE_TYPE (exp);
3505 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3506 return invert_truthvalue (value);
3508 if (low == 0 && high == 0)
3509 return fold_convert (type, integer_one_node);
3512 return fold (build (LE_EXPR, type, exp, high));
3515 return fold (build (GE_EXPR, type, exp, low));
3517 if (operand_equal_p (low, high, 0))
3518 return fold (build (EQ_EXPR, type, exp, low));
3520 if (integer_zerop (low))
3522 if (! TREE_UNSIGNED (etype))
3524 etype = (*lang_hooks.types.unsigned_type) (etype);
3525 high = fold_convert (etype, high);
3526 exp = fold_convert (etype, exp);
3528 return build_range_check (type, exp, 1, 0, high);
3531 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3532 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3534 unsigned HOST_WIDE_INT lo;
3538 prec = TYPE_PRECISION (etype);
3539 if (prec <= HOST_BITS_PER_WIDE_INT)
3542 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3546 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3547 lo = (unsigned HOST_WIDE_INT) -1;
3550 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3552 if (TREE_UNSIGNED (etype))
3554 etype = (*lang_hooks.types.signed_type) (etype);
3555 exp = fold_convert (etype, exp);
3557 return fold (build (GT_EXPR, type, exp,
3558 fold_convert (etype, integer_zero_node)));
3562 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3563 && ! TREE_OVERFLOW (value))
3564 return build_range_check (type,
3565 fold (build (MINUS_EXPR, etype, exp, low)),
3566 1, fold_convert (etype, integer_zero_node),
3572 /* Given two ranges, see if we can merge them into one. Return 1 if we
3573 can, 0 if we can't. Set the output range into the specified parameters. */
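/* For example, merging the two included ranges + [2, 5] and + [4, 9]
   (both IN_P flags set) yields the single range + [4, 5]; disjoint
   included ranges collapse to the always-false range instead.  */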
3576 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3577 tree high0, int in1_p, tree low1, tree high1)
3585 int lowequal = ((low0 == 0 && low1 == 0)
3586 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3587 low0, 0, low1, 0)));
3588 int highequal = ((high0 == 0 && high1 == 0)
3589 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3590 high0, 1, high1, 1)));
3592 /* Make range 0 be the range that starts first, or ends last if they
3593 start at the same value. Swap them if it isn't. */
3594 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3597 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3598 high1, 1, high0, 1))))
3600 temp = in0_p, in0_p = in1_p, in1_p = temp;
3601 tem = low0, low0 = low1, low1 = tem;
3602 tem = high0, high0 = high1, high1 = tem;
3605 /* Now flag two cases, whether the ranges are disjoint or whether the
3606 second range is totally subsumed in the first. Note that the tests
3607 below are simplified by the ones above. */
3608 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3609 high0, 1, low1, 0));
3610 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3611 high1, 1, high0, 1));
3613 /* We now have four cases, depending on whether we are including or
3614 excluding the two ranges. */
3617 /* If they don't overlap, the result is false. If the second range
3618 is a subset it is the result. Otherwise, the range is from the start
3619 of the second to the end of the first. */
3621 in_p = 0, low = high = 0;
3623 in_p = 1, low = low1, high = high1;
3625 in_p = 1, low = low1, high = high0;
3628 else if (in0_p && ! in1_p)
3630 /* If they don't overlap, the result is the first range. If they are
3631 equal, the result is false. If the second range is a subset of the
3632 first, and the ranges begin at the same place, we go from just after
3633 the end of the first range to the end of the second. If the second
3634 range is not a subset of the first, or if it is a subset and both
3635 ranges end at the same place, the range starts at the start of the
3636 first range and ends just before the second range.
3637 Otherwise, we can't describe this as a single range. */
3639 in_p = 1, low = low0, high = high0;
3640 else if (lowequal && highequal)
3641 in_p = 0, low = high = 0;
3642 else if (subset && lowequal)
3644 in_p = 1, high = high0;
3645 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3646 integer_one_node, 0);
3648 else if (! subset || highequal)
3650 in_p = 1, low = low0;
3651 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3652 integer_one_node, 0);
3658 else if (! in0_p && in1_p)
3660 /* If they don't overlap, the result is the second range. If the second
3661 is a subset of the first, the result is false. Otherwise,
3662 the range starts just after the first range and ends at the
3663 end of the second. */
3665 in_p = 1, low = low1, high = high1;
3666 else if (subset || highequal)
3667 in_p = 0, low = high = 0;
3670 in_p = 1, high = high1;
3671 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3672 integer_one_node, 0);
3678 /* The case where we are excluding both ranges. Here the complex case
3679 is if they don't overlap. In that case, the only time we have a
3680 range is if they are adjacent. If the second is a subset of the
3681 first, the result is the first. Otherwise, the range to exclude
3682 starts at the beginning of the first range and ends at the end of the second. */
3686 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3687 range_binop (PLUS_EXPR, NULL_TREE,
3689 integer_one_node, 1),
3691 in_p = 0, low = low0, high = high1;
3696 in_p = 0, low = low0, high = high0;
3698 in_p = 0, low = low0, high = high1;
3701 *pin_p = in_p, *plow = low, *phigh = high;
3705 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3706 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3709 /* EXP is some logical combination of boolean tests. See if we can
3710 merge it into some range test. Return the new tree if so. */
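/* A classic case is ch >= '0' && ch <= '9': both operands describe ranges
   of the same variable, merge_ranges combines them, and build_range_check
   turns the result into one unsigned comparison.  */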
3713 fold_range_test (tree exp)
3715 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3716 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3717 int in0_p, in1_p, in_p;
3718 tree low0, low1, low, high0, high1, high;
3719 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3720 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3723 /* If this is an OR operation, invert both sides; we will invert
3724 again at the end. */
3726 in0_p = ! in0_p, in1_p = ! in1_p;
3728 /* If both expressions are the same, if we can merge the ranges, and we
3729 can build the range test, return it or it inverted. If one of the
3730 ranges is always true or always false, consider it to be the same
3731 expression as the other. */
3732 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3733 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3735 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3737 : rhs != 0 ? rhs : integer_zero_node,
3739 return or_op ? invert_truthvalue (tem) : tem;
3741 /* On machines where the branch cost is expensive, if this is a
3742 short-circuited branch and the underlying object on both sides
3743 is the same, make a non-short-circuit operation. */
3744 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3745 && lhs != 0 && rhs != 0
3746 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3747 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3748 && operand_equal_p (lhs, rhs, 0))
3750 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3751 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3752 which cases we can't do this. */
3753 if (simple_operand_p (lhs))
3754 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3755 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3756 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3757 TREE_OPERAND (exp, 1));
3759 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3760 && ! CONTAINS_PLACEHOLDER_P (lhs))
3762 tree common = save_expr (lhs);
3764 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3765 or_op ? ! in0_p : in0_p,
3767 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3768 or_op ? ! in1_p : in1_p,
3770 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3771 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3772 TREE_TYPE (exp), lhs, rhs);
3779 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3780 bit value. Arrange things so the extra bits will be set to zero if and
3781 only if C is sign-extended to its full width. If MASK is nonzero,
3782 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3785 unextend (tree c, int p, int unsignedp, tree mask)
3787 tree type = TREE_TYPE (c);
3788 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3791 if (p == modesize || unsignedp)
3794 /* We work by getting just the sign bit into the low-order bit, then
3795 into the high-order bit, then sign-extend. We then XOR that value with C. */
3797 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3798 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3800 /* We must use a signed type in order to get an arithmetic right shift.
3801 However, we must also avoid introducing accidental overflows, so that
3802 a subsequent call to integer_zerop will work. Hence we must
3803 do the type conversion here. At this point, the constant is either
3804 zero or one, and the conversion to a signed type can never overflow.
3805 We could get an overflow if this conversion is done anywhere else. */
3806 if (TREE_UNSIGNED (type))
3807 temp = fold_convert ((*lang_hooks.types.signed_type) (type), temp);
3809 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3810 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3812 temp = const_binop (BIT_AND_EXPR, temp,
3813 fold_convert (TREE_TYPE (c), mask), 0);
3814 /* If necessary, convert the type back to match the type of C. */
3815 if (TREE_UNSIGNED (type))
3816 temp = fold_convert (type, temp);
3818 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3821 /* Find ways of folding logical expressions of LHS and RHS:
3822 Try to merge two comparisons to the same innermost item.
3823 Look for range tests like "ch >= '0' && ch <= '9'".
3824 Look for combinations of simple terms on machines with expensive branches
3825 and evaluate the RHS unconditionally.
3827 For example, if we have p->a == 2 && p->b == 4 and we can make an
3828 object large enough to span both A and B, we can do this with a comparison
3829 against the object ANDed with the a mask.
3831 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3832 operations to do this with one comparison.
3834 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3835 function and the one above.
3837 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3838 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3840 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
3843 We return the simplified tree or 0 if no optimization is possible. */
3846 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3848 /* If this is the "or" of two comparisons, we can do something if
3849 the comparisons are NE_EXPR. If this is the "and", we can do something
3850 if the comparisons are EQ_EXPR. I.e.,
3851 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3853 WANTED_CODE is this operation code. For single bit fields, we can
3854 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3855 comparison for one-bit fields. */
3857 enum tree_code wanted_code;
3858 enum tree_code lcode, rcode;
3859 tree ll_arg, lr_arg, rl_arg, rr_arg;
3860 tree ll_inner, lr_inner, rl_inner, rr_inner;
3861 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3862 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3863 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3864 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3865 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3866 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3867 enum machine_mode lnmode, rnmode;
3868 tree ll_mask, lr_mask, rl_mask, rr_mask;
3869 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3870 tree l_const, r_const;
3871 tree lntype, rntype, result;
3872 int first_bit, end_bit;
3875 /* Start by getting the comparison codes. Fail if anything is volatile.
3876 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3877 it were surrounded with a NE_EXPR. */
3879 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3882 lcode = TREE_CODE (lhs);
3883 rcode = TREE_CODE (rhs);
3885 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3886 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3888 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3889 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3891 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3894 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3895 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3897 ll_arg = TREE_OPERAND (lhs, 0);
3898 lr_arg = TREE_OPERAND (lhs, 1);
3899 rl_arg = TREE_OPERAND (rhs, 0);
3900 rr_arg = TREE_OPERAND (rhs, 1);
3902 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3903 if (simple_operand_p (ll_arg)
3904 && simple_operand_p (lr_arg)
3905 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3909 if (operand_equal_p (ll_arg, rl_arg, 0)
3910 && operand_equal_p (lr_arg, rr_arg, 0))
3912 int lcompcode, rcompcode;
3914 lcompcode = comparison_to_compcode (lcode);
3915 rcompcode = comparison_to_compcode (rcode);
3916 compcode = (code == TRUTH_AND_EXPR)
3917 ? lcompcode & rcompcode
3918 : lcompcode | rcompcode;
3920 else if (operand_equal_p (ll_arg, rr_arg, 0)
3921 && operand_equal_p (lr_arg, rl_arg, 0))
3923 int lcompcode, rcompcode;
3925 rcode = swap_tree_comparison (rcode);
3926 lcompcode = comparison_to_compcode (lcode);
3927 rcompcode = comparison_to_compcode (rcode);
3928 compcode = (code == TRUTH_AND_EXPR)
3929 ? lcompcode & rcompcode
3930 : lcompcode | rcompcode;
3935 if (compcode == COMPCODE_TRUE)
3936 return fold_convert (truth_type, integer_one_node);
3937 else if (compcode == COMPCODE_FALSE)
3938 return fold_convert (truth_type, integer_zero_node);
3939 else if (compcode != -1)
3940 return build (compcode_to_comparison (compcode),
3941 truth_type, ll_arg, lr_arg);
3944 /* If the RHS can be evaluated unconditionally and its operands are
3945 simple, it wins to evaluate the RHS unconditionally on machines
3946 with expensive branches. In this case, this isn't a comparison
3947 that can be merged. Avoid doing this if the RHS is a floating-point
3948 comparison since those can trap. */
3950 if (BRANCH_COST >= 2
3951 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3952 && simple_operand_p (rl_arg)
3953 && simple_operand_p (rr_arg))
3955 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3956 if (code == TRUTH_OR_EXPR
3957 && lcode == NE_EXPR && integer_zerop (lr_arg)
3958 && rcode == NE_EXPR && integer_zerop (rr_arg)
3959 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3960 return build (NE_EXPR, truth_type,
3961 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3965 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3966 if (code == TRUTH_AND_EXPR
3967 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3968 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3969 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3970 return build (EQ_EXPR, truth_type,
3971 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3975 return build (code, truth_type, lhs, rhs);
3978 /* See if the comparisons can be merged. Then get all the parameters for each side. */
3981 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3982 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3986 ll_inner = decode_field_reference (ll_arg,
3987 &ll_bitsize, &ll_bitpos, &ll_mode,
3988 &ll_unsignedp, &volatilep, &ll_mask,
3990 lr_inner = decode_field_reference (lr_arg,
3991 &lr_bitsize, &lr_bitpos, &lr_mode,
3992 &lr_unsignedp, &volatilep, &lr_mask,
3994 rl_inner = decode_field_reference (rl_arg,
3995 &rl_bitsize, &rl_bitpos, &rl_mode,
3996 &rl_unsignedp, &volatilep, &rl_mask,
3998 rr_inner = decode_field_reference (rr_arg,
3999 &rr_bitsize, &rr_bitpos, &rr_mode,
4000 &rr_unsignedp, &volatilep, &rr_mask,
4003 /* It must be true that the inner operation on the lhs of each
4004 comparison must be the same if we are to be able to do anything.
4005 Then see if we have constants. If not, the same must be true for the rhs. */
4007 if (volatilep || ll_inner == 0 || rl_inner == 0
4008 || ! operand_equal_p (ll_inner, rl_inner, 0))
4011 if (TREE_CODE (lr_arg) == INTEGER_CST
4012 && TREE_CODE (rr_arg) == INTEGER_CST)
4013 l_const = lr_arg, r_const = rr_arg;
4014 else if (lr_inner == 0 || rr_inner == 0
4015 || ! operand_equal_p (lr_inner, rr_inner, 0))
4018 l_const = r_const = 0;
4020 /* If either comparison code is not correct for our logical operation,
4021 fail. However, we can convert a one-bit comparison against zero into
4022 the opposite comparison against that bit being set in the field. */
4024 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4025 if (lcode != wanted_code)
4027 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4029 /* Make the left operand unsigned, since we are only interested
4030 in the value of one bit. Otherwise we are doing the wrong thing below. */
4039 /* This is analogous to the code for l_const above. */
4040 if (rcode != wanted_code)
4042 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4051 /* After this point all optimizations will generate bit-field
4052 references, which we might not want. */
4053 if (! (*lang_hooks.can_use_bit_fields_p) ())
4056 /* See if we can find a mode that contains both fields being compared on
4057 the left. If we can't, fail. Otherwise, update all constants and masks
4058 to be relative to a field of that size. */
4059 first_bit = MIN (ll_bitpos, rl_bitpos);
4060 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4061 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4062 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4064 if (lnmode == VOIDmode)
4067 lnbitsize = GET_MODE_BITSIZE (lnmode);
4068 lnbitpos = first_bit & ~ (lnbitsize - 1);
4069 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
4070 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4072 if (BYTES_BIG_ENDIAN)
4074 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4075 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4078 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4079 size_int (xll_bitpos), 0);
4080 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4081 size_int (xrl_bitpos), 0);
4085 l_const = fold_convert (lntype, l_const);
4086 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4087 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4088 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4089 fold (build1 (BIT_NOT_EXPR,
4093 warning ("comparison is always %d", wanted_code == NE_EXPR);
4095 return fold_convert (truth_type,
4096 wanted_code == NE_EXPR
4097 ? integer_one_node : integer_zero_node);
4102 r_const = fold_convert (lntype, r_const);
4103 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4104 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4105 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4106 fold (build1 (BIT_NOT_EXPR,
4110 warning ("comparison is always %d", wanted_code == NE_EXPR);
4112 return fold_convert (truth_type,
4113 wanted_code == NE_EXPR
4114 ? integer_one_node : integer_zero_node);
4118 /* If the right sides are not constant, do the same for it. Also,
4119 disallow this optimization if a size or signedness mismatch occurs
4120 between the left and right sides. */
4123 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4124 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4125 /* Make sure the two fields on the right
4126 correspond to the left without being swapped. */
4127 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4130 first_bit = MIN (lr_bitpos, rr_bitpos);
4131 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4132 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4133 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4135 if (rnmode == VOIDmode)
4138 rnbitsize = GET_MODE_BITSIZE (rnmode);
4139 rnbitpos = first_bit & ~ (rnbitsize - 1);
4140 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
4141 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4143 if (BYTES_BIG_ENDIAN)
4145 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4146 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4149 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4150 size_int (xlr_bitpos), 0);
4151 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4152 size_int (xrr_bitpos), 0);
4154 /* Make a mask that corresponds to both fields being compared.
4155 Do this for both items being compared. If the operands are the
4156 same size and the bits being compared are in the same position
4157 then we can do this by masking both and comparing the masked results. */
4159 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4160 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4161 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4163 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4164 ll_unsignedp || rl_unsignedp);
4165 if (! all_ones_mask_p (ll_mask, lnbitsize))
4166 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4168 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4169 lr_unsignedp || rr_unsignedp);
4170 if (! all_ones_mask_p (lr_mask, rnbitsize))
4171 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4173 return build (wanted_code, truth_type, lhs, rhs);
4176 /* There is still another way we can do something: If both pairs of
4177 fields being compared are adjacent, we may be able to make a wider
4178 field containing them both.
4180 Note that we still must mask the lhs/rhs expressions. Furthermore,
4181 the mask must be shifted to account for the shift done by
4182 make_bit_field_ref. */
4183 if ((ll_bitsize + ll_bitpos == rl_bitpos
4184 && lr_bitsize + lr_bitpos == rr_bitpos)
4185 || (ll_bitpos == rl_bitpos + rl_bitsize
4186 && lr_bitpos == rr_bitpos + rr_bitsize))
4190 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4191 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4192 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4193 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4195 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4196 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4197 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4198 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4200 /* Convert to the smaller type before masking out unwanted bits. */
4202 if (lntype != rntype)
4204 if (lnbitsize > rnbitsize)
4206 lhs = fold_convert (rntype, lhs);
4207 ll_mask = fold_convert (rntype, ll_mask);
4210 else if (lnbitsize < rnbitsize)
4212 rhs = fold_convert (lntype, rhs);
4213 lr_mask = fold_convert (lntype, lr_mask);
4218 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4219 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4221 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4222 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4224 return build (wanted_code, truth_type, lhs, rhs);
4230 /* Handle the case of comparisons with constants. If there is something in
4231 common between the masks, those bits of the constants must be the same.
4232 If not, the condition is always false. Test for this to avoid generating
4233 incorrect code below. */
4234 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4235 if (! integer_zerop (result)
4236 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4237 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4239 if (wanted_code == NE_EXPR)
4241 warning ("`or' of unmatched not-equal tests is always 1");
4242 return fold_convert (truth_type, integer_one_node);
4246 warning ("`and' of mutually exclusive equal-tests is always 0");
4247 return fold_convert (truth_type, integer_zero_node);
4251 /* Construct the expression we will return. First get the component
4252 reference we will make. Unless the mask is all ones the width of
4253 that field, perform the mask operation. Then compare with the merged constant. */
4255 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4256 ll_unsignedp || rl_unsignedp);
4258 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4259 if (! all_ones_mask_p (ll_mask, lnbitsize))
4260 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4262 return build (wanted_code, truth_type, result,
4263 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
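/* Illustrative sketch, not part of the original source; the struct, field
   widths, and constants below are hypothetical.  Given

     struct s { unsigned a : 4; unsigned b : 4; } x;

   a test such as "x.a == 3 && x.b == 5" can be merged by the code above
   into one bit-field load compared against the merged constant, roughly

     BIT_FIELD_REF <x, 8, 0> == (3 | (5 << 4))

   with the exact bit positions depending on BYTES_BIG_ENDIAN and the
   layout chosen by the front end.  */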
4266 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. */
4270 optimize_minmax_comparison (tree t)
4272 tree type = TREE_TYPE (t);
4273 tree arg0 = TREE_OPERAND (t, 0);
4274 enum tree_code op_code;
4275 tree comp_const = TREE_OPERAND (t, 1);
4277 int consts_equal, consts_lt;
4280 STRIP_SIGN_NOPS (arg0);
4282 op_code = TREE_CODE (arg0);
4283 minmax_const = TREE_OPERAND (arg0, 1);
4284 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4285 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4286 inner = TREE_OPERAND (arg0, 0);
4288 /* If something does not permit us to optimize, return the original tree. */
4289 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4290 || TREE_CODE (comp_const) != INTEGER_CST
4291 || TREE_CONSTANT_OVERFLOW (comp_const)
4292 || TREE_CODE (minmax_const) != INTEGER_CST
4293 || TREE_CONSTANT_OVERFLOW (minmax_const))
4296 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4297 and GT_EXPR, doing the rest with recursive calls using logical simplifications. */
4299 switch (TREE_CODE (t))
4301 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4303 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4307 fold (build (TRUTH_ORIF_EXPR, type,
4308 optimize_minmax_comparison
4309 (build (EQ_EXPR, type, arg0, comp_const)),
4310 optimize_minmax_comparison
4311 (build (GT_EXPR, type, arg0, comp_const))));
4314 if (op_code == MAX_EXPR && consts_equal)
4315 /* MAX (X, 0) == 0 -> X <= 0 */
4316 return fold (build (LE_EXPR, type, inner, comp_const));
4318 else if (op_code == MAX_EXPR && consts_lt)
4319 /* MAX (X, 0) == 5 -> X == 5 */
4320 return fold (build (EQ_EXPR, type, inner, comp_const));
4322 else if (op_code == MAX_EXPR)
4323 /* MAX (X, 0) == -1 -> false */
4324 return omit_one_operand (type, integer_zero_node, inner);
4326 else if (consts_equal)
4327 /* MIN (X, 0) == 0 -> X >= 0 */
4328 return fold (build (GE_EXPR, type, inner, comp_const));
4331 /* MIN (X, 0) == 5 -> false */
4332 return omit_one_operand (type, integer_zero_node, inner);
4335 /* MIN (X, 0) == -1 -> X == -1 */
4336 return fold (build (EQ_EXPR, type, inner, comp_const));
4339 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4340 /* MAX (X, 0) > 0 -> X > 0
4341 MAX (X, 0) > 5 -> X > 5 */
4342 return fold (build (GT_EXPR, type, inner, comp_const));
4344 else if (op_code == MAX_EXPR)
4345 /* MAX (X, 0) > -1 -> true */
4346 return omit_one_operand (type, integer_one_node, inner);
4348 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4349 /* MIN (X, 0) > 0 -> false
4350 MIN (X, 0) > 5 -> false */
4351 return omit_one_operand (type, integer_zero_node, inner);
4354 /* MIN (X, 0) > -1 -> X > -1 */
4355 return fold (build (GT_EXPR, type, inner, comp_const));
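/* Illustrative examples for the cases above (hypothetical values, not part
   of the original source): with "int x",

     MAX_EXPR <x, 10> == 10   folds to   x <= 10
     MAX_EXPR <x, 10> >  12   folds to   x > 12
     MIN_EXPR <x, 10> >  12   is always false and folds to 0

   after which fold is applied again to the simpler comparison.  */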
4362 /* T is an integer expression that is being multiplied, divided, or taken a
4363 modulus (CODE says which and what kind of divide or modulus) by a
4364 constant C. See if we can eliminate that operation by folding it with
4365 other operations already in T. WIDE_TYPE, if non-null, is a type that
4366 should be used for the computation if wider than our type.
4368 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4369 (X * 2) + (Y * 4). We must, however, be assured that either the original
4370 expression would not overflow or that overflow is undefined for the type
4371 in the language in question.
4373 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4374 the machine has a multiply-accumulate insn or that this is part of an
4375 addressing calculation.
4377 If we return a non-null expression, it is an equivalent form of the
4378 original computation, but need not be in the original type. */
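/* Worked example (hypothetical, following the description above): for
   ((X * 6) + 9) / 3 the division distributes over the addition because
   both 6 and 9 are multiples of 3, giving X * 2 + 3; the result may be
   computed in WIDE_TYPE rather than in the original type.  */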
4381 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4383 /* To avoid exponential search depth, refuse to allow recursion past
4384 three levels. Beyond that (1) it's highly unlikely that we'll find
4385 something interesting and (2) we've probably processed it before
4386 when we built the inner expression. */
4395 ret = extract_muldiv_1 (t, c, code, wide_type);
4402 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4404 tree type = TREE_TYPE (t);
4405 enum tree_code tcode = TREE_CODE (t);
4406 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4407 > GET_MODE_SIZE (TYPE_MODE (type)))
4408 ? wide_type : type);
4410 int same_p = tcode == code;
4411 tree op0 = NULL_TREE, op1 = NULL_TREE;
4413 /* Don't deal with constants of zero here; they confuse the code below. */
4414 if (integer_zerop (c))
4417 if (TREE_CODE_CLASS (tcode) == '1')
4418 op0 = TREE_OPERAND (t, 0);
4420 if (TREE_CODE_CLASS (tcode) == '2')
4421 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4423 /* Note that we need not handle conditional operations here since fold
4424 already handles those cases. So just do arithmetic here. */
4428 /* For a constant, we can always simplify if we are a multiply
4429 or (for divide and modulus) if it is a multiple of our constant. */
4430 if (code == MULT_EXPR
4431 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4432 return const_binop (code, fold_convert (ctype, t),
4433 fold_convert (ctype, c), 0);
4436 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4437 /* If op0 is an expression ... */
4438 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4439 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4440 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4441 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4442 /* ... and is unsigned, and its type is smaller than ctype,
4443 then we cannot pass through as widening. */
4444 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4445 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4446 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4447 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4448 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4449 /* ... or its type is larger than ctype,
4450 then we cannot pass through this truncation. */
4451 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4452 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4453 /* ... or signedness changes for division or modulus,
4454 then we cannot pass through this conversion. */
4455 || (code != MULT_EXPR
4456 && (TREE_UNSIGNED (ctype)
4457 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4460 /* Pass the constant down and see if we can make a simplification. If
4461 we can, replace this expression with the inner simplification for
4462 possible later conversion to our or some other type. */
4463 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4464 && TREE_CODE (t2) == INTEGER_CST
4465 && ! TREE_CONSTANT_OVERFLOW (t2)
4466 && (0 != (t1 = extract_muldiv (op0, t2, code,
4468 ? ctype : NULL_TREE))))
4472 case NEGATE_EXPR: case ABS_EXPR:
4473 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4474 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4477 case MIN_EXPR: case MAX_EXPR:
4478 /* If widening the type changes the signedness, then we can't perform
4479 this optimization as that changes the result. */
4480 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4483 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4484 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4485 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4487 if (tree_int_cst_sgn (c) < 0)
4488 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4490 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4491 fold_convert (ctype, t2)));
4495 case WITH_RECORD_EXPR:
4496 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4497 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4498 TREE_OPERAND (t, 1));
4501 case LSHIFT_EXPR: case RSHIFT_EXPR:
4502 /* If the second operand is constant, this is a multiplication
4503 or floor division by a power of two, so we can treat it that
4504 way unless the multiplier or divisor overflows. */
4505 if (TREE_CODE (op1) == INTEGER_CST
4506 /* const_binop may not detect overflow correctly,
4507 so check for it explicitly here. */
4508 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4509 && TREE_INT_CST_HIGH (op1) == 0
4510 && 0 != (t1 = fold_convert (ctype,
4511 const_binop (LSHIFT_EXPR,
4514 && ! TREE_OVERFLOW (t1))
4515 return extract_muldiv (build (tcode == LSHIFT_EXPR
4516 ? MULT_EXPR : FLOOR_DIV_EXPR,
4517 ctype, fold_convert (ctype, op0), t1),
4518 c, code, wide_type);
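/* Illustrative example (hypothetical): a shift such as X << 3 is rewritten
   above as X * 8, so a request to divide it by 4 can then be handled by
   the multiply/divide cancellation below and yield X * 2.  */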
4521 case PLUS_EXPR: case MINUS_EXPR:
4522 /* See if we can eliminate the operation on both sides. If we can, we
4523 can return a new PLUS or MINUS. If we can't, the only remaining
4524 cases where we can do anything are if the second operand is a constant. */
4526 t1 = extract_muldiv (op0, c, code, wide_type);
4527 t2 = extract_muldiv (op1, c, code, wide_type);
4528 if (t1 != 0 && t2 != 0
4529 && (code == MULT_EXPR
4530 /* If not multiplication, we can only do this if both operands
4531 are divisible by c. */
4532 || (multiple_of_p (ctype, op0, c)
4533 && multiple_of_p (ctype, op1, c))))
4534 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4535 fold_convert (ctype, t2)));
4537 /* If this was a subtraction, negate OP1 and set it to be an addition.
4538 This simplifies the logic below. */
4539 if (tcode == MINUS_EXPR)
4540 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4542 if (TREE_CODE (op1) != INTEGER_CST)
4545 /* If either OP1 or C is negative, this optimization is not safe for
4546 some of the division and remainder types while for others we need
4547 to change the code. */
4548 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4550 if (code == CEIL_DIV_EXPR)
4551 code = FLOOR_DIV_EXPR;
4552 else if (code == FLOOR_DIV_EXPR)
4553 code = CEIL_DIV_EXPR;
4554 else if (code != MULT_EXPR
4555 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4559 /* If it's a multiply or a division/modulus operation of a multiple
4560 of our constant, do the operation and verify it doesn't overflow. */
4561 if (code == MULT_EXPR
4562 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4564 op1 = const_binop (code, fold_convert (ctype, op1),
4565 fold_convert (ctype, c), 0);
4566 /* We allow the constant to overflow with wrapping semantics. */
4568 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4574 /* If we have an unsigned type that is not a sizetype, we cannot widen
4575 the operation since it will change the result if the original
4576 computation overflowed. */
4577 if (TREE_UNSIGNED (ctype)
4578 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4582 /* If we were able to eliminate our operation from the first side,
4583 apply our operation to the second side and reform the PLUS. */
4584 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4585 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4587 /* The last case is if we are a multiply. In that case, we can
4588 apply the distributive law to commute the multiply and addition
4589 if the multiplication of the constants doesn't overflow. */
4590 if (code == MULT_EXPR)
4591 return fold (build (tcode, ctype,
4592 fold (build (code, ctype,
4593 fold_convert (ctype, op0),
4594 fold_convert (ctype, c))),
4600 /* We have a special case here if we are doing something like
4601 (C * 8) % 4 since we know that's zero. */
4602 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4603 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4604 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4605 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4606 return omit_one_operand (type, integer_zero_node, op0);
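/* Example of the special case above (hypothetical operands): in
   (X * 8) % 4 every multiple of 8 is also a multiple of 4, so the whole
   expression folds to 0, with X kept only for its side effects.  */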
4608 /* ... fall through ... */
4610 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4611 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4612 /* If we can extract our operation from the LHS, do so and return a
4613 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4614 do something only if the second operand is a constant. */
4616 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4617 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4618 fold_convert (ctype, op1)));
4619 else if (tcode == MULT_EXPR && code == MULT_EXPR
4620 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4621 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4622 fold_convert (ctype, t1)));
4623 else if (TREE_CODE (op1) != INTEGER_CST)
4626 /* If these are the same operation types, we can associate them
4627 assuming no overflow. */
4629 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4630 fold_convert (ctype, c), 0))
4631 && ! TREE_OVERFLOW (t1))
4632 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4634 /* If these operations "cancel" each other, we have the main
4635 optimizations of this pass, which occur when either constant is a
4636 multiple of the other, in which case we replace this with either an
4637 operation of CODE or TCODE.
4639 If we have an unsigned type that is not a sizetype, we cannot do
4640 this since it will change the result if the original computation overflowed. */
4642 if ((! TREE_UNSIGNED (ctype)
4643 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4645 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4646 || (tcode == MULT_EXPR
4647 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4648 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4650 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4651 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4652 fold_convert (ctype,
4653 const_binop (TRUNC_DIV_EXPR,
4655 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4656 return fold (build (code, ctype, fold_convert (ctype, op0),
4657 fold_convert (ctype,
4658 const_binop (TRUNC_DIV_EXPR,
4670 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4671 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4672 that we may sometimes modify the tree. */
4675 strip_compound_expr (tree t, tree s)
4677 enum tree_code code = TREE_CODE (t);
4679 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4680 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4681 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4682 return TREE_OPERAND (t, 1);
4684 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4685 don't bother handling any other types. */
4686 else if (code == COND_EXPR)
4688 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4689 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4690 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4692 else if (TREE_CODE_CLASS (code) == '1')
4693 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4694 else if (TREE_CODE_CLASS (code) == '<'
4695 || TREE_CODE_CLASS (code) == '2')
4697 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4698 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4704 /* Return a node which has the indicated constant VALUE (either 0 or
4705 1), and is of the indicated TYPE. */
4708 constant_boolean_node (int value, tree type)
4710 if (type == integer_type_node)
4711 return value ? integer_one_node : integer_zero_node;
4712 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4713 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4717 tree t = build_int_2 (value, 0);
4719 TREE_TYPE (t) = type;
4724 /* Utility function for the following routine, to see how complex a nesting of
4725 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4726 we don't care (to avoid spending too much time on complex expressions).
4729 count_cond (tree expr, int lim)
4733 if (TREE_CODE (expr) != COND_EXPR)
4738 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4739 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4740 return MIN (lim, 1 + ctrue + cfalse);
4743 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4744 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4745 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4746 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4747 COND is the first argument to CODE; otherwise (as in the example
4748 given here), it is the second argument. TYPE is the type of the
4749 original expression. */
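/* For instance (hypothetical operands), "a + (b ? x : y)" becomes
   "b ? (a + x) : (a + y)", giving each arm a chance to fold further,
   e.g. when x or y is a constant that combines with a.  */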
4752 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4753 tree cond, tree arg, int cond_first_p)
4755 tree test, true_value, false_value;
4756 tree lhs = NULL_TREE;
4757 tree rhs = NULL_TREE;
4758 /* In the end, we'll produce a COND_EXPR. Both arms of the
4759 conditional expression will be binary operations. The left-hand
4760 side of the expression to be executed if the condition is true
4761 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4762 of the expression to be executed if the condition is true will be
4763 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4764 but apply to the expression to be executed if the conditional is false. */
4770 /* These are the codes to use for the left-hand side and right-hand
4771 side of the COND_EXPR. Normally, they are the same as CODE. */
4772 enum tree_code lhs_code = code;
4773 enum tree_code rhs_code = code;
4774 /* And these are the types of the expressions. */
4775 tree lhs_type = type;
4776 tree rhs_type = type;
4781 true_rhs = false_rhs = &arg;
4782 true_lhs = &true_value;
4783 false_lhs = &false_value;
4787 true_lhs = false_lhs = &arg;
4788 true_rhs = &true_value;
4789 false_rhs = &false_value;
4792 if (TREE_CODE (cond) == COND_EXPR)
4794 test = TREE_OPERAND (cond, 0);
4795 true_value = TREE_OPERAND (cond, 1);
4796 false_value = TREE_OPERAND (cond, 2);
4797 /* If this operand throws an exception, then it does not make
4798 sense to try to perform a logical or arithmetic operation
4799 involving it. Instead of building `a + throw 3' for example,
4800 we simply build `a, throw 3'. */
4801 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4805 lhs_code = COMPOUND_EXPR;
4806 lhs_type = void_type_node;
4811 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4815 rhs_code = COMPOUND_EXPR;
4816 rhs_type = void_type_node;
4824 tree testtype = TREE_TYPE (cond);
4826 true_value = fold_convert (testtype, integer_one_node);
4827 false_value = fold_convert (testtype, integer_zero_node);
4830 /* If ARG is complex we want to make sure we only evaluate it once. Though
4831 this is only required if it is volatile, it might be more efficient even
4832 if it is not. However, if we succeed in folding one part to a constant,
4833 we do not need to make this SAVE_EXPR. Since we do this optimization
4834 primarily to see if we do end up with a constant and this SAVE_EXPR
4835 interferes with later optimizations, suppressing it when we can is important.
4838 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4839 do so. Don't try to see if the result is a constant if an arm is a
4840 COND_EXPR since we get exponential behavior in that case. */
4842 if (saved_expr_p (arg))
4844 else if (lhs == 0 && rhs == 0
4845 && !TREE_CONSTANT (arg)
4846 && (*lang_hooks.decls.global_bindings_p) () == 0
4847 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4848 || TREE_SIDE_EFFECTS (arg)))
4850 if (TREE_CODE (true_value) != COND_EXPR)
4851 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4853 if (TREE_CODE (false_value) != COND_EXPR)
4854 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4856 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4857 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4859 arg = save_expr (arg);
4861 save = saved_expr_p (arg);
4866 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4868 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4870 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4872 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4873 ahead of the COND_EXPR we made. Otherwise we would have it only
4874 evaluated in one branch, with the other branch using the result
4875 but missing the evaluation code. Beware that the save_expr call
4876 above might not return a SAVE_EXPR, so testing the TREE_CODE
4877 of ARG is not enough to decide here. */
4879 return build (COMPOUND_EXPR, type,
4880 fold_convert (void_type_node, arg),
4881 strip_compound_expr (test, arg));
4883 return fold_convert (type, test);
4887 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4889 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4890 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4891 ADDEND is the same as X.
4893 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4894 and finite. The problematic cases are when X is zero, and its mode
4895 has signed zeros. In the case of rounding towards -infinity,
4896 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4897 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4900 fold_real_zero_addition_p (tree type, tree addend, int negate)
4902 if (!real_zerop (addend))
4905 /* Don't allow the fold with -fsignaling-nans. */
4906 if (HONOR_SNANS (TYPE_MODE (type)))
4909 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4910 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4913 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4914 if (TREE_CODE (addend) == REAL_CST
4915 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4918 /* The mode has signed zeros, and we have to honor their sign.
4919 In this situation, there is only one case we can return true for.
4920 X - 0 is the same as X unless rounding towards -infinity is in effect. */
4922 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
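/* Worked example (hypothetical): with signed zeros honored, "x + 0.0" must
   not be folded to "x", since (-0.0) + 0.0 yields +0.0; "x - 0.0" is safe
   to fold unless rounding towards -infinity is in effect, as only then
   does 0.0 - 0.0 produce -0.0.  */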
4925 /* Subroutine of fold() that checks comparisons of built-in math
4926 functions against real constants.
4928 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4929 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4930 is the type of the result and ARG0 and ARG1 are the operands of the
4931 comparison. ARG1 must be a TREE_REAL_CST.
4933 The function returns the constant folded tree if a simplification
4934 can be made, and NULL_TREE otherwise. */
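/* For example (hypothetical, double operands): "sqrt (x) > 2.0" can be
   rewritten as "x > 4.0", and for a negative constant "sqrt (x) > -1.0"
   reduces to "x >= 0.0" when NaNs are honored, or simply to 1 when they
   can be ignored.  */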
4937 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4938 tree type, tree arg0, tree arg1)
4942 if (fcode == BUILT_IN_SQRT
4943 || fcode == BUILT_IN_SQRTF
4944 || fcode == BUILT_IN_SQRTL)
4946 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4947 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4949 c = TREE_REAL_CST (arg1);
4950 if (REAL_VALUE_NEGATIVE (c))
4952 /* sqrt(x) < y is always false, if y is negative. */
4953 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4954 return omit_one_operand (type,
4955 fold_convert (type, integer_zero_node),
4958 /* sqrt(x) > y is always true, if y is negative and we
4959 don't care about NaNs, i.e. negative values of x. */
4960 if (code == NE_EXPR || !HONOR_NANS (mode))
4961 return omit_one_operand (type,
4962 fold_convert (type, integer_one_node),
4965 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4966 return fold (build (GE_EXPR, type, arg,
4967 build_real (TREE_TYPE (arg), dconst0)));
4969 else if (code == GT_EXPR || code == GE_EXPR)
4973 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4974 real_convert (&c2, mode, &c2);
4976 if (REAL_VALUE_ISINF (c2))
4978 /* sqrt(x) > y is x == +Inf, when y is very large. */
4979 if (HONOR_INFINITIES (mode))
4980 return fold (build (EQ_EXPR, type, arg,
4981 build_real (TREE_TYPE (arg), c2)));
4983 /* sqrt(x) > y is always false, when y is very large
4984 and we don't care about infinities. */
4985 return omit_one_operand (type,
4986 fold_convert (type, integer_zero_node),
4990 /* sqrt(x) > c is the same as x > c*c. */
4991 return fold (build (code, type, arg,
4992 build_real (TREE_TYPE (arg), c2)));
4994 else if (code == LT_EXPR || code == LE_EXPR)
4998 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4999 real_convert (&c2, mode, &c2);
5001 if (REAL_VALUE_ISINF (c2))
5003 /* sqrt(x) < y is always true, when y is a very large
5004 value and we don't care about NaNs or Infinities. */
5005 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5006 return omit_one_operand (type,
5007 fold_convert (type, integer_one_node),
5010 /* sqrt(x) < y is x != +Inf when y is very large and we
5011 don't care about NaNs. */
5012 if (! HONOR_NANS (mode))
5013 return fold (build (NE_EXPR, type, arg,
5014 build_real (TREE_TYPE (arg), c2)));
5016 /* sqrt(x) < y is x >= 0 when y is very large and we
5017 don't care about Infinities. */
5018 if (! HONOR_INFINITIES (mode))
5019 return fold (build (GE_EXPR, type, arg,
5020 build_real (TREE_TYPE (arg), dconst0)));
5022 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5023 if ((*lang_hooks.decls.global_bindings_p) () != 0
5024 || CONTAINS_PLACEHOLDER_P (arg))
5027 arg = save_expr (arg);
5028 return fold (build (TRUTH_ANDIF_EXPR, type,
5029 fold (build (GE_EXPR, type, arg,
5030 build_real (TREE_TYPE (arg),
5032 fold (build (NE_EXPR, type, arg,
5033 build_real (TREE_TYPE (arg),
5037 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5038 if (! HONOR_NANS (mode))
5039 return fold (build (code, type, arg,
5040 build_real (TREE_TYPE (arg), c2)));
5042 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5043 if ((*lang_hooks.decls.global_bindings_p) () == 0
5044 && ! CONTAINS_PLACEHOLDER_P (arg))
5046 arg = save_expr (arg);
5047 return fold (build (TRUTH_ANDIF_EXPR, type,
5048 fold (build (GE_EXPR, type, arg,
5049 build_real (TREE_TYPE (arg),
5051 fold (build (code, type, arg,
5052 build_real (TREE_TYPE (arg),
5061 /* Subroutine of fold() that optimizes comparisons against Infinities,
5062 either +Inf or -Inf.
5064 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5065 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5066 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5068 The function returns the constant folded tree if a simplification
5069 can be made, and NULL_TREE otherwise. */
5072 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5074 enum machine_mode mode;
5075 REAL_VALUE_TYPE max;
5079 mode = TYPE_MODE (TREE_TYPE (arg0));
5081 /* For negative infinity swap the sense of the comparison. */
5082 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5084 code = swap_tree_comparison (code);
5089 /* x > +Inf is always false, if we ignore sNaNs. */
5090 if (HONOR_SNANS (mode))
5092 return omit_one_operand (type,
5093 fold_convert (type, integer_zero_node),
5097 /* x <= +Inf is always true, if we don't care about NaNs. */
5098 if (! HONOR_NANS (mode))
5099 return omit_one_operand (type,
5100 fold_convert (type, integer_one_node),
5103 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5104 if ((*lang_hooks.decls.global_bindings_p) () == 0
5105 && ! CONTAINS_PLACEHOLDER_P (arg0))
5107 arg0 = save_expr (arg0);
5108 return fold (build (EQ_EXPR, type, arg0, arg0));
5114 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5115 real_maxval (&max, neg, mode);
5116 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5117 arg0, build_real (TREE_TYPE (arg0), max)));
5120 /* x < +Inf is always equal to x <= DBL_MAX. */
5121 real_maxval (&max, neg, mode);
5122 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5123 arg0, build_real (TREE_TYPE (arg0), max)));
5126 /* x != +Inf is always equal to !(x > DBL_MAX). */
5127 real_maxval (&max, neg, mode);
5128 if (! HONOR_NANS (mode))
5129 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5130 arg0, build_real (TREE_TYPE (arg0), max)));
5131 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5132 arg0, build_real (TREE_TYPE (arg0), max)));
5133 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
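/* Illustrative examples (hypothetical, double operands): "x >= +Inf" is
   rewritten as "x > DBL_MAX" and "x < +Inf" as "x <= DBL_MAX"; for -Inf
   the sense of the comparison is swapped first, so "x <= -Inf" becomes
   "x < -DBL_MAX".  */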
5142 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5143 equality/inequality test, then return a simplified form of
5144 the test using shifts and logical operations. Otherwise return
5145 NULL. TYPE is the desired result type. */
5148 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5151 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5153 if (code == TRUTH_NOT_EXPR)
5155 code = TREE_CODE (arg0);
5156 if (code != NE_EXPR && code != EQ_EXPR)
5159 /* Extract the arguments of the EQ/NE. */
5160 arg1 = TREE_OPERAND (arg0, 1);
5161 arg0 = TREE_OPERAND (arg0, 0);
5163 /* This requires us to invert the code. */
5164 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5167 /* If this is testing a single bit, we can optimize the test. */
5168 if ((code == NE_EXPR || code == EQ_EXPR)
5169 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5170 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5172 tree inner = TREE_OPERAND (arg0, 0);
5173 tree type = TREE_TYPE (arg0);
5174 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5175 enum machine_mode operand_mode = TYPE_MODE (type);
5177 tree signed_type, unsigned_type, intermediate_type;
5180 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5181 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5182 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5183 if (arg00 != NULL_TREE
5184 /* This is only a win if casting to a signed type is cheap,
5185 i.e. when arg00's type is not a partial mode. */
5186 && TYPE_PRECISION (TREE_TYPE (arg00))
5187 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5189 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
5190 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5191 fold_convert (stype, arg00),
5192 fold_convert (stype, integer_zero_node)));
5195 /* Otherwise we have (A & C) != 0 where C is a single bit,
5196 convert that into ((A >> C2) & 1), where C2 = log2(C).
5197 Similarly for (A & C) == 0. */
5199 /* If INNER is a right shift of a constant and it plus BITNUM does
5200 not overflow, adjust BITNUM and INNER. */
5201 if (TREE_CODE (inner) == RSHIFT_EXPR
5202 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5203 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5204 && bitnum < TYPE_PRECISION (type)
5205 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5206 bitnum - TYPE_PRECISION (type)))
5208 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5209 inner = TREE_OPERAND (inner, 0);
5212 /* If we are going to be able to omit the AND below, we must do our
5213 operations as unsigned. If we must use the AND, we have a choice.
5214 Normally unsigned is faster, but for some machines signed is. */
5215 #ifdef LOAD_EXTEND_OP
5216 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5221 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
5222 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
5223 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5224 inner = fold_convert (intermediate_type, inner);
5227 inner = build (RSHIFT_EXPR, intermediate_type,
5228 inner, size_int (bitnum));
5230 if (code == EQ_EXPR)
5231 inner = build (BIT_XOR_EXPR, intermediate_type,
5232 inner, integer_one_node);
5234 /* Put the AND last so it can combine with more things. */
5235 inner = build (BIT_AND_EXPR, intermediate_type,
5236 inner, integer_one_node);
5238 /* Make sure to return the proper type. */
5239 inner = fold_convert (result_type, inner);
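/* Illustrative example (hypothetical): for unsigned X, "(X & 8) != 0"
   becomes "(X >> 3) & 1", and "(X & 8) == 0" gets an extra XOR with 1
   before the final AND; when the masked bit is the sign bit of a signed
   operand, the earlier shortcut turns the test into a comparison against
   zero instead.  */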
5246 /* Check whether we are allowed to reorder operands arg0 and arg1,
5247 such that the evaluation of arg1 occurs before arg0. */
5250 reorder_operands_p (tree arg0, tree arg1)
5252 if (! flag_evaluation_order)
5254 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5256 return ! TREE_SIDE_EFFECTS (arg0)
5257 && ! TREE_SIDE_EFFECTS (arg1);
5260 /* Test whether it is preferable two swap two operands, ARG0 and
5261 ARG1, for example because ARG0 is an integer constant and ARG1
5262 isn't. If REORDER is true, only recommend swapping if we can
5263 evaluate the operands in reverse order. */
5266 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5268 STRIP_SIGN_NOPS (arg0);
5269 STRIP_SIGN_NOPS (arg1);
5271 if (TREE_CODE (arg1) == INTEGER_CST)
5273 if (TREE_CODE (arg0) == INTEGER_CST)
5276 if (TREE_CODE (arg1) == REAL_CST)
5278 if (TREE_CODE (arg0) == REAL_CST)
5281 if (TREE_CODE (arg1) == COMPLEX_CST)
5283 if (TREE_CODE (arg0) == COMPLEX_CST)
5286 if (TREE_CONSTANT (arg1))
5288 if (TREE_CONSTANT (arg0))
5294 if (reorder && flag_evaluation_order
5295 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5306 /* Perform constant folding and related simplification of EXPR.
5307 The related simplifications include x*1 => x, x*0 => 0, etc.,
5308 and application of the associative law.
5309 NOP_EXPR conversions may be removed freely (as long as we
5310 are careful not to change the C type of the overall expression).
5311 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5312 but we can constant-fold them if they have constant operands. */
5314 #ifdef ENABLE_FOLD_CHECKING
5315 # define fold(x) fold_1 (x)
5316 static tree fold_1 (tree);
5322 tree t = expr, orig_t;
5323 tree t1 = NULL_TREE;
5325 tree type = TREE_TYPE (expr);
5326 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5327 enum tree_code code = TREE_CODE (t);
5328 int kind = TREE_CODE_CLASS (code);
5330 /* WINS will be nonzero when the switch is done
5331 if all operands are constant. */
5334 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5335 Likewise for a SAVE_EXPR that's already been evaluated. */
5336 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5339 /* Return right away if a constant. */
5345 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5349 /* Special case for conversion ops that can have fixed point args. */
5350 arg0 = TREE_OPERAND (t, 0);
5352 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5354 STRIP_SIGN_NOPS (arg0);
5356 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5357 subop = TREE_REALPART (arg0);
5361 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5362 && TREE_CODE (subop) != REAL_CST)
5363 /* Note that TREE_CONSTANT isn't enough:
5364 static var addresses are constant but we can't
5365 do arithmetic on them. */
5368 else if (IS_EXPR_CODE_CLASS (kind))
5370 int len = first_rtl_op (code);
5372 for (i = 0; i < len; i++)
5374 tree op = TREE_OPERAND (t, i);
5378 continue; /* Valid for CALL_EXPR, at least. */
5380 if (kind == '<' || code == RSHIFT_EXPR)
5382 /* Signedness matters here. Perhaps we can refine this later. */
5384 STRIP_SIGN_NOPS (op);
5387 /* Strip any conversions that don't change the mode. */
5390 if (TREE_CODE (op) == COMPLEX_CST)
5391 subop = TREE_REALPART (op);
5395 if (TREE_CODE (subop) != INTEGER_CST
5396 && TREE_CODE (subop) != REAL_CST)
5397 /* Note that TREE_CONSTANT isn't enough:
5398 static var addresses are constant but we can't
5399 do arithmetic on them. */
5409 /* If this is a commutative operation, and ARG0 is a constant, move it
5410 to ARG1 to reduce the number of tests below. */
5411 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5412 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5413 || code == BIT_AND_EXPR)
5414 && tree_swap_operands_p (arg0, arg1, true))
5415 return fold (build (code, type, TREE_OPERAND (t, 1),
5416 TREE_OPERAND (t, 0)));
5418 /* Now WINS is set as described above,
5419 ARG0 is the first operand of EXPR,
5420 and ARG1 is the second operand (if it has more than one operand).
5422 First check for cases where an arithmetic operation is applied to a
5423 compound, conditional, or comparison operation. Push the arithmetic
5424 operation inside the compound or conditional to see if any folding
5425 can then be done. Convert comparison to conditional for this purpose.
5426 This also optimizes non-constant cases that used to be done in expand_expr.
5429 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5430 one of the operands is a comparison and the other is a comparison, a
5431 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5432 code below would make the expression more complex. Change it to a
5433 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5434 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5436 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5437 || code == EQ_EXPR || code == NE_EXPR)
5438 && ((truth_value_p (TREE_CODE (arg0))
5439 && (truth_value_p (TREE_CODE (arg1))
5440 || (TREE_CODE (arg1) == BIT_AND_EXPR
5441 && integer_onep (TREE_OPERAND (arg1, 1)))))
5442 || (truth_value_p (TREE_CODE (arg1))
5443 && (truth_value_p (TREE_CODE (arg0))
5444 || (TREE_CODE (arg0) == BIT_AND_EXPR
5445 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5447 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5448 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5452 if (code == EQ_EXPR)
5453 t = invert_truthvalue (t);
5458 if (TREE_CODE_CLASS (code) == '1')
5460 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5461 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5462 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5463 else if (TREE_CODE (arg0) == COND_EXPR)
5465 tree arg01 = TREE_OPERAND (arg0, 1);
5466 tree arg02 = TREE_OPERAND (arg0, 2);
5467 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5468 arg01 = fold (build1 (code, type, arg01));
5469 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5470 arg02 = fold (build1 (code, type, arg02));
5471 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5474 /* If this was a conversion, and all we did was to move it
5475 inside the COND_EXPR, bring it back out. But leave it if
5476 it is a conversion from integer to integer and the
5477 result precision is no wider than a word since such a
5478 conversion is cheap and may be optimized away by combine,
5479 while it couldn't if it were outside the COND_EXPR. Then return
5480 so we don't get into an infinite recursion loop taking the
5481 conversion out and then back in. */
5483 if ((code == NOP_EXPR || code == CONVERT_EXPR
5484 || code == NON_LVALUE_EXPR)
5485 && TREE_CODE (t) == COND_EXPR
5486 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5487 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5488 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5489 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5490 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5491 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5492 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5494 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5495 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5496 t = build1 (code, type,
5498 TREE_TYPE (TREE_OPERAND
5499 (TREE_OPERAND (t, 1), 0)),
5500 TREE_OPERAND (t, 0),
5501 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5502 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5505 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5506 return fold (build (COND_EXPR, type, arg0,
5507 fold (build1 (code, type, integer_one_node)),
5508 fold (build1 (code, type, integer_zero_node))));
5510 else if (TREE_CODE_CLASS (code) == '<'
5511 && TREE_CODE (arg0) == COMPOUND_EXPR)
5512 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5513 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5514 else if (TREE_CODE_CLASS (code) == '<'
5515 && TREE_CODE (arg1) == COMPOUND_EXPR)
5516 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5517 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5518 else if (TREE_CODE_CLASS (code) == '2'
5519 || TREE_CODE_CLASS (code) == '<')
5521 if (TREE_CODE (arg1) == COMPOUND_EXPR
5522 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5523 && ! TREE_SIDE_EFFECTS (arg0))
5524 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5525 fold (build (code, type,
5526 arg0, TREE_OPERAND (arg1, 1))));
5527 else if ((TREE_CODE (arg1) == COND_EXPR
5528 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5529 && TREE_CODE_CLASS (code) != '<'))
5530 && (TREE_CODE (arg0) != COND_EXPR
5531 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5532 && (! TREE_SIDE_EFFECTS (arg0)
5533 || ((*lang_hooks.decls.global_bindings_p) () == 0
5534 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5536 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5537 /*cond_first_p=*/0);
5538 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5539 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5540 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5541 else if ((TREE_CODE (arg0) == COND_EXPR
5542 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5543 && TREE_CODE_CLASS (code) != '<'))
5544 && (TREE_CODE (arg1) != COND_EXPR
5545 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5546 && (! TREE_SIDE_EFFECTS (arg1)
5547 || ((*lang_hooks.decls.global_bindings_p) () == 0
5548 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5550 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5551 /*cond_first_p=*/1);
5565 return fold (DECL_INITIAL (t));
5570 case FIX_TRUNC_EXPR:
5571 /* Other kinds of FIX are not handled properly by fold_convert. */
5573 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5574 return TREE_OPERAND (t, 0);
5576 /* Handle cases of two conversions in a row. */
5577 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5578 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5580 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5581 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5582 tree final_type = TREE_TYPE (t);
5583 int inside_int = INTEGRAL_TYPE_P (inside_type);
5584 int inside_ptr = POINTER_TYPE_P (inside_type);
5585 int inside_float = FLOAT_TYPE_P (inside_type);
5586 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5587 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5588 int inter_int = INTEGRAL_TYPE_P (inter_type);
5589 int inter_ptr = POINTER_TYPE_P (inter_type);
5590 int inter_float = FLOAT_TYPE_P (inter_type);
5591 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5592 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5593 int final_int = INTEGRAL_TYPE_P (final_type);
5594 int final_ptr = POINTER_TYPE_P (final_type);
5595 int final_float = FLOAT_TYPE_P (final_type);
5596 unsigned int final_prec = TYPE_PRECISION (final_type);
5597 int final_unsignedp = TREE_UNSIGNED (final_type);
5599 /* In addition to the cases of two conversions in a row
5600 handled below, if we are converting something to its own
5601 type via an object of identical or wider precision, neither
5602 conversion is needed. */
5603 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5604 && ((inter_int && final_int) || (inter_float && final_float))
5605 && inter_prec >= final_prec)
5606 return fold (build1 (code, final_type,
5607 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5609 /* Likewise, if the intermediate and final types are either both
5610 float or both integer, we don't need the middle conversion if
5611 it is wider than the final type and doesn't change the signedness
5612 (for integers). Avoid this if the final type is a pointer
5613 since then we sometimes need the inner conversion. Likewise if
5614 the outer has a precision not equal to the size of its mode. */
5615 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5616 || (inter_float && inside_float))
5617 && inter_prec >= inside_prec
5618 && (inter_float || inter_unsignedp == inside_unsignedp)
5619 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5620 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5622 return fold (build1 (code, final_type,
5623 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5625 /* If we have a sign-extension of a zero-extended value, we can
5626 replace that by a single zero-extension. */
5627 if (inside_int && inter_int && final_int
5628 && inside_prec < inter_prec && inter_prec < final_prec
5629 && inside_unsignedp && !inter_unsignedp)
5630 return fold (build1 (code, final_type,
5631 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5633 /* Two conversions in a row are not needed unless:
5634 - some conversion is floating-point (overstrict for now), or
5635 - the intermediate type is narrower than both initial and final, or
5637 - the intermediate type and innermost type differ in signedness,
5638 and the outermost type is wider than the intermediate, or
5639 - the initial type is a pointer type and the precisions of the
5640 intermediate and final types differ, or
5641 - the final type is a pointer type and the precisions of the
5642 initial and intermediate types differ. */
5643 if (! inside_float && ! inter_float && ! final_float
5644 && (inter_prec > inside_prec || inter_prec > final_prec)
5645 && ! (inside_int && inter_int
5646 && inter_unsignedp != inside_unsignedp
5647 && inter_prec < final_prec)
5648 && ((inter_unsignedp && inter_prec > inside_prec)
5649 == (final_unsignedp && final_prec > inter_prec))
5650 && ! (inside_ptr && inter_prec != final_prec)
5651 && ! (final_ptr && inside_prec != inter_prec)
5652 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5653 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5655 return fold (build1 (code, final_type,
5656 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5659 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5660 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5661 /* Detect assigning a bitfield. */
5662 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5663 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5665 /* Don't leave an assignment inside a conversion
5666 unless assigning a bitfield. */
5667 tree prev = TREE_OPERAND (t, 0);
5670 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5671 /* First do the assignment, then return converted constant. */
5672 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5673 TREE_NO_UNUSED_WARNING (t) = 1;
5678 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5679 constant (if x has signed type, the sign bit cannot be set
5680 in c). This folds extension into the BIT_AND_EXPR. */
5681 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5682 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5683 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5684 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5686 tree and = TREE_OPERAND (t, 0);
5687 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5690 if (TREE_UNSIGNED (TREE_TYPE (and))
5691 || (TYPE_PRECISION (TREE_TYPE (t))
5692 <= TYPE_PRECISION (TREE_TYPE (and))))
5694 else if (TYPE_PRECISION (TREE_TYPE (and1))
5695 <= HOST_BITS_PER_WIDE_INT
5696 && host_integerp (and1, 1))
5698 unsigned HOST_WIDE_INT cst;
5700 cst = tree_low_cst (and1, 1);
5701 cst &= (HOST_WIDE_INT) -1
5702 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5703 change = (cst == 0);
5704 #ifdef LOAD_EXTEND_OP
5706 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5709 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5710 and0 = fold_convert (uns, and0);
5711 and1 = fold_convert (uns, and1);
5716 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5717 fold_convert (TREE_TYPE (t), and0),
5718 fold_convert (TREE_TYPE (t), and1)));
5721 tem = fold_convert_const (code, TREE_TYPE (t), arg0);
5722 return tem ? tem : t;
5724 case VIEW_CONVERT_EXPR:
5725 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5726 return build1 (VIEW_CONVERT_EXPR, type,
5727 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5731 if (TREE_CODE (arg0) == CONSTRUCTOR
5732 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5734 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5741 if (TREE_CONSTANT (t) != wins)
5745 TREE_CONSTANT (t) = wins;
5750 if (negate_expr_p (arg0))
5751 return fold_convert (type, negate_expr (arg0));
5757 if (TREE_CODE (arg0) == INTEGER_CST)
5759 /* If the value is unsigned, then the absolute value is
5760 the same as the ordinary value. */
5761 if (TREE_UNSIGNED (type))
5763 /* Similarly, if the value is non-negative. */
5764 else if (INT_CST_LT (integer_minus_one_node, arg0))
5766 /* If the value is negative, then the absolute value is its negation. */
5770 unsigned HOST_WIDE_INT low;
5772 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5773 TREE_INT_CST_HIGH (arg0),
5775 t = build_int_2 (low, high);
5776 TREE_TYPE (t) = type;
5778 = (TREE_OVERFLOW (arg0)
5779 | force_fit_type (t, overflow));
5780 TREE_CONSTANT_OVERFLOW (t)
5781 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5784 else if (TREE_CODE (arg0) == REAL_CST)
5786 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5787 t = build_real (type,
5788 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5791 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5792 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5793 /* Convert fabs((double)float) into (double)fabsf(float). */
5794 else if (TREE_CODE (arg0) == NOP_EXPR
5795 && TREE_CODE (type) == REAL_TYPE)
5797 tree targ0 = strip_float_extensions (arg0);
5799 return fold_convert (type, fold (build1 (ABS_EXPR,
5803 else if (tree_expr_nonnegative_p (arg0))
5808 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5809 return fold_convert (type, arg0);
5810 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5811 return build (COMPLEX_EXPR, type,
5812 TREE_OPERAND (arg0, 0),
5813 negate_expr (TREE_OPERAND (arg0, 1)));
5814 else if (TREE_CODE (arg0) == COMPLEX_CST)
5815 return build_complex (type, TREE_REALPART (arg0),
5816 negate_expr (TREE_IMAGPART (arg0)));
5817 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5818 return fold (build (TREE_CODE (arg0), type,
5819 fold (build1 (CONJ_EXPR, type,
5820 TREE_OPERAND (arg0, 0))),
5821 fold (build1 (CONJ_EXPR,
5822 type, TREE_OPERAND (arg0, 1)))));
5823 else if (TREE_CODE (arg0) == CONJ_EXPR)
5824 return TREE_OPERAND (arg0, 0);
5830 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5831 ~ TREE_INT_CST_HIGH (arg0));
5832 TREE_TYPE (t) = type;
5833 force_fit_type (t, 0);
5834 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5835 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5837 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5838 return TREE_OPERAND (arg0, 0);
5842 /* A + (-B) -> A - B */
5843 if (TREE_CODE (arg1) == NEGATE_EXPR)
5844 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5845 /* (-A) + B -> B - A */
5846 if (TREE_CODE (arg0) == NEGATE_EXPR)
5847 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5848 else if (! FLOAT_TYPE_P (type))
5850 if (integer_zerop (arg1))
5851 return non_lvalue (fold_convert (type, arg0));
5853 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5854 with a constant, and the two constants have no bits in common,
5855 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
5857 if (TREE_CODE (arg0) == BIT_AND_EXPR
5858 && TREE_CODE (arg1) == BIT_AND_EXPR
5859 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5860 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5861 && integer_zerop (const_binop (BIT_AND_EXPR,
5862 TREE_OPERAND (arg0, 1),
5863 TREE_OPERAND (arg1, 1), 0)))
5865 code = BIT_IOR_EXPR;
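/* Example (hypothetical): in "(x & 0xf0) + (y & 0x0f)" the two masks share
   no bits, so no carries can occur and the addition behaves exactly like a
   bitwise OR, which the IOR code may simplify further.  */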
5869 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5870 (plus (plus (mult) (mult)) (foo)) so that we can
5871 take advantage of the factoring cases below. */
5872 if ((TREE_CODE (arg0) == PLUS_EXPR
5873 && TREE_CODE (arg1) == MULT_EXPR)
5874 || (TREE_CODE (arg1) == PLUS_EXPR
5875 && TREE_CODE (arg0) == MULT_EXPR))
5877 tree parg0, parg1, parg, marg;
5879 if (TREE_CODE (arg0) == PLUS_EXPR)
5880 parg = arg0, marg = arg1;
5882 parg = arg1, marg = arg0;
5883 parg0 = TREE_OPERAND (parg, 0);
5884 parg1 = TREE_OPERAND (parg, 1);
5888 if (TREE_CODE (parg0) == MULT_EXPR
5889 && TREE_CODE (parg1) != MULT_EXPR)
5890 return fold (build (PLUS_EXPR, type,
5891 fold (build (PLUS_EXPR, type,
5892 fold_convert (type, parg0),
5893 fold_convert (type, marg))),
5894 fold_convert (type, parg1)));
5895 if (TREE_CODE (parg0) != MULT_EXPR
5896 && TREE_CODE (parg1) == MULT_EXPR)
5897 return fold (build (PLUS_EXPR, type,
5898 fold (build (PLUS_EXPR, type,
5899 fold_convert (type, parg1),
5900 fold_convert (type, marg))),
5901 fold_convert (type, parg0)));
5904 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5906 tree arg00, arg01, arg10, arg11;
5907 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5909 /* (A * C) + (B * C) -> (A+B) * C.
5910 We are most concerned about the case where C is a constant,
5911 but other combinations show up during loop reduction. Since
5912 it is not difficult, try all four possibilities. */
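/* Examples (hypothetical): "a * c + b * c" becomes "(a + b) * c"; with
   constant multipliers, "i * 12 + j * 4" factors through the common power
   of two as "(i * 3 + j) * 4", a pattern that shows up in multi-dimensional
   array indexing.  */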
5914 arg00 = TREE_OPERAND (arg0, 0);
5915 arg01 = TREE_OPERAND (arg0, 1);
5916 arg10 = TREE_OPERAND (arg1, 0);
5917 arg11 = TREE_OPERAND (arg1, 1);
5920 if (operand_equal_p (arg01, arg11, 0))
5921 same = arg01, alt0 = arg00, alt1 = arg10;
5922 else if (operand_equal_p (arg00, arg10, 0))
5923 same = arg00, alt0 = arg01, alt1 = arg11;
5924 else if (operand_equal_p (arg00, arg11, 0))
5925 same = arg00, alt0 = arg01, alt1 = arg10;
5926 else if (operand_equal_p (arg01, arg10, 0))
5927 same = arg01, alt0 = arg00, alt1 = arg11;
5929 /* No identical multiplicands; see if we can find a common
5930 power-of-two factor in non-power-of-two multiplies. This
5931 can help in multi-dimensional array access. */
5932 else if (TREE_CODE (arg01) == INTEGER_CST
5933 && TREE_CODE (arg11) == INTEGER_CST
5934 && TREE_INT_CST_HIGH (arg01) == 0
5935 && TREE_INT_CST_HIGH (arg11) == 0)
5937 HOST_WIDE_INT int01, int11, tmp;
5938 int01 = TREE_INT_CST_LOW (arg01);
5939 int11 = TREE_INT_CST_LOW (arg11);
5941 /* Move min of absolute values to int11. */
5942 if ((int01 >= 0 ? int01 : -int01)
5943 < (int11 >= 0 ? int11 : -int11))
5945 tmp = int01, int01 = int11, int11 = tmp;
5946 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5947 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5950 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5952 alt0 = fold (build (MULT_EXPR, type, arg00,
5953 build_int_2 (int01 / int11, 0)));
5960 return fold (build (MULT_EXPR, type,
5961 fold (build (PLUS_EXPR, type, alt0, alt1)),
5967 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5968 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5969 return non_lvalue (fold_convert (type, arg0));
5971 /* Likewise if the operands are reversed. */
5972 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5973 return non_lvalue (fold_convert (type, arg1));
5975 /* Convert x+x into x*2.0. */
5976 if (operand_equal_p (arg0, arg1, 0)
5977 && SCALAR_FLOAT_TYPE_P (type))
5978 return fold (build (MULT_EXPR, type, arg0,
5979 build_real (type, dconst2)));
5981 /* Convert x*c+x into x*(c+1). */
5982 if (flag_unsafe_math_optimizations
5983 && TREE_CODE (arg0) == MULT_EXPR
5984 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5985 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5986 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
5990 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5991 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5992 return fold (build (MULT_EXPR, type, arg1,
5993 build_real (type, c)));
5996 /* Convert x+x*c into x*(c+1). */
5997 if (flag_unsafe_math_optimizations
5998 && TREE_CODE (arg1) == MULT_EXPR
5999 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6000 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6001 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6005 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6006 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6007 return fold (build (MULT_EXPR, type, arg0,
6008 build_real (type, c)));
6011 /* Convert x*c1+x*c2 into x*(c1+c2). */
6012 if (flag_unsafe_math_optimizations
6013 && TREE_CODE (arg0) == MULT_EXPR
6014 && TREE_CODE (arg1) == MULT_EXPR
6015 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6016 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6017 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6018 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6019 && operand_equal_p (TREE_OPERAND (arg0, 0),
6020 TREE_OPERAND (arg1, 0), 0))
6022 REAL_VALUE_TYPE c1, c2;
6024 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6025 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6026 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6027 return fold (build (MULT_EXPR, type,
6028 TREE_OPERAND (arg0, 0),
6029 build_real (type, c1)));
bit_rotate:
6034 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6035 is a rotate of A by C1 bits. */
6036 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6037 is a rotate of A by B bits. */
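/* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29) is A rotated
left by 3, and (A << B) + (A >> (32 - B)) is A rotated left by B. */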
6039 enum tree_code code0, code1;
6040 code0 = TREE_CODE (arg0);
6041 code1 = TREE_CODE (arg1);
6042 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6043 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6044 && operand_equal_p (TREE_OPERAND (arg0, 0),
6045 TREE_OPERAND (arg1, 0), 0)
6046 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6048 tree tree01, tree11;
6049 enum tree_code code01, code11;
6051 tree01 = TREE_OPERAND (arg0, 1);
6052 tree11 = TREE_OPERAND (arg1, 1);
6053 STRIP_NOPS (tree01);
6054 STRIP_NOPS (tree11);
6055 code01 = TREE_CODE (tree01);
6056 code11 = TREE_CODE (tree11);
6057 if (code01 == INTEGER_CST
6058 && code11 == INTEGER_CST
6059 && TREE_INT_CST_HIGH (tree01) == 0
6060 && TREE_INT_CST_HIGH (tree11) == 0
6061 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6062 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6063 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6064 code0 == LSHIFT_EXPR ? tree01 : tree11);
6065 else if (code11 == MINUS_EXPR)
6067 tree tree110, tree111;
6068 tree110 = TREE_OPERAND (tree11, 0);
6069 tree111 = TREE_OPERAND (tree11, 1);
6070 STRIP_NOPS (tree110);
6071 STRIP_NOPS (tree111);
6072 if (TREE_CODE (tree110) == INTEGER_CST
6073 && 0 == compare_tree_int (tree110, TYPE_PRECISION
6075 (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6077 && operand_equal_p (tree01, tree111, 0))
6078 return build ((code0 == LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR),
6081 type, TREE_OPERAND (arg0, 0), tree01);
6083 else if (code01 == MINUS_EXPR)
6085 tree tree010, tree011;
6086 tree010 = TREE_OPERAND (tree01, 0);
6087 tree011 = TREE_OPERAND (tree01, 1);
6088 STRIP_NOPS (tree010);
6089 STRIP_NOPS (tree011);
6090 if (TREE_CODE (tree010) == INTEGER_CST
6091 && 0 == compare_tree_int (tree010, TYPE_PRECISION
6093 (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6095 && operand_equal_p (tree11, tree011, 0))
6096 return build ((code0 != LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR),
6099 type, TREE_OPERAND (arg0, 0), tree11);
6105 /* In most languages, can't associate operations on floats through
6106 parentheses. Rather than remember where the parentheses were, we
6107 don't associate floats at all, unless the user has specified
6108 -funsafe-math-optimizations. */
6111 if (! wins && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6113 tree var0, con0, lit0, minus_lit0;
6114 tree var1, con1, lit1, minus_lit1;
6116 /* Split both trees into variables, constants, and literals. Then
6117 associate each group together, the constants with literals,
6118 then the result with variables. This increases the chances of
6119 literals being recombined later and of generating relocatable
6120 expressions for the sum of a constant and literal. */
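/* For example, (x + 5) + (y + 7) splits into variables x, y and literals
5, 7, which recombine as (x + y) + 12. */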
6121 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6122 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6123 code == MINUS_EXPR);
6125 /* Only do something if we found more than two objects. Otherwise,
6126 nothing has changed and we risk infinite recursion. */
6127 if (2 < ((var0 != 0) + (var1 != 0)
6128 + (con0 != 0) + (con1 != 0)
6129 + (lit0 != 0) + (lit1 != 0)
6130 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6132 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6133 if (code == MINUS_EXPR) code = PLUS_EXPR;
6136 var0 = associate_trees (var0, var1, code, type);
6137 con0 = associate_trees (con0, con1, code, type);
6138 lit0 = associate_trees (lit0, lit1, code, type);
6139 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6141 /* Preserve the MINUS_EXPR if the negative part of the literal is
6142 greater than the positive part. Otherwise, the multiplicative
6143 folding code (i.e. extract_muldiv) may be fooled when
6144 unsigned constants are subtracted, as in the following
6145 example: ((X*2 + 4) - 8U)/2. */
6146 if (minus_lit0 && lit0)
6148 if (TREE_CODE (lit0) == INTEGER_CST
6149 && TREE_CODE (minus_lit0) == INTEGER_CST
6150 && tree_int_cst_lt (lit0, minus_lit0))
6152 minus_lit0 = associate_trees (minus_lit0, lit0, MINUS_EXPR, type), lit0 = 0;
6158 else lit0 = associate_trees (lit0, minus_lit0, MINUS_EXPR, type), minus_lit0 = 0;
6166 if (minus_lit0 && con0 == 0) return fold_convert (type,
6167 associate_trees (var0, minus_lit0, MINUS_EXPR, type));
6171 if (minus_lit0) { con0 = associate_trees (con0, minus_lit0, MINUS_EXPR, type);
6173 return fold_convert (type,
6174 associate_trees (var0, con0, PLUS_EXPR, type)); }
6179 con0 = associate_trees (con0, lit0, code, type);
6180 return fold_convert (type, associate_trees (var0, con0, code, type));
6187 t1 = const_binop (code, arg0, arg1, 0);
6188 if (t1 != NULL_TREE)
6190 /* The return value should always have
6191 the same type as the original expression. */
6192 if (TREE_TYPE (t1) != TREE_TYPE (t))
6193 t1 = fold_convert (TREE_TYPE (t), t1);
case MINUS_EXPR:
6200 /* A - (-B) -> A + B */
6201 if (TREE_CODE (arg1) == NEGATE_EXPR)
6202 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6203 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6204 if (TREE_CODE (arg0) == NEGATE_EXPR
6205 && (FLOAT_TYPE_P (type)
6206 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6207 && negate_expr_p (arg1)
6208 && reorder_operands_p (arg0, arg1))
6209 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6210 TREE_OPERAND (arg0, 0)));
6212 if (! FLOAT_TYPE_P (type))
6214 if (! wins && integer_zerop (arg0))
6215 return negate_expr (fold_convert (type, arg1));
6216 if (integer_zerop (arg1))
6217 return non_lvalue (fold_convert (type, arg0));
6219 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
6220 about the case where C is a constant, just try one of the
6221 four possibilities. */
6223 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
6224 && operand_equal_p (TREE_OPERAND (arg0, 1),
6225 TREE_OPERAND (arg1, 1), 0))
6226 return fold (build (MULT_EXPR, type,
6227 fold (build (MINUS_EXPR, type,
6228 TREE_OPERAND (arg0, 0),
6229 TREE_OPERAND (arg1, 0))),
6230 TREE_OPERAND (arg0, 1)));
6232 /* Fold A - (A & B) into ~B & A. */
6233 if (!TREE_SIDE_EFFECTS (arg0)
6234 && TREE_CODE (arg1) == BIT_AND_EXPR)
6236 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6237 return fold (build (BIT_AND_EXPR, type,
6238 fold (build1 (BIT_NOT_EXPR, type,
6239 TREE_OPERAND (arg1, 0))),
6241 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6242 return fold (build (BIT_AND_EXPR, type,
6243 fold (build1 (BIT_NOT_EXPR, type,
6244 TREE_OPERAND (arg1, 1))),
6248 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6249 any power of 2 minus 1. */
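/* For example, with B == 7: (A & ~7) - (A & 7) == (A ^ 7) - 7, since XORing
with 7 merely complements the three low bits. */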
6250 if (TREE_CODE (arg0) == BIT_AND_EXPR
6251 && TREE_CODE (arg1) == BIT_AND_EXPR
6252 && operand_equal_p (TREE_OPERAND (arg0, 0),
6253 TREE_OPERAND (arg1, 0), 0))
6255 tree mask0 = TREE_OPERAND (arg0, 1);
6256 tree mask1 = TREE_OPERAND (arg1, 1);
6257 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6259 if (operand_equal_p (tem, mask1, 0))
6261 tem = fold (build (BIT_XOR_EXPR, type,
6262 TREE_OPERAND (arg0, 0), mask1));
6263 return fold (build (MINUS_EXPR, type, tem, mask1));
6268 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6269 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6270 return non_lvalue (fold_convert (type, arg0));
6272 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6273 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6274 (-ARG1 + ARG0) reduces to -ARG1. */
6275 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6276 return negate_expr (fold_convert (type, arg1));
6278 /* Fold &x - &x. This can happen from &x.foo - &x.
6279 This is unsafe for certain floats even in non-IEEE formats.
6280 In IEEE, it is unsafe because it does wrong for NaNs.
6281 Also note that operand_equal_p is always false if an operand is volatile. */
6284 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6285 && operand_equal_p (arg0, arg1, 0))
6286 return fold_convert (type, integer_zero_node);
case MULT_EXPR:
6291 /* (-A) * (-B) -> A * B */
6292 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6293 return fold (build (MULT_EXPR, type,
6294 TREE_OPERAND (arg0, 0),
6295 negate_expr (arg1)));
6296 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6297 return fold (build (MULT_EXPR, type,
6299 TREE_OPERAND (arg1, 0)));
6301 if (! FLOAT_TYPE_P (type))
6303 if (integer_zerop (arg1))
6304 return omit_one_operand (type, arg1, arg0);
6305 if (integer_onep (arg1))
6306 return non_lvalue (fold_convert (type, arg0));
6308 /* (a * (1 << b)) is (a << b) */
6309 if (TREE_CODE (arg1) == LSHIFT_EXPR
6310 && integer_onep (TREE_OPERAND (arg1, 0)))
6311 return fold (build (LSHIFT_EXPR, type, arg0,
6312 TREE_OPERAND (arg1, 1)));
6313 if (TREE_CODE (arg0) == LSHIFT_EXPR
6314 && integer_onep (TREE_OPERAND (arg0, 0)))
6315 return fold (build (LSHIFT_EXPR, type, arg1,
6316 TREE_OPERAND (arg0, 1)));
6318 if (TREE_CODE (arg1) == INTEGER_CST
6319 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6320 fold_convert (type, arg1),
6322 return fold_convert (type, tem);
6327 /* Maybe fold x * 0 to 0. The expressions aren't the same
6328 when x is NaN, since x * 0 is also NaN. Nor are they the
6329 same in modes with signed zeros, since multiplying a
6330 negative value by 0 gives -0, not +0. */
6331 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6332 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6333 && real_zerop (arg1))
6334 return omit_one_operand (type, arg1, arg0);
6335 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6336 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6337 && real_onep (arg1))
6338 return non_lvalue (fold_convert (type, arg0));
6340 /* Transform x * -1.0 into -x. */
6341 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6342 && real_minus_onep (arg1))
6343 return fold (build1 (NEGATE_EXPR, type, arg0));
6345 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6346 if (flag_unsafe_math_optimizations
6347 && TREE_CODE (arg0) == RDIV_EXPR
6348 && TREE_CODE (arg1) == REAL_CST
6349 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6351 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6354 return fold (build (RDIV_EXPR, type, tem,
6355 TREE_OPERAND (arg0, 1)));
6358 if (flag_unsafe_math_optimizations)
6360 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6361 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6363 /* Optimizations of sqrt(...)*sqrt(...). */
6364 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6365 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6366 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6368 tree sqrtfn, arg, arglist;
6369 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6370 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6372 /* Optimize sqrt(x)*sqrt(x) as x. */
6373 if (operand_equal_p (arg00, arg10, 0)
6374 && ! HONOR_SNANS (TYPE_MODE (type)))
6377 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6378 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6379 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6380 arglist = build_tree_list (NULL_TREE, arg);
6381 return build_function_call_expr (sqrtfn, arglist);
6384 /* Optimize expN(x)*expN(y) as expN(x+y). */
6385 if (fcode0 == fcode1
6386 && (fcode0 == BUILT_IN_EXP
6387 || fcode0 == BUILT_IN_EXPF
6388 || fcode0 == BUILT_IN_EXPL
6389 || fcode0 == BUILT_IN_EXP2
6390 || fcode0 == BUILT_IN_EXP2F
6391 || fcode0 == BUILT_IN_EXP2L
6392 || fcode0 == BUILT_IN_EXP10
6393 || fcode0 == BUILT_IN_EXP10F
6394 || fcode0 == BUILT_IN_EXP10L
6395 || fcode0 == BUILT_IN_POW10
6396 || fcode0 == BUILT_IN_POW10F
6397 || fcode0 == BUILT_IN_POW10L))
6399 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6400 tree arg = build (PLUS_EXPR, type,
6401 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6402 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6403 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6404 return build_function_call_expr (expfn, arglist);
6407 /* Optimizations of pow(...)*pow(...). */
6408 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6409 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6410 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6412 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6413 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6415 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6416 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6419 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6420 if (operand_equal_p (arg01, arg11, 0))
6422 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6423 tree arg = build (MULT_EXPR, type, arg00, arg10);
6424 tree arglist = tree_cons (NULL_TREE, fold (arg),
6425 build_tree_list (NULL_TREE,
6427 return build_function_call_expr (powfn, arglist);
6430 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6431 if (operand_equal_p (arg00, arg10, 0))
6433 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6434 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6435 tree arglist = tree_cons (NULL_TREE, arg00,
6436 build_tree_list (NULL_TREE,
6438 return build_function_call_expr (powfn, arglist);
6442 /* Optimize tan(x)*cos(x) as sin(x). */
6443 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6444 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6445 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6446 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6447 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6448 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6449 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6450 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6458 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6462 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6466 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6472 if (sinfn != NULL_TREE)
6473 return build_function_call_expr (sinfn,
6474 TREE_OPERAND (arg0, 1));
6477 /* Optimize x*pow(x,c) as pow(x,c+1). */
6478 if (fcode1 == BUILT_IN_POW
6479 || fcode1 == BUILT_IN_POWF
6480 || fcode1 == BUILT_IN_POWL)
6482 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6483 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6485 if (TREE_CODE (arg11) == REAL_CST
6486 && ! TREE_CONSTANT_OVERFLOW (arg11)
6487 && operand_equal_p (arg0, arg10, 0))
6489 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6493 c = TREE_REAL_CST (arg11);
6494 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6495 arg = build_real (type, c);
6496 arglist = build_tree_list (NULL_TREE, arg);
6497 arglist = tree_cons (NULL_TREE, arg0, arglist);
6498 return build_function_call_expr (powfn, arglist);
6502 /* Optimize pow(x,c)*x as pow(x,c+1). */
6503 if (fcode0 == BUILT_IN_POW
6504 || fcode0 == BUILT_IN_POWF
6505 || fcode0 == BUILT_IN_POWL)
6507 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6508 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6510 if (TREE_CODE (arg01) == REAL_CST
6511 && ! TREE_CONSTANT_OVERFLOW (arg01)
6512 && operand_equal_p (arg1, arg00, 0))
6514 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6518 c = TREE_REAL_CST (arg01);
6519 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6520 arg = build_real (type, c);
6521 arglist = build_tree_list (NULL_TREE, arg);
6522 arglist = tree_cons (NULL_TREE, arg1, arglist);
6523 return build_function_call_expr (powfn, arglist);
6527 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6529 && operand_equal_p (arg0, arg1, 0))
6533 if (type == double_type_node)
6534 powfn = implicit_built_in_decls[BUILT_IN_POW];
6535 else if (type == float_type_node)
6536 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6537 else if (type == long_double_type_node)
6538 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6544 tree arg = build_real (type, dconst2);
6545 tree arglist = build_tree_list (NULL_TREE, arg);
6546 arglist = tree_cons (NULL_TREE, arg0, arglist);
6547 return build_function_call_expr (powfn, arglist);
case BIT_IOR_EXPR: bit_ior:
6556 if (integer_all_onesp (arg1))
6557 return omit_one_operand (type, arg1, arg0);
6558 if (integer_zerop (arg1))
6559 return non_lvalue (fold_convert (type, arg0));
6560 t1 = distribute_bit_expr (code, type, arg0, arg1);
6561 if (t1 != NULL_TREE) return t1;
6564 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6566 This results in more efficient code for machines without a NAND
6567 instruction. Combine will canonicalize to the first form
6568 which will allow use of NAND instructions provided by the
6569 backend if they exist. */
6570 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6571 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6573 return fold (build1 (BIT_NOT_EXPR, type,
6574 build (BIT_AND_EXPR, type,
6575 TREE_OPERAND (arg0, 0),
6576 TREE_OPERAND (arg1, 0))));
6579 /* See if this can be simplified into a rotate first. If that
6580 is unsuccessful continue in the association code. */
goto bit_rotate;
case BIT_XOR_EXPR:
6584 if (integer_zerop (arg1))
6585 return non_lvalue (fold_convert (type, arg0));
6586 if (integer_all_onesp (arg1))
6587 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6589 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6590 with a constant, and the two constants have no bits in common,
6591 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
6593 if (TREE_CODE (arg0) == BIT_AND_EXPR
6594 && TREE_CODE (arg1) == BIT_AND_EXPR
6595 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6596 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6597 && integer_zerop (const_binop (BIT_AND_EXPR,
6598 TREE_OPERAND (arg0, 1),
6599 TREE_OPERAND (arg1, 1), 0)))
6601 { code = BIT_IOR_EXPR; goto bit_ior; }
6605 /* See if this can be simplified into a rotate first. If that
6606 is unsuccessful continue in the association code. */
goto bit_rotate;
case BIT_AND_EXPR:
6610 if (integer_all_onesp (arg1))
6611 return non_lvalue (fold_convert (type, arg0));
6612 if (integer_zerop (arg1))
6613 return omit_one_operand (type, arg1, arg0);
6614 t1 = distribute_bit_expr (code, type, arg0, arg1);
6615 if (t1 != NULL_TREE) return t1;
6617 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6618 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6619 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6622 unsigned int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6624 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6625 && (~TREE_INT_CST_LOW (arg1)
6626 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6627 return fold_convert (type, TREE_OPERAND (arg0, 0));
6630 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6632 This results in more efficient code for machines without a NOR
6633 instruction. Combine will canonicalize to the first form
6634 which will allow use of NOR instructions provided by the
6635 backend if they exist. */
6636 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6637 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6639 return fold (build1 (BIT_NOT_EXPR, type,
6640 build (BIT_IOR_EXPR, type,
6641 TREE_OPERAND (arg0, 0),
6642 TREE_OPERAND (arg1, 0))));
case RDIV_EXPR:
6648 /* Don't touch a floating-point divide by zero unless the mode
6649 of the constant can represent infinity. */
6650 if (TREE_CODE (arg1) == REAL_CST
6651 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6652 && real_zerop (arg1))
6655 /* (-A) / (-B) -> A / B */
6656 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6657 return fold (build (RDIV_EXPR, type,
6658 TREE_OPERAND (arg0, 0),
6659 negate_expr (arg1)));
6660 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6661 return fold (build (RDIV_EXPR, type,
6663 TREE_OPERAND (arg1, 0)));
6665 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6666 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6667 && real_onep (arg1))
6668 return non_lvalue (fold_convert (type, arg0));
6670 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6671 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6672 && real_minus_onep (arg1))
6673 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6675 /* If ARG1 is a constant, we can convert this to a multiply by the
6676 reciprocal. This does not have the same rounding properties,
6677 so only do this if -funsafe-math-optimizations. We can actually
6678 always safely do it if ARG1 is a power of two, but it's hard to
6679 tell if it is or not in a portable manner. */
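/* For example, x / 2.0 becomes x * 0.5 (always exact), whereas x / 3.0
becomes x * (1.0/3.0) only under -funsafe-math-optimizations, because 1/3
has no exact binary representation. */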
6680 if (TREE_CODE (arg1) == REAL_CST)
6682 if (flag_unsafe_math_optimizations
6683 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6685 return fold (build (MULT_EXPR, type, arg0, tem));
6686 /* Find the reciprocal if optimizing and the result is exact. */
6690 r = TREE_REAL_CST (arg1);
6691 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6693 tem = build_real (type, r);
6694 return fold (build (MULT_EXPR, type, arg0, tem));
6698 /* Convert A/B/C to A/(B*C). */
6699 if (flag_unsafe_math_optimizations
6700 && TREE_CODE (arg0) == RDIV_EXPR)
6701 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6702 fold (build (MULT_EXPR, type,
6703 TREE_OPERAND (arg0, 1), arg1))));
6705 /* Convert A/(B/C) to (A/B)*C. */
6706 if (flag_unsafe_math_optimizations
6707 && TREE_CODE (arg1) == RDIV_EXPR)
6708 return fold (build (MULT_EXPR, type,
6709 fold (build (RDIV_EXPR, type, arg0,
6710 TREE_OPERAND (arg1, 0))),
6711 TREE_OPERAND (arg1, 1)));
6713 /* Convert C1/(X*C2) into (C1/C2)/X. */
6714 if (flag_unsafe_math_optimizations
6715 && TREE_CODE (arg1) == MULT_EXPR
6716 && TREE_CODE (arg0) == REAL_CST
6717 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6719 tree tem = const_binop (RDIV_EXPR, arg0,
6720 TREE_OPERAND (arg1, 1), 0);
6722 return fold (build (RDIV_EXPR, type, tem,
6723 TREE_OPERAND (arg1, 0)));
6726 if (flag_unsafe_math_optimizations)
6728 enum built_in_function fcode = builtin_mathfn_code (arg1);
6729 /* Optimize x/expN(y) into x*expN(-y). */
6730 if (fcode == BUILT_IN_EXP
6731 || fcode == BUILT_IN_EXPF
6732 || fcode == BUILT_IN_EXPL
6733 || fcode == BUILT_IN_EXP2
6734 || fcode == BUILT_IN_EXP2F
6735 || fcode == BUILT_IN_EXP2L
6736 || fcode == BUILT_IN_EXP10
6737 || fcode == BUILT_IN_EXP10F
6738 || fcode == BUILT_IN_EXP10L
6739 || fcode == BUILT_IN_POW10
6740 || fcode == BUILT_IN_POW10F
6741 || fcode == BUILT_IN_POW10L)
6743 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6744 tree arg = build1 (NEGATE_EXPR, type,
6745 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6746 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6747 arg1 = build_function_call_expr (expfn, arglist);
6748 return fold (build (MULT_EXPR, type, arg0, arg1));
6751 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6752 if (fcode == BUILT_IN_POW
6753 || fcode == BUILT_IN_POWF
6754 || fcode == BUILT_IN_POWL)
6756 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6757 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6758 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6759 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6760 tree arglist = tree_cons(NULL_TREE, arg10,
6761 build_tree_list (NULL_TREE, neg11));
6762 arg1 = build_function_call_expr (powfn, arglist);
6763 return fold (build (MULT_EXPR, type, arg0, arg1));
6767 if (flag_unsafe_math_optimizations)
6769 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6770 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6772 /* Optimize sin(x)/cos(x) as tan(x). */
6773 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6774 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6775 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6776 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6777 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6781 if (fcode0 == BUILT_IN_SIN)
6782 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6783 else if (fcode0 == BUILT_IN_SINF)
6784 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6785 else if (fcode0 == BUILT_IN_SINL)
6786 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6790 if (tanfn != NULL_TREE)
6791 return build_function_call_expr (tanfn,
6792 TREE_OPERAND (arg0, 1));
6795 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6796 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6797 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6798 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6799 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6800 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6804 if (fcode0 == BUILT_IN_COS)
6805 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6806 else if (fcode0 == BUILT_IN_COSF)
6807 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6808 else if (fcode0 == BUILT_IN_COSL)
6809 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6813 if (tanfn != NULL_TREE)
6815 tree tmp = TREE_OPERAND (arg0, 1);
6816 tmp = build_function_call_expr (tanfn, tmp);
6817 return fold (build (RDIV_EXPR, type,
6818 build_real (type, dconst1),
6823 /* Optimize pow(x,c)/x as pow(x,c-1). */
6824 if (fcode0 == BUILT_IN_POW
6825 || fcode0 == BUILT_IN_POWF
6826 || fcode0 == BUILT_IN_POWL)
6828 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6829 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6830 if (TREE_CODE (arg01) == REAL_CST
6831 && ! TREE_CONSTANT_OVERFLOW (arg01)
6832 && operand_equal_p (arg1, arg00, 0))
6834 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6838 c = TREE_REAL_CST (arg01);
6839 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6840 arg = build_real (type, c);
6841 arglist = build_tree_list (NULL_TREE, arg);
6842 arglist = tree_cons (NULL_TREE, arg1, arglist);
6843 return build_function_call_expr (powfn, arglist);
6849 case TRUNC_DIV_EXPR:
6850 case ROUND_DIV_EXPR:
6851 case FLOOR_DIV_EXPR:
case CEIL_DIV_EXPR:
6853 case EXACT_DIV_EXPR:
6854 if (integer_onep (arg1))
6855 return non_lvalue (fold_convert (type, arg0));
6856 if (integer_zerop (arg1))
6859 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6860 operation, EXACT_DIV_EXPR.
6862 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6863 At one time others generated faster code, but it's not clear whether they do
6864 after the last round of changes to the DIV code in expmed.c. */
6865 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6866 && multiple_of_p (type, arg0, arg1))
6867 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6869 if (TREE_CODE (arg1) == INTEGER_CST
6870 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6872 return fold_convert (type, tem);
case CEIL_MOD_EXPR:
6877 case FLOOR_MOD_EXPR:
6878 case ROUND_MOD_EXPR:
6879 case TRUNC_MOD_EXPR:
6880 if (integer_onep (arg1))
6881 return omit_one_operand (type, integer_zero_node, arg0);
6882 if (integer_zerop (arg1))
6885 if (TREE_CODE (arg1) == INTEGER_CST
6886 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6888 return fold_convert (type, tem);
case LROTATE_EXPR: case RROTATE_EXPR:
6894 if (integer_all_onesp (arg0))
6895 return omit_one_operand (type, arg0, arg1);
case RSHIFT_EXPR:
6899 /* Optimize -1 >> x for arithmetic right shifts. */
6900 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6901 return omit_one_operand (type, arg0, arg1);
6902 /* ... fall through ... */
case LSHIFT_EXPR:
6906 if (integer_zerop (arg1))
6907 return non_lvalue (fold_convert (type, arg0));
6908 if (integer_zerop (arg0))
6909 return omit_one_operand (type, arg0, arg1);
6911 /* Since negative shift count is not well-defined,
6912 don't try to compute it in the compiler. */
6913 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6915 /* Rewrite an LROTATE_EXPR by a constant into an
6916 RROTATE_EXPR by a new constant. */
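/* For example, in a 32-bit mode a rotate left by 5 becomes a rotate right
by 27. */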
6917 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6919 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6920 tem = fold_convert (TREE_TYPE (arg1), tem);
6921 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6922 return fold (build (RROTATE_EXPR, type, arg0, tem));
6925 /* If we have a rotate of a bit operation with the rotate count and
6926 the second operand of the bit operation both constant,
6927 permute the two operations. */
6928 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6929 && (TREE_CODE (arg0) == BIT_AND_EXPR
6930 || TREE_CODE (arg0) == BIT_IOR_EXPR
6931 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6933 return fold (build (TREE_CODE (arg0), type,
6934 fold (build (code, type,
6935 TREE_OPERAND (arg0, 0), arg1)),
6936 fold (build (code, type,
6937 TREE_OPERAND (arg0, 1), arg1))));
6939 /* Two consecutive rotates adding up to the width of the mode can be ignored. */
6941 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6942 && TREE_CODE (arg0) == RROTATE_EXPR
6943 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6944 && TREE_INT_CST_HIGH (arg1) == 0
6945 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6946 && ((TREE_INT_CST_LOW (arg1)
6947 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6948 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6949 return TREE_OPERAND (arg0, 0);
case MIN_EXPR:
6954 if (operand_equal_p (arg0, arg1, 0))
6955 return omit_one_operand (type, arg0, arg1);
6956 if (INTEGRAL_TYPE_P (type)
6957 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6958 return omit_one_operand (type, arg1, arg0);
case MAX_EXPR:
6962 if (operand_equal_p (arg0, arg1, 0))
6963 return omit_one_operand (type, arg0, arg1);
6964 if (INTEGRAL_TYPE_P (type)
6965 && TYPE_MAX_VALUE (type)
6966 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6967 return omit_one_operand (type, arg1, arg0);
6970 case TRUTH_NOT_EXPR:
6971 /* Note that the operand of this must be an int
6972 and its values must be 0 or 1.
6973 ("true" is a fixed value perhaps depending on the language,
6974 but we don't handle values other than 1 correctly yet.) */
6975 tem = invert_truthvalue (arg0);
6976 /* Avoid infinite recursion. */
6977 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6979 tem = fold_single_bit_test (code, arg0, arg1, type);
6984 return fold_convert (type, tem);
6986 case TRUTH_ANDIF_EXPR:
6987 /* Note that the operands of this must be ints
6988 and their values must be 0 or 1.
6989 ("true" is a fixed value perhaps depending on the language.) */
6990 /* If first arg is constant zero, return it. */
6991 if (integer_zerop (arg0))
6992 return fold_convert (type, arg0);
6993 case TRUTH_AND_EXPR:
6994 /* If either arg is constant true, drop it. */
6995 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6996 return non_lvalue (fold_convert (type, arg1));
6997 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6998 /* Preserve sequence points. */
6999 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7000 return non_lvalue (fold_convert (type, arg0));
7001 /* If second arg is constant zero, result is zero, but first arg
7002 must be evaluated. */
7003 if (integer_zerop (arg1))
7004 return omit_one_operand (type, arg1, arg0);
7005 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7006 case will be handled here. */
7007 if (integer_zerop (arg0))
7008 return omit_one_operand (type, arg0, arg1);
7011 /* We only do these simplifications if we are optimizing. */
7015 /* Check for things like (A || B) && (A || C). We can convert this
7016 to A || (B && C). Note that either operator can be any of the four
7017 truth and/or operations and the transformation will still be
7018 valid. Also note that we only care about order for the
7019 ANDIF and ORIF operators. If B contains side effects, this
7020 might change the truth-value of A. */
7021 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7022 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7023 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7024 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7025 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7026 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7028 tree a00 = TREE_OPERAND (arg0, 0);
7029 tree a01 = TREE_OPERAND (arg0, 1);
7030 tree a10 = TREE_OPERAND (arg1, 0);
7031 tree a11 = TREE_OPERAND (arg1, 1);
7032 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7033 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7034 && (code == TRUTH_AND_EXPR
7035 || code == TRUTH_OR_EXPR));
7037 if (operand_equal_p (a00, a10, 0))
7038 return fold (build (TREE_CODE (arg0), type, a00,
7039 fold (build (code, type, a01, a11))));
7040 else if (commutative && operand_equal_p (a00, a11, 0))
7041 return fold (build (TREE_CODE (arg0), type, a00,
7042 fold (build (code, type, a01, a10))));
7043 else if (commutative && operand_equal_p (a01, a10, 0))
7044 return fold (build (TREE_CODE (arg0), type, a01,
7045 fold (build (code, type, a00, a11))));
7047 /* This case is tricky because we must either have commutative
7048 operators or else A10 must not have side-effects. */
7050 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7051 && operand_equal_p (a01, a11, 0))
7052 return fold (build (TREE_CODE (arg0), type,
7053 fold (build (code, type, a00, a10)),
7057 /* See if we can build a range comparison. */
7058 if (0 != (tem = fold_range_test (t))) return tem;
7061 /* Check for the possibility of merging component references. If our
7062 lhs is another similar operation, try to merge its rhs with our
7063 rhs. Then try to merge our lhs and rhs. */
7064 if (TREE_CODE (arg0) == code
7065 && 0 != (tem = fold_truthop (code, type,
7066 TREE_OPERAND (arg0, 1), arg1)))
7067 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7069 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0) return tem;
7074 case TRUTH_ORIF_EXPR:
7075 /* Note that the operands of this must be ints
7076 and their values must be 0 or true.
7077 ("true" is a fixed value perhaps depending on the language.) */
7078 /* If first arg is constant true, return it. */
7079 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7080 return fold_convert (type, arg0);
case TRUTH_OR_EXPR:
7082 /* If either arg is constant zero, drop it. */
7083 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7084 return non_lvalue (fold_convert (type, arg1));
7085 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7086 /* Preserve sequence points. */
7087 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7088 return non_lvalue (fold_convert (type, arg0));
7089 /* If second arg is constant true, result is true, but we must
7090 evaluate first arg. */
7091 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7092 return omit_one_operand (type, arg1, arg0);
7093 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR. */
7095 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7096 return omit_one_operand (type, arg0, arg1);
7099 case TRUTH_XOR_EXPR:
7100 /* If either arg is constant zero, drop it. */
7101 if (integer_zerop (arg0))
7102 return non_lvalue (fold_convert (type, arg1));
7103 if (integer_zerop (arg1))
7104 return non_lvalue (fold_convert (type, arg0));
7105 /* If either arg is constant true, this is a logical inversion. */
7106 if (integer_onep (arg0))
7107 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7108 if (integer_onep (arg1))
7109 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
case EQ_EXPR: case NE_EXPR:
case LT_EXPR: case GT_EXPR:
case LE_EXPR: case GE_EXPR:
7118 /* If one arg is a real or integer constant, put it last. */
7119 if (tree_swap_operands_p (arg0, arg1, true))
7120 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7122 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7124 tree targ0 = strip_float_extensions (arg0);
7125 tree targ1 = strip_float_extensions (arg1);
7126 tree newtype = TREE_TYPE (targ0);
7128 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7129 newtype = TREE_TYPE (targ1);
7131 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7132 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7133 return fold (build (code, type, fold_convert (newtype, targ0),
7134 fold_convert (newtype, targ1)));
7136 /* (-a) CMP (-b) -> b CMP a */
7137 if (TREE_CODE (arg0) == NEGATE_EXPR
7138 && TREE_CODE (arg1) == NEGATE_EXPR)
7139 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7140 TREE_OPERAND (arg0, 0)));
7142 if (TREE_CODE (arg1) == REAL_CST)
7144 REAL_VALUE_TYPE cst;
7145 cst = TREE_REAL_CST (arg1);
7147 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7148 if (TREE_CODE (arg0) == NEGATE_EXPR)
7150 fold (build (swap_tree_comparison (code), type,
7151 TREE_OPERAND (arg0, 0),
7152 build_real (TREE_TYPE (arg1),
7153 REAL_VALUE_NEGATE (cst))));
7155 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7156 /* a CMP (-0) -> a CMP 0 */
7157 if (REAL_VALUE_MINUS_ZERO (cst))
7158 return fold (build (code, type, arg0,
7159 build_real (TREE_TYPE (arg1), dconst0)));
7161 /* x != NaN is always true, other ops are always false. */
7162 if (REAL_VALUE_ISNAN (cst)
7163 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7165 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7166 return omit_one_operand (type, fold_convert (type, t), arg0);
7169 /* Fold comparisons against infinity. */
7170 if (REAL_VALUE_ISINF (cst))
7172 tem = fold_inf_compare (code, type, arg0, arg1);
7173 if (tem != NULL_TREE) return tem;
7178 /* If this is a comparison of a real constant with a PLUS_EXPR
7179 or a MINUS_EXPR of a real constant, we can convert it into a
7180 comparison with a revised real constant as long as no overflow
7181 occurs when unsafe_math_optimizations are enabled. */
7182 if (flag_unsafe_math_optimizations
7183 && TREE_CODE (arg1) == REAL_CST
7184 && (TREE_CODE (arg0) == PLUS_EXPR
7185 || TREE_CODE (arg0) == MINUS_EXPR)
7186 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7187 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7188 ? MINUS_EXPR : PLUS_EXPR,
7189 arg1, TREE_OPERAND (arg0, 1), 0))
7190 && ! TREE_CONSTANT_OVERFLOW (tem))
7191 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7193 /* Likewise, we can simplify a comparison of a real constant with
7194 a MINUS_EXPR whose first operand is also a real constant, i.e.
7195 (c1 - x) < c2 becomes x > c1-c2. */
7196 if (flag_unsafe_math_optimizations
7197 && TREE_CODE (arg1) == REAL_CST
7198 && TREE_CODE (arg0) == MINUS_EXPR
7199 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7200 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7202 && ! TREE_CONSTANT_OVERFLOW (tem))
7203 return fold (build (swap_tree_comparison (code), type,
7204 TREE_OPERAND (arg0, 1), tem));
7206 /* Fold comparisons against built-in math functions. */
7207 if (TREE_CODE (arg1) == REAL_CST
7208 && flag_unsafe_math_optimizations
7209 && ! flag_errno_math)
7211 enum built_in_function fcode = builtin_mathfn_code (arg0);
7213 if (fcode != END_BUILTINS)
7215 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7216 if (tem != NULL_TREE) return tem;
7222 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7223 if (TREE_CONSTANT (arg1)
7224 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7225 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7226 /* This optimization is invalid for ordered comparisons
7227 if CONST+INCR overflows or if foo+incr might overflow.
7228 This optimization is invalid for floating point due to rounding.
7229 For pointer types we assume overflow doesn't happen. */
7230 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7231 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7232 && (code == EQ_EXPR || code == NE_EXPR))))
7234 tree varop, newconst;
7236 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7238 newconst = fold (build (PLUS_EXPR, TREE_TYPE (arg0),
7239 arg1, TREE_OPERAND (arg0, 1)));
7240 varop = build (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7241 TREE_OPERAND (arg0, 0),
7242 TREE_OPERAND (arg0, 1));
7246 newconst = fold (build (MINUS_EXPR, TREE_TYPE (arg0),
7247 arg1, TREE_OPERAND (arg0, 1)));
7248 varop = build (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7249 TREE_OPERAND (arg0, 0),
7250 TREE_OPERAND (arg0, 1));
7254 /* If VAROP is a reference to a bitfield, we must mask
7255 the constant by the width of the field. */
7256 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7257 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7259 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7260 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7261 tree folded_compare, shift;
7263 /* First check whether the comparison would come out
7264 always the same. If we don't do that we would
7265 change the meaning with the masking. */
7266 folded_compare = fold (build (code, type,
7267 TREE_OPERAND (varop, 0), arg1));
7269 if (integer_zerop (folded_compare)
7270 || integer_onep (folded_compare))
7271 return omit_one_operand (type, folded_compare, varop);
7273 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size, 0);
7275 newconst = fold (build (LSHIFT_EXPR, TREE_TYPE (varop), newconst, shift));
7277 newconst = fold (build (RSHIFT_EXPR, TREE_TYPE (varop), newconst, shift));
7281 return fold (build (code, type, varop, newconst));
7284 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7285 This transformation affects the cases which are handled in later
7286 optimizations involving comparisons with non-negative constants. */
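/* For example, x >= 1 becomes x > 0, and x < 1 becomes x <= 0. */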
7287 if (TREE_CODE (arg1) == INTEGER_CST
7288 && TREE_CODE (arg0) != INTEGER_CST
7289 && tree_int_cst_sgn (arg1) > 0)
7294 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7295 return fold (build (GT_EXPR, type, arg0, arg1));
7298 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7299 return fold (build (LE_EXPR, type, arg0, arg1));
7306 /* Comparisons with the highest or lowest possible integer of
7307 the specified size will have known values. */
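/* For example, if arg1 is the maximum value of arg0's unsigned type,
arg0 > arg1 is always false, arg0 <= arg1 is always true, and
arg0 >= arg1 collapses to arg0 == arg1. */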
7309 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7311 if (TREE_CODE (arg1) == INTEGER_CST
7312 && ! TREE_CONSTANT_OVERFLOW (arg1)
7313 && width <= HOST_BITS_PER_WIDE_INT
7314 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7315 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7317 unsigned HOST_WIDE_INT signed_max;
7318 unsigned HOST_WIDE_INT max, min;
7320 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7322 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7324 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7330 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7333 if (TREE_INT_CST_HIGH (arg1) == 0
7334 && TREE_INT_CST_LOW (arg1) == max)
7338 return omit_one_operand (type,
7343 return fold (build (EQ_EXPR, type, arg0, arg1));
7346 return omit_one_operand (type,
7351 return fold (build (NE_EXPR, type, arg0, arg1));
7353 /* The GE_EXPR and LT_EXPR cases above are not normally
7354 reached because of previous transformations. */
7359 else if (TREE_INT_CST_HIGH (arg1) == 0
7360 && TREE_INT_CST_LOW (arg1) == max - 1)
7364 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7365 return fold (build (EQ_EXPR, type, arg0, arg1));
7367 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7368 return fold (build (NE_EXPR, type, arg0, arg1));
7372 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7373 && TREE_INT_CST_LOW (arg1) == min)
7377 return omit_one_operand (type,
7382 return fold (build (EQ_EXPR, type, arg0, arg1));
7385 return omit_one_operand (type,
7390 return fold (build (NE_EXPR, type, arg0, arg1));
7395 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7396 && TREE_INT_CST_LOW (arg1) == min + 1)
7400 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7401 return fold (build (NE_EXPR, type, arg0, arg1));
7403 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7404 return fold (build (EQ_EXPR, type, arg0, arg1));
7409 else if (TREE_INT_CST_HIGH (arg1) == 0
7410 && TREE_INT_CST_LOW (arg1) == signed_max
7411 && TREE_UNSIGNED (TREE_TYPE (arg1))
7412 /* signed_type does not work on pointer types. */
7413 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7415 /* The following case also applies to X < signed_max+1
7416 and X >= signed_max+1 because of previous transformations. */
7417 if (code == LE_EXPR || code == GT_EXPR)
{ tree st0, st1;
7420 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7421 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7423 return fold (build (code == LE_EXPR ? GE_EXPR : LT_EXPR,
7424 type, fold_convert (st0, arg0),
7425 fold_convert (st1, integer_zero_node))); }
7431 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7432 a MINUS_EXPR of a constant, we can convert it into a comparison with
7433 a revised constant as long as no overflow occurs. */
7434 if ((code == EQ_EXPR || code == NE_EXPR)
7435 && TREE_CODE (arg1) == INTEGER_CST
7436 && (TREE_CODE (arg0) == PLUS_EXPR
7437 || TREE_CODE (arg0) == MINUS_EXPR)
7438 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7439 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7440 ? MINUS_EXPR : PLUS_EXPR,
7441 arg1, TREE_OPERAND (arg0, 1), 0))
7442 && ! TREE_CONSTANT_OVERFLOW (tem))
7443 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7445 /* Similarly for a NEGATE_EXPR. */
7446 else if ((code == EQ_EXPR || code == NE_EXPR)
7447 && TREE_CODE (arg0) == NEGATE_EXPR
7448 && TREE_CODE (arg1) == INTEGER_CST
7449 && 0 != (tem = negate_expr (arg1))
7450 && TREE_CODE (tem) == INTEGER_CST
7451 && ! TREE_CONSTANT_OVERFLOW (tem))
7452 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7454 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7455 for !=. Don't do this for ordered comparisons due to overflow. */
7456 else if ((code == NE_EXPR || code == EQ_EXPR)
7457 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7458 return fold (build (code, type,
7459 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7461 /* If we are widening one operand of an integer comparison,
7462 see if the other operand is similarly being widened. Perhaps we
7463 can do the comparison in the narrower type. */
7464 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7465 && TREE_CODE (arg0) == NOP_EXPR
7466 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7467 && (code == EQ_EXPR || code == NE_EXPR
7468 || TREE_UNSIGNED (TREE_TYPE (arg0))
7469 == TREE_UNSIGNED (TREE_TYPE (tem)))
7470 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7471 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7472 || (TREE_CODE (t1) == INTEGER_CST
7473 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7474 return fold (build (code, type, tem,
7475 fold_convert (TREE_TYPE (tem), t1)));
7477 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7478 constant, we can simplify it. */
7479 else if (TREE_CODE (arg1) == INTEGER_CST
7480 && (TREE_CODE (arg0) == MIN_EXPR
7481 || TREE_CODE (arg0) == MAX_EXPR)
7482 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7483 return optimize_minmax_comparison (t);
7485 /* If we are comparing an ABS_EXPR with a constant, we can
7486 convert all the cases into explicit comparisons, but they may
7487 well not be faster than doing the ABS and one comparison.
7488 But ABS (X) <= C is a range comparison, which becomes a subtraction
7489 and a comparison, and is probably faster. */
7490 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7491 && TREE_CODE (arg0) == ABS_EXPR
7492 && ! TREE_SIDE_EFFECTS (arg0)
7493 && (0 != (tem = negate_expr (arg1)))
7494 && TREE_CODE (tem) == INTEGER_CST
7495 && ! TREE_CONSTANT_OVERFLOW (tem))
7496 return fold (build (TRUTH_ANDIF_EXPR, type,
7497 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7498 build (LE_EXPR, type,
7499 TREE_OPERAND (arg0, 0), arg1)));
7501 /* If this is an EQ or NE comparison with zero and ARG0 is
7502 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7503 two operations, but the latter can be done in one less insn
7504 on machines that have only two-operand insns or on which a
7505 constant cannot be the first operand. */
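/* For example, ((1 << n) & x) != 0 becomes ((x >> n) & 1) != 0, avoiding
the need to materialize the constant 1 << n. */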
7506 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7507 && TREE_CODE (arg0) == BIT_AND_EXPR)
7509 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7510 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7512 return fold (build (code, type,
7513 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7515 build (RSHIFT_EXPR, TREE_TYPE (TREE_OPERAND (arg0, 0)),
7516 TREE_OPERAND (arg0, 1),
7517 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7518 fold_convert (TREE_TYPE (arg0), integer_one_node)), arg1));
7521 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7522 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7524 return fold (build (code, type,
7525 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7527 build (RSHIFT_EXPR, TREE_TYPE (TREE_OPERAND (arg0, 1)),
7528 TREE_OPERAND (arg0, 0),
7529 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7530 fold_convert (TREE_TYPE (arg0), integer_one_node)), arg1));
7535 /* If this is an NE or EQ comparison of zero against the result of a
7536 signed MOD operation whose second operand is a power of 2, make
7537 the MOD operation unsigned since it is simpler and equivalent. */
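/* For example, (x % 4) == 0 for signed x tests the same condition as
((unsigned) x % 4) == 0: whether the two low bits of x are clear. */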
7538 if ((code == NE_EXPR || code == EQ_EXPR)
7539 && integer_zerop (arg1)
7540 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7541 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7542 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7543 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7544 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7545 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7547 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7548 tree newmod = build (TREE_CODE (arg0), newtype,
7549 fold_convert (newtype,
7550 TREE_OPERAND (arg0, 0)),
7551 fold_convert (newtype,
7552 TREE_OPERAND (arg0, 1)));
7554 return build (code, type, newmod, fold_convert (newtype, arg1));
7557 /* If this is an NE comparison of zero with an AND of one, remove the
7558 comparison since the AND will give the correct value. */
7559 if (code == NE_EXPR && integer_zerop (arg1)
7560 && TREE_CODE (arg0) == BIT_AND_EXPR
7561 && integer_onep (TREE_OPERAND (arg0, 1)))
7562 return fold_convert (type, arg0);
7564 /* If we have (A & C) == C where C is a power of 2, convert this into
7565 (A & C) != 0. Similarly for NE_EXPR. */
7566 if ((code == EQ_EXPR || code == NE_EXPR)
7567 && TREE_CODE (arg0) == BIT_AND_EXPR
7568 && integer_pow2p (TREE_OPERAND (arg0, 1))
7569 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7570 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7571 arg0, integer_zero_node));
7573 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7574 2, then fold the expression into shifts and logical operations. */
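/* For example, (x & 8) != 0 can be computed branchlessly as (x >> 3) & 1. */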
7575 tem = fold_single_bit_test (code, arg0, arg1, type); if (tem) return tem;
7579 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7580 Similarly for NE_EXPR. */
7581 if ((code == EQ_EXPR || code == NE_EXPR)
7582 && TREE_CODE (arg0) == BIT_AND_EXPR
7583 && TREE_CODE (arg1) == INTEGER_CST
7584 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7587 tree dandnotc = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7588 arg1, build1 (BIT_NOT_EXPR,
7589 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7590 TREE_OPERAND (arg0, 1))));
7591 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7592 if (integer_nonzerop (dandnotc))
7593 return omit_one_operand (type, rslt, arg0);
7596 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7597 Similarly for NE_EXPR. */
7598 if ((code == EQ_EXPR || code == NE_EXPR)
7599 && TREE_CODE (arg0) == BIT_IOR_EXPR
7600 && TREE_CODE (arg1) == INTEGER_CST
7601 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7604 tree candnotd = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7605 TREE_OPERAND (arg0, 1),
7606 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7607 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7608 if (integer_nonzerop (candnotd))
7609 return omit_one_operand (type, rslt, arg0);
7612 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7613 and similarly for >= into !=. */
7614 if ((code == LT_EXPR || code == GE_EXPR)
7615 && TREE_UNSIGNED (TREE_TYPE (arg0))
7616 && TREE_CODE (arg1) == LSHIFT_EXPR
7617 && integer_onep (TREE_OPERAND (arg1, 0)))
7618 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7619 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7620 TREE_OPERAND (arg1, 1)),
7621 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7623 else if ((code == LT_EXPR || code == GE_EXPR)
7624 && TREE_UNSIGNED (TREE_TYPE (arg0))
7625 && (TREE_CODE (arg1) == NOP_EXPR
7626 || TREE_CODE (arg1) == CONVERT_EXPR)
7627 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7628 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7630 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7631 fold_convert (TREE_TYPE (arg0),
7632 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7633 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7635 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7637 /* Simplify comparison of something with itself. (For IEEE
7638 floating-point, we can only do some of these simplifications.) */
7639 if (operand_equal_p (arg0, arg1, 0))
7644 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7645 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7646 return constant_boolean_node (1, type);
7651 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7652 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7653 return constant_boolean_node (1, type);
7654 return fold (build (EQ_EXPR, type, arg0, arg1));
7657 /* For NE, we can only do this simplification if integer
7658 or we don't honor IEEE floating point NaNs. */
7659 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7660 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7662 /* ... fall through ... */
7665 return constant_boolean_node (0, type);
7671 /* If we are comparing an expression that just has comparisons
7672 of two integer values, arithmetic expressions of those comparisons,
7673 and constants, we can simplify it. There are only three cases
7674 to check: the two values can either be equal, the first can be
7675 greater, or the second can be greater. Fold the expression for
7676 those three values. Since each value must be 0 or 1, we have
7677 eight possibilities, each of which corresponds to the constant 0
7678 or 1 or one of the six possible comparisons.
7680 This handles common cases like (a > b) == 0 but also handles
7681 expressions like ((x > y) - (y > x)) > 0, which supposedly
7682 occur in macroized code. */
7684 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7686 tree cval1 = 0, cval2 = 0; int save_p = 0;
7689 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7690 /* Don't handle degenerate cases here; they should already
7691 have been handled anyway. */
7692 && cval1 != 0 && cval2 != 0
7693 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7694 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7695 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7696 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7697 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7698 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7699 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7701 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7702 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7704 /* We can't just pass T to eval_subst in case cval1 or cval2
7705 was the same as ARG1. */
7708 tree high_result = fold (build (code, type,
7709 eval_subst (arg0, cval1, maxval, cval2, minval), arg1));
7712 tree equal_result = fold (build (code, type,
7713 eval_subst (arg0, cval1, maxval, cval2, maxval), arg1));
7716 tree low_result = fold (build (code, type,
7717 eval_subst (arg0, cval1, minval, cval2, maxval), arg1));
7720 /* All three of these results should be 0 or 1. Confirm they
7721 are. Then use those values to select the proper code to return. */
7724 if ((integer_zerop (high_result)
7725 || integer_onep (high_result))
7726 && (integer_zerop (equal_result)
7727 || integer_onep (equal_result))
7728 && (integer_zerop (low_result)
7729 || integer_onep (low_result)))
7731 /* Make a 3-bit mask with the high-order bit being the
7732 value for `>', the next for `=', and the low for `<'. */
7733 switch ((integer_onep (high_result) * 4)
7734 + (integer_onep (equal_result) * 2)
7735 + integer_onep (low_result))
7739 return omit_one_operand (type, integer_zero_node, arg0);
7760 return omit_one_operand (type, integer_one_node, arg0);
7763 t = build (code, type, cval1, cval2);
7765 return save_expr (t);
7772 /* If this is a comparison of a field, we may be able to simplify it. */
7773 if (((TREE_CODE (arg0) == COMPONENT_REF
7774 && (*lang_hooks.can_use_bit_fields_p) ())
7775 || TREE_CODE (arg0) == BIT_FIELD_REF)
7776 && (code == EQ_EXPR || code == NE_EXPR)
7777 /* Handle the constant case even without -O
7778 to make sure the warnings are given. */
7779 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7781 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7786 /* If this is a comparison of complex values and either or both sides
7787 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7788 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7789 This may prevent needless evaluations. */
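/* For instance (illustrative, with z and w __complex__ double operands),
   z == w is split into REALPART (z) == REALPART (w) joined by
   TRUTH_ANDIF_EXPR with IMAGPART (z) == IMAGPART (w), so the imaginary
   parts are never evaluated when the real parts already differ; != uses
   TRUTH_ORIF_EXPR instead.  */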
7790 if ((code == EQ_EXPR || code == NE_EXPR)
7791 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7792 && (TREE_CODE (arg0) == COMPLEX_EXPR
7793 || TREE_CODE (arg1) == COMPLEX_EXPR
7794 || TREE_CODE (arg0) == COMPLEX_CST
7795 || TREE_CODE (arg1) == COMPLEX_CST))
7797 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7798 tree real0, imag0, real1, imag1;
7800 arg0 = save_expr (arg0);
7801 arg1 = save_expr (arg1);
7802 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7803 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7804 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7805 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7807 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7810 fold (build (code, type, real0, real1)),
7811 fold (build (code, type, imag0, imag1))));
7814 /* Optimize comparisons of strlen vs zero to a compare of the
7815 first character of the string vs zero. To wit,
7816 strlen(ptr) == 0 => *ptr == 0
7817 strlen(ptr) != 0 => *ptr != 0
7818 Other cases should reduce to one of these two (or a constant)
7819 due to the return value of strlen being unsigned. */
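/* For example, strlen (p) > 0 should already have been canonicalized to
   strlen (p) != 0 (an unsigned value is > 0 exactly when it is != 0),
   which the code below then reduces to *p != 0.  */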
7820 if ((code == EQ_EXPR || code == NE_EXPR)
7821 && integer_zerop (arg1)
7822 && TREE_CODE (arg0) == CALL_EXPR)
7824 tree fndecl = get_callee_fndecl (arg0);
7828 && DECL_BUILT_IN (fndecl)
7829 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7830 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7831 && (arglist = TREE_OPERAND (arg0, 1))
7832 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7833 && ! TREE_CHAIN (arglist))
7834 return fold (build (code, type,
7835 build1 (INDIRECT_REF, char_type_node,
7836 TREE_VALUE (arglist)),
7837 integer_zero_node));
7840 /* From here on, the only cases we handle are when the result is
7841 known to be a constant.
7843 To compute GT, swap the arguments and do LT.
7844 To compute GE, do LT and invert the result.
7845 To compute LE, swap the arguments, do LT and invert the result.
7846 To compute NE, do EQ and invert the result.
7848 Therefore, the code below must handle only EQ and LT. */
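/* For instance, a GE_EXPR on two REAL_CSTs is computed below as a LT_EXPR
   whose result is then inverted; the NaN handling further down deliberately
   returns true for that LT_EXPR when either operand is a NaN, so the
   inversion produces the required false result.  */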
7850 if (code == LE_EXPR || code == GT_EXPR)
7852 tem = arg0, arg0 = arg1, arg1 = tem;
7853 code = swap_tree_comparison (code);
7856 /* Note that it is safe to invert for real values here because we
7857 will check below in the one case that it matters. */
7861 if (code == NE_EXPR || code == GE_EXPR)
7864 code = invert_tree_comparison (code);
7867 /* Compute a result for LT or EQ if args permit;
7868 otherwise return T. */
7869 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7871 if (code == EQ_EXPR)
7872 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7874 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7875 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7876 : INT_CST_LT (arg0, arg1)),
7880 #if 0 /* This is no longer useful, but breaks some real code. */
7881 /* Assume a nonexplicit constant cannot equal an explicit one,
7882 since such code would be undefined anyway.
7883 Exception: on sysvr4, using #pragma weak,
7884 a label can come out as 0. */
7885 else if (TREE_CODE (arg1) == INTEGER_CST
7886 && !integer_zerop (arg1)
7887 && TREE_CONSTANT (arg0)
7888 && TREE_CODE (arg0) == ADDR_EXPR
7890 t1 = build_int_2 (0, 0);
7892 /* Two real constants can be compared explicitly. */
7893 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7895 /* If either operand is a NaN, the result is false with two
7896 exceptions: First, an NE_EXPR is true on NaNs, but that case
7897 is already handled correctly since we will be inverting the
7898 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7899 or a GE_EXPR into a LT_EXPR, we must return true so that it
7900 will be inverted into false. */
7902 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7903 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7904 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7906 else if (code == EQ_EXPR)
7907 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7908 TREE_REAL_CST (arg1)),
7911 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7912 TREE_REAL_CST (arg1)),
7916 if (t1 == NULL_TREE)
7920 TREE_INT_CST_LOW (t1) ^= 1;
7922 TREE_TYPE (t1) = type;
7923 if (TREE_CODE (type) == BOOLEAN_TYPE)
7924 return (*lang_hooks.truthvalue_conversion) (t1);
7928 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7929 so all simple results must be passed through pedantic_non_lvalue. */
7930 if (TREE_CODE (arg0) == INTEGER_CST)
7932 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
7933 /* Only optimize constant conditions when the selected branch
7934 has the same type as the COND_EXPR. This avoids optimizing
7935 away "c ? x : throw", where the throw has a void type. */
7936 if (! VOID_TYPE_P (TREE_TYPE (tem))
7937 || VOID_TYPE_P (TREE_TYPE (t)))
7938 return pedantic_non_lvalue (tem);
7941 if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7942 return pedantic_omit_one_operand (type, arg1, arg0);
7944 /* If we have A op B ? A : C, we may be able to convert this to a
7945 simpler expression, depending on the operation and the values
7946 of B and C. Signed zeros prevent all of these transformations,
7947 for reasons given above each one. */
7949 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7950 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7951 arg1, TREE_OPERAND (arg0, 1))
7952 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7954 tree arg2 = TREE_OPERAND (t, 2);
7955 enum tree_code comp_code = TREE_CODE (arg0);
7959 /* If we have A op 0 ? A : -A, consider applying the following transformations:
7962 A == 0? A : -A same as -A
7963 A != 0? A : -A same as A
7964 A >= 0? A : -A same as abs (A)
7965 A > 0? A : -A same as abs (A)
7966 A <= 0? A : -A same as -abs (A)
7967 A < 0? A : -A same as -abs (A)
7969 None of these transformations work for modes with signed
7970 zeros. If A is +/-0, the first two transformations will
7971 change the sign of the result (from +0 to -0, or vice
7972 versa). The last four will fix the sign of the result,
7973 even though the original expressions could be positive or
7974 negative, depending on the sign of A.
7976 Note that all these transformations are correct if A is
7977 NaN, since the two alternatives (A and -A) are also NaNs. */
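/* Concretely (illustrative, for a double operand x in a mode whose signed
   zeros need not be honored): x <= 0.0 ? x : -x is rewritten below as the
   negation of ABS_EXPR <x>; if x is a NaN, both arms are NaN, so the
   rewrite does not change the result.  */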
7978 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7979 ? real_zerop (TREE_OPERAND (arg0, 1))
7980 : integer_zerop (TREE_OPERAND (arg0, 1)))
7981 && TREE_CODE (arg2) == NEGATE_EXPR
7982 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7986 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
7987 tem = fold_convert (type, negate_expr (tem));
7988 return pedantic_non_lvalue (tem);
7990 return pedantic_non_lvalue (fold_convert (type, arg1));
7993 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7994 arg1 = fold_convert ((*lang_hooks.types.signed_type)
7995 (TREE_TYPE (arg1)), arg1);
7996 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
7997 return pedantic_non_lvalue (fold_convert (type, arg1));
8000 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8001 arg1 = fold_convert ((*lang_hooks.types.signed_type)
8002 (TREE_TYPE (arg1)), arg1);
8003 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8004 arg1 = negate_expr (fold_convert (type, arg1));
8005 return pedantic_non_lvalue (arg1);
8010 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8011 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8012 both transformations are correct when A is NaN: A != 0
8013 is then true, and A == 0 is false. */
8015 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8017 if (comp_code == NE_EXPR)
8018 return pedantic_non_lvalue (fold_convert (type, arg1));
8019 else if (comp_code == EQ_EXPR)
8020 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8023 /* Try some transformations of A op B ? A : B.
8025 A == B? A : B same as B
8026 A != B? A : B same as A
8027 A >= B? A : B same as max (A, B)
8028 A > B? A : B same as max (B, A)
8029 A <= B? A : B same as min (A, B)
8030 A < B? A : B same as min (B, A)
8032 As above, these transformations don't work in the presence
8033 of signed zeros. For example, if A and B are zeros of
8034 opposite sign, the first two transformations will change
8035 the sign of the result. In the last four, the original
8036 expressions give different results for (A=+0, B=-0) and
8037 (A=-0, B=+0), but the transformed expressions do not.
8039 The first two transformations are correct if either A or B
8040 is a NaN. In the first transformation, the condition will
8041 be false, and B will indeed be chosen. In the case of the
8042 second transformation, the condition A != B will be true,
8043 and A will be chosen.
8045 The conversions to max() and min() are not correct if B is
8046 a number and A is not. The conditions in the original
8047 expressions will be false, so all four give B. The min()
8048 and max() versions would give a NaN instead. */
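/* For example, with ordinary signed integers x and y, x >= y ? x : y is
   rewritten as MAX_EXPR <x, y>, and x < y ? x : y as MIN_EXPR <y, x>,
   per the table above (illustrative instances only).  */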
8049 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8050 arg2, TREE_OPERAND (arg0, 0)))
8052 tree comp_op0 = TREE_OPERAND (arg0, 0);
8053 tree comp_op1 = TREE_OPERAND (arg0, 1);
8054 tree comp_type = TREE_TYPE (comp_op0);
8056 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8057 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8067 return pedantic_non_lvalue (fold_convert (type, arg2));
8069 return pedantic_non_lvalue (fold_convert (type, arg1));
8072 /* In C++ a ?: expression can be an lvalue, so put the
8073 operand which will be used if they are equal first
8074 so that we can convert this back to the
8075 corresponding COND_EXPR. */
8076 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8077 return pedantic_non_lvalue (fold_convert
8078 (type, fold (build (MIN_EXPR, comp_type,
8079 (comp_code == LE_EXPR
8080 ? comp_op0 : comp_op1),
8081 (comp_code == LE_EXPR
8082 ? comp_op1 : comp_op0)))));
8086 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8087 return pedantic_non_lvalue (fold_convert
8088 (type, fold (build (MAX_EXPR, comp_type,
8089 (comp_code == GE_EXPR
8090 ? comp_op0 : comp_op1),
8091 (comp_code == GE_EXPR
8092 ? comp_op1 : comp_op0)))));
8099 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8100 we might still be able to simplify this. For example,
8101 if C1 is one less or one more than C2, this might have started
8102 out as a MIN or MAX and been transformed by this function.
8103 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
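/* Worked examples (illustrative): for int x, x < 4 ? x : 3 has C1 = 4 =
   C2 + 1 and is recognized below as MIN_EXPR <x, 3>, while x > 2 ? x : 3
   has C1 = 2 = C2 - 1 and becomes MAX_EXPR <x, 3>.  */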
8105 if (INTEGRAL_TYPE_P (type)
8106 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8107 && TREE_CODE (arg2) == INTEGER_CST)
8111 /* We can replace A with C1 in this case. */
8112 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8113 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8114 TREE_OPERAND (t, 2)));
8117 /* If C1 is C2 + 1, this is min(A, C2). */
8118 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8119 && operand_equal_p (TREE_OPERAND (arg0, 1),
8120 const_binop (PLUS_EXPR, arg2,
8121 integer_one_node, 0), 1))
8122 return pedantic_non_lvalue
8123 (fold (build (MIN_EXPR, type, arg1, arg2)));
8127 /* If C1 is C2 - 1, this is min(A, C2). */
8128 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8129 && operand_equal_p (TREE_OPERAND (arg0, 1),
8130 const_binop (MINUS_EXPR, arg2,
8131 integer_one_node, 0), 1))
8132 return pedantic_non_lvalue
8133 (fold (build (MIN_EXPR, type, arg1, arg2)));
8137 /* If C1 is C2 - 1, this is max(A, C2). */
8138 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8139 && operand_equal_p (TREE_OPERAND (arg0, 1),
8140 const_binop (MINUS_EXPR, arg2,
8141 integer_one_node, 0), 1))
8142 return pedantic_non_lvalue
8143 (fold (build (MAX_EXPR, type, arg1, arg2)));
8147 /* If C1 is C2 + 1, this is max(A, C2). */
8148 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8149 && operand_equal_p (TREE_OPERAND (arg0, 1),
8150 const_binop (PLUS_EXPR, arg2,
8151 integer_one_node, 0), 1))
8152 return pedantic_non_lvalue
8153 (fold (build (MAX_EXPR, type, arg1, arg2)));
8162 /* If the second operand is simpler than the third, swap them
8163 since that produces better jump optimization results. */
8164 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8165 TREE_OPERAND (t, 2), false))
8167 /* See if this can be inverted. If it can't, possibly because
8168 it was a floating-point inequality comparison, don't do anything. */
8170 tem = invert_truthvalue (arg0);
8172 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8173 return fold (build (code, type, tem,
8174 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8177 /* Convert A ? 1 : 0 to simply A. */
8178 if (integer_onep (TREE_OPERAND (t, 1))
8179 && integer_zerop (TREE_OPERAND (t, 2))
8180 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8181 call to fold will try to move the conversion inside
8182 a COND, which will recurse. In that case, the COND_EXPR
8183 is probably the best choice, so leave it alone. */
8184 && type == TREE_TYPE (arg0))
8185 return pedantic_non_lvalue (arg0);
8187 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8188 over COND_EXPR in cases such as floating point comparisons. */
8189 if (integer_zerop (TREE_OPERAND (t, 1))
8190 && integer_onep (TREE_OPERAND (t, 2))
8191 && truth_value_p (TREE_CODE (arg0)))
8192 return pedantic_non_lvalue (fold_convert (type,
8193 invert_truthvalue (arg0)));
8195 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8196 operation is simply A & 2. */
8198 if (integer_zerop (TREE_OPERAND (t, 2))
8199 && TREE_CODE (arg0) == NE_EXPR
8200 && integer_zerop (TREE_OPERAND (arg0, 1))
8201 && integer_pow2p (arg1)
8202 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8203 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8205 return pedantic_non_lvalue (fold_convert (type,
8206 TREE_OPERAND (arg0, 0)));
8208 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8209 if (integer_zerop (TREE_OPERAND (t, 2))
8210 && truth_value_p (TREE_CODE (arg0))
8211 && truth_value_p (TREE_CODE (arg1)))
8212 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8215 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8216 if (integer_onep (TREE_OPERAND (t, 2))
8217 && truth_value_p (TREE_CODE (arg0))
8218 && truth_value_p (TREE_CODE (arg1)))
8220 /* Only perform transformation if ARG0 is easily inverted. */
8221 tem = invert_truthvalue (arg0);
8222 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8223 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8230 /* When pedantic, a compound expression can be neither an lvalue
8231 nor an integer constant expression. */
8232 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
8235 /* Don't let (0, 0) be a null pointer constant. */
8235 if (integer_zerop (arg1))
8236 return build1 (NOP_EXPR, type, arg1);
8237 return fold_convert (type, arg1);
8241 return build_complex (type, arg0, arg1);
8245 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8247 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8248 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8249 TREE_OPERAND (arg0, 1));
8250 else if (TREE_CODE (arg0) == COMPLEX_CST)
8251 return TREE_REALPART (arg0);
8252 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8253 return fold (build (TREE_CODE (arg0), type,
8254 fold (build1 (REALPART_EXPR, type,
8255 TREE_OPERAND (arg0, 0))),
8256 fold (build1 (REALPART_EXPR,
8257 type, TREE_OPERAND (arg0, 1)))));
8261 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8262 return fold_convert (type, integer_zero_node);
8263 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8264 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8265 TREE_OPERAND (arg0, 0));
8266 else if (TREE_CODE (arg0) == COMPLEX_CST)
8267 return TREE_IMAGPART (arg0);
8268 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8269 return fold (build (TREE_CODE (arg0), type,
8270 fold (build1 (IMAGPART_EXPR, type,
8271 TREE_OPERAND (arg0, 0))),
8272 fold (build1 (IMAGPART_EXPR, type,
8273 TREE_OPERAND (arg0, 1)))));
8276 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where appropriate. */
8278 case CLEANUP_POINT_EXPR:
8279 if (! has_cleanups (arg0))
8280 return TREE_OPERAND (t, 0);
8283 enum tree_code code0 = TREE_CODE (arg0);
8284 int kind0 = TREE_CODE_CLASS (code0);
8285 tree arg00 = TREE_OPERAND (arg0, 0);
8288 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8289 return fold (build1 (code0, type,
8290 fold (build1 (CLEANUP_POINT_EXPR,
8291 TREE_TYPE (arg00), arg00))));
8293 if (kind0 == '<' || kind0 == '2'
8294 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8295 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8296 || code0 == TRUTH_XOR_EXPR)
8298 arg01 = TREE_OPERAND (arg0, 1);
8300 if (TREE_CONSTANT (arg00)
8301 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8302 && ! has_cleanups (arg00)))
8303 return fold (build (code0, type, arg00,
8304 fold (build1 (CLEANUP_POINT_EXPR,
8305 TREE_TYPE (arg01), arg01))));
8307 if (TREE_CONSTANT (arg01))
8308 return fold (build (code0, type,
8309 fold (build1 (CLEANUP_POINT_EXPR,
8310 TREE_TYPE (arg00), arg00)),
8318 /* Check for a built-in function. */
8319 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8320 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8322 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8324 tree tmp = fold_builtin (expr);
8332 } /* switch (code) */
8335 #ifdef ENABLE_FOLD_CHECKING
8338 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8339 static void fold_check_failed (tree, tree);
8340 void print_fold_checksum (tree);
8342 /* When --enable-checking=fold is in effect, compute a digest of expr
8343 before and after the actual fold call to verify that fold did not
8344 accidentally change the original expr. */
8351 unsigned char checksum_before[16], checksum_after[16];
8354 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8355 md5_init_ctx (&ctx);
8356 fold_checksum_tree (expr, &ctx, ht);
8357 md5_finish_ctx (&ctx, checksum_before);
8360 ret = fold_1 (expr);
8362 md5_init_ctx (&ctx);
8363 fold_checksum_tree (expr, &ctx, ht);
8364 md5_finish_ctx (&ctx, checksum_after);
8367 if (memcmp (checksum_before, checksum_after, 16))
8368 fold_check_failed (expr, ret);
8374 print_fold_checksum (tree expr)
8377 unsigned char checksum[16], cnt;
8380 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8381 md5_init_ctx (&ctx);
8382 fold_checksum_tree (expr, &ctx, ht);
8383 md5_finish_ctx (&ctx, checksum);
8385 for (cnt = 0; cnt < 16; ++cnt)
8386 fprintf (stderr, "%02x", checksum[cnt]);
8387 putc ('\n', stderr);
8391 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8393 internal_error ("fold check: original tree changed by fold");
8397 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8400 enum tree_code code;
8401 char buf[sizeof (struct tree_decl)];
8404 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8405 > sizeof (struct tree_decl)
8406 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8410 slot = htab_find_slot (ht, expr, INSERT);
8414 code = TREE_CODE (expr);
8415 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8417 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8418 memcpy (buf, expr, tree_size (expr));
8420 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8422 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8424 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8425 memcpy (buf, expr, tree_size (expr));
8427 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8429 else if (TREE_CODE_CLASS (code) == 't'
8430 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8432 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8433 memcpy (buf, expr, tree_size (expr));
8435 TYPE_POINTER_TO (expr) = NULL;
8436 TYPE_REFERENCE_TO (expr) = NULL;
8438 md5_process_bytes (expr, tree_size (expr), ctx);
8439 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8440 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8441 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8442 len = TREE_CODE_LENGTH (code);
8443 switch (TREE_CODE_CLASS (code))
8449 md5_process_bytes (TREE_STRING_POINTER (expr),
8450 TREE_STRING_LENGTH (expr), ctx);
8453 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8454 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8457 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8467 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8468 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8471 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8472 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8481 case SAVE_EXPR: len = 2; break;
8482 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8483 case RTL_EXPR: len = 0; break;
8484 case WITH_CLEANUP_EXPR: len = 2; break;
8493 for (i = 0; i < len; ++i)
8494 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8497 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8498 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8499 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8500 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8501 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8502 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8503 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8504 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8505 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8506 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8507 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8510 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8511 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8512 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8513 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8514 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8515 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8516 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8517 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8518 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8519 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8528 /* Perform constant folding and related simplification of initializer
8529 expression EXPR. This behaves identically to "fold" but ignores
8530 potential run-time traps and exceptions that fold must preserve. */
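/* Illustrative use (the caller shown is hypothetical): when folding a
   static initializer such as "double d = 1.0 / 0.0;", a front end can call
   fold_initializer rather than fold, so the division is folded even though
   trapping math would normally make fold preserve it; a static initializer
   cannot trap at run time anyway.  */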
8533 fold_initializer (tree expr)
8535 int saved_signaling_nans = flag_signaling_nans;
8536 int saved_trapping_math = flag_trapping_math;
8537 int saved_trapv = flag_trapv;
8540 flag_signaling_nans = 0;
8541 flag_trapping_math = 0;
8544 result = fold (expr);
8546 flag_signaling_nans = saved_signaling_nans;
8547 flag_trapping_math = saved_trapping_math;
8548 flag_trapv = saved_trapv;
8553 /* Determine if first argument is a multiple of second argument. Return 0 if
8554 it is not, or if we cannot easily determine that it is.
8556 An example of the sort of thing we care about (at this point; this routine
8557 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8558 fold cases do now) is discovering that
8560 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8566 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8568 This code also handles discovering that
8570 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8572 is a multiple of 8 so we don't have to worry about dealing with a possible remainder.
8575 Note that we *look* inside a SAVE_EXPR only to determine how it was
8576 calculated; it is not safe for fold to do much of anything else with the
8577 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8578 at run time. For example, the latter example above *cannot* be implemented
8579 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8580 evaluation time of the original SAVE_EXPR is not necessarily the same at
8581 the time the new expression is evaluated. The only optimization of this
8582 sort that would be valid is changing
8584 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8588 SAVE_EXPR (I) * SAVE_EXPR (J)
8590 (where the same SAVE_EXPR (J) is used in the original and the
8591 transformed version). */
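/* A small worked case (illustrative): with TYPE sizetype, TOP the tree for
   i * 8 + 16 and BOTTOM the constant 8, the PLUS_EXPR case below requires
   both operands to be multiples of 8; i * 8 qualifies via the MULT_EXPR
   case because its second factor equals BOTTOM, and 16 qualifies via the
   INTEGER_CST case because 16 % 8 == 0, so the result is 1.  */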
8594 multiple_of_p (tree type, tree top, tree bottom)
8596 if (operand_equal_p (top, bottom, 0))
8599 if (TREE_CODE (type) != INTEGER_TYPE)
8602 switch (TREE_CODE (top))
8605 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8606 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8610 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8611 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8614 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8618 op1 = TREE_OPERAND (top, 1);
8619 /* const_binop may not detect overflow correctly,
8620 so check for it explicitly here. */
8621 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8622 > TREE_INT_CST_LOW (op1)
8623 && TREE_INT_CST_HIGH (op1) == 0
8624 && 0 != (t1 = fold_convert (type,
8625 const_binop (LSHIFT_EXPR,
8628 && ! TREE_OVERFLOW (t1))
8629 return multiple_of_p (type, t1, bottom);
8634 /* Can't handle conversions from non-integral or wider integral type. */
8635 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8636 || (TYPE_PRECISION (type)
8637 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8640 /* .. fall through ... */
8643 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8646 if (TREE_CODE (bottom) != INTEGER_CST
8647 || (TREE_UNSIGNED (type)
8648 && (tree_int_cst_sgn (top) < 0
8649 || tree_int_cst_sgn (bottom) < 0)))
8651 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8659 /* Return true if `t' is known to be non-negative. */
8662 tree_expr_nonnegative_p (tree t)
8664 switch (TREE_CODE (t))
8670 return tree_int_cst_sgn (t) >= 0;
8673 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8676 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8677 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8678 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8680 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8681 both unsigned and at least 2 bits shorter than the result. */
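/* Example of the test below: two unsigned chars zero-extended to a 32-bit
   int sum to a value needing at most 9 bits, so prec = 8 + 1 < 32 and the
   PLUS_EXPR is known to be non-negative.  */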
8682 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8683 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8684 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8686 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8687 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8688 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8689 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8691 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8692 TYPE_PRECISION (inner2)) + 1;
8693 return prec < TYPE_PRECISION (TREE_TYPE (t));
8699 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8701 /* x * x for floating point x is always non-negative. */
8702 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8704 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8705 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8708 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8709 both unsigned and their total bits is shorter than the result. */
8710 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8711 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8712 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8714 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8715 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8716 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8717 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8718 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8719 < TYPE_PRECISION (TREE_TYPE (t));
8723 case TRUNC_DIV_EXPR:
8725 case FLOOR_DIV_EXPR:
8726 case ROUND_DIV_EXPR:
8727 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8728 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8730 case TRUNC_MOD_EXPR:
8732 case FLOOR_MOD_EXPR:
8733 case ROUND_MOD_EXPR:
8734 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8737 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8738 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8742 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8743 tree outer_type = TREE_TYPE (t);
8745 if (TREE_CODE (outer_type) == REAL_TYPE)
8747 if (TREE_CODE (inner_type) == REAL_TYPE)
8748 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8749 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8751 if (TREE_UNSIGNED (inner_type))
8753 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8756 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8758 if (TREE_CODE (inner_type) == REAL_TYPE)
8759 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8760 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8761 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8762 && TREE_UNSIGNED (inner_type);
8768 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8769 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8771 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8773 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8774 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8776 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8777 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8779 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8781 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8783 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8784 case NON_LVALUE_EXPR:
8785 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8787 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8789 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8793 tree fndecl = get_callee_fndecl (t);
8794 tree arglist = TREE_OPERAND (t, 1);
8796 && DECL_BUILT_IN (fndecl)
8797 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8798 switch (DECL_FUNCTION_CODE (fndecl))
8801 case BUILT_IN_CABSL:
8802 case BUILT_IN_CABSF:
8807 case BUILT_IN_EXP2F:
8808 case BUILT_IN_EXP2L:
8809 case BUILT_IN_EXP10:
8810 case BUILT_IN_EXP10F:
8811 case BUILT_IN_EXP10L:
8813 case BUILT_IN_FABSF:
8814 case BUILT_IN_FABSL:
8817 case BUILT_IN_FFSLL:
8818 case BUILT_IN_PARITY:
8819 case BUILT_IN_PARITYL:
8820 case BUILT_IN_PARITYLL:
8821 case BUILT_IN_POPCOUNT:
8822 case BUILT_IN_POPCOUNTL:
8823 case BUILT_IN_POPCOUNTLL:
8824 case BUILT_IN_POW10:
8825 case BUILT_IN_POW10F:
8826 case BUILT_IN_POW10L:
8828 case BUILT_IN_SQRTF:
8829 case BUILT_IN_SQRTL:
8833 case BUILT_IN_ATANF:
8834 case BUILT_IN_ATANL:
8836 case BUILT_IN_CEILF:
8837 case BUILT_IN_CEILL:
8838 case BUILT_IN_FLOOR:
8839 case BUILT_IN_FLOORF:
8840 case BUILT_IN_FLOORL:
8841 case BUILT_IN_NEARBYINT:
8842 case BUILT_IN_NEARBYINTF:
8843 case BUILT_IN_NEARBYINTL:
8844 case BUILT_IN_ROUND:
8845 case BUILT_IN_ROUNDF:
8846 case BUILT_IN_ROUNDL:
8847 case BUILT_IN_TRUNC:
8848 case BUILT_IN_TRUNCF:
8849 case BUILT_IN_TRUNCL:
8850 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8855 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8862 /* ... fall through ... */
8865 if (truth_value_p (TREE_CODE (t)))
8866 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8870 /* We don't know sign of `t', so be conservative and return false. */
8874 /* Return true if `r' is known to be non-negative.
8875 Only handles constants at the moment. */
8878 rtl_expr_nonnegative_p (rtx r)
8880 switch (GET_CODE (r))
8883 return INTVAL (r) >= 0;
8886 if (GET_MODE (r) == VOIDmode)
8887 return CONST_DOUBLE_HIGH (r) >= 0;
8895 units = CONST_VECTOR_NUNITS (r);
8897 for (i = 0; i < units; ++i)
8899 elt = CONST_VECTOR_ELT (r, i);
8900 if (!rtl_expr_nonnegative_p (elt))
8909 /* These are always nonnegative. */
8917 #include "gt-fold-const.h"